http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 9150a48..d1c31de 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -296,7 +296,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
         if (hfileSizeMB > 0.0 && kylinConfig.isDevEnv()) {
             hfileSizeMB = mbPerRegion / 2;
         }
-        
+
         int compactionThreshold = Integer.valueOf(hbaseConf.get("hbase.hstore.compactionThreshold", "3"));
         logger.info("hbase.hstore.compactionThreshold is " + compactionThreshold);
         if (hfileSizeMB > 0.0 && hfileSizeMB * compactionThreshold < mbPerRegion) {
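
Context for the hunk above: the guard compares the planned HFile size against HBase's hbase.hstore.compactionThreshold, because a region that could hold more than compactionThreshold HFiles of the planned size would immediately qualify its freshly bulk-loaded files for minor compaction. Below is a minimal, self-contained sketch of that check; the diff cuts off before the branch body, so the adjustment inside the if is an assumption, and the class and method names are illustrative.

    import org.apache.hadoop.conf.Configuration;

    public class HFileSizeGuard {
        // Mirrors the guard shown in the hunk above.
        static double capHFileSize(Configuration hbaseConf, double hfileSizeMB, double mbPerRegion) {
            int compactionThreshold = Integer.valueOf(hbaseConf.get("hbase.hstore.compactionThreshold", "3"));
            if (hfileSizeMB > 0.0 && hfileSizeMB * compactionThreshold < mbPerRegion) {
                // assumed fix-up (not shown in the diff): cap a region at
                // `compactionThreshold` HFiles of the planned size
                hfileSizeMB = mbPerRegion / compactionThreshold;
            }
            return hfileSizeMB;
        }
    }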

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
index 19f37ec..e4a9c1e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHFileJob.java
@@ -131,7 +131,7 @@ public class CubeHFileJob extends AbstractHadoopJob {
                 }
                 TotalOrderPartitioner.setPartitionFile(job.getConfiguration(), path);
                 // The reduce tasks should be one more than partition keys
-                job.setNumReduceTasks(partitionCount+1);
+                job.setNumReduceTasks(partitionCount + 1);
             }
         } else {
             logger.info("File '" + path.toString() + " doesn't exist, will not reconfigure hfile Partitions");
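
Context for the hunk above: the retained comment states the invariant behind this formatting-only change. A TotalOrderPartitioner partition file with N split keys defines N + 1 key ranges, so the job needs N + 1 reducers for the output HFiles to line up with region boundaries. A small sketch of that wiring, with illustrative names rather than the Kylin originals:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;

    public class PartitionWiring {
        // N split keys -> N + 1 ranges -> N + 1 reducers, one per future region.
        static void configure(Job job, Path partitionFile, int partitionCount) {
            job.setPartitionerClass(TotalOrderPartitioner.class);
            TotalOrderPartitioner.setPartitionFile(job.getConfiguration(), partitionFile);
            job.setNumReduceTasks(partitionCount + 1);
        }
    }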

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
index c62e1c3..fe65598 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CubeHTableUtil.java
@@ -122,7 +122,7 @@ public class CubeHTableUtil {
             admin.close();
         }
     }
-    
+
     /** create a HTable that has the same performance settings as normal cube table, for benchmark purpose */
     public static void createBenchmarkHTable(TableName tableName, String cfName) throws IOException {
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
@@ -137,7 +137,7 @@ public class CubeHTableUtil {
 
             HTableDescriptor tableDesc = new HTableDescriptor(tableName);
             tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
-            
+
             KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
             tableDesc.addFamily(createColumnFamily(kylinConfig, cfName, false));
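
Context for the hunk above: per the Javadoc, the benchmark table pins DisabledRegionSplitPolicy so HBase never splits regions mid-run and timings stay comparable. A standalone sketch of that setup, assuming the HBase 0.98/1.x-era HTableDescriptor API used throughout this commit:

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;

    public class BenchmarkTableDescriptor {
        // Pinning the split policy per table keeps region boundaries stable
        // for the duration of a benchmark.
        static HTableDescriptor describe(TableName tableName) {
            HTableDescriptor tableDesc = new HTableDescriptor(tableName);
            tableDesc.setValue(HTableDescriptor.SPLIT_POLICY, DisabledRegionSplitPolicy.class.getName());
            return tableDesc;
        }
    }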
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
index 472b638..7aecd7e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/DeprecatedGCStep.java
@@ -27,7 +27,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
index ddc868d..4fe7748 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseCuboidWriter.java
@@ -33,7 +33,9 @@
  */
 package org.apache.kylin.storage.hbase.steps;
 
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.util.List;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HTableInterface;
@@ -50,8 +52,7 @@ import org.apache.kylin.gridtable.GTRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.List;
+import com.google.common.collect.Lists;
 
 /**
  */
@@ -127,15 +128,15 @@ public class HBaseCuboidWriter implements ICuboidWriter {
 
     @Override
     public final void flush() throws IOException {
-            if (!puts.isEmpty()) {
-                long t = System.currentTimeMillis();
-                if (hTable != null) {
-                    hTable.put(puts);
-                    hTable.flushCommits();
-                }
-                logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
-                puts.clear();
+        if (!puts.isEmpty()) {
+            long t = System.currentTimeMillis();
+            if (hTable != null) {
+                hTable.put(puts);
+                hTable.flushCommits();
             }
+            logger.info("commit total " + puts.size() + " puts, totally cost:" + (System.currentTimeMillis() - t) + "ms");
+            puts.clear();
+        }
     }
 
     @Override
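
Context for the hunk above: the re-indented flush() is a simple write buffer. Puts accumulate in a list and are sent in one batch, followed by flushCommits(). A condensed sketch of the pattern; the class scaffolding is illustrative, and the null guard on hTable mirrors the diff:

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.hbase.client.HTableInterface;
    import org.apache.hadoop.hbase.client.Put;

    import com.google.common.collect.Lists;

    public class BufferedPutWriter {
        private final List<Put> puts = Lists.newArrayList();

        // Send all buffered Puts in one round trip, then clear the buffer.
        void flush(HTableInterface hTable) throws IOException {
            if (!puts.isEmpty()) {
                if (hTable != null) {
                    hTable.put(puts);
                    hTable.flushCommits();
                }
                puts.clear();
            }
        }
    }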

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
index fa62a62..9adaf24 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseStreamingOutput.java
@@ -22,7 +22,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -30,7 +29,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.measure.hllc.HyperLogLogPlusCounter;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.cube.inmemcubing.CompoundCuboidWriter;
@@ -39,11 +37,14 @@ import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.engine.mr.common.CuboidStatsUtil;
 import org.apache.kylin.engine.streaming.IStreamingOutput;
+import org.apache.kylin.measure.hllc.HyperLogLogPlusCounter;
 import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 /**
  */
 public class HBaseStreamingOutput implements IStreamingOutput {

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
index ff828b0..fbfd582 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HDFSPathGarbageCollectionStep.java
@@ -33,11 +33,11 @@ import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
 import org.apache.kylin.storage.hbase.HBaseConnection;
-
-import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 /**
  * Created by sunyerui on 15/9/17.
  */
@@ -45,7 +45,6 @@ public class HDFSPathGarbageCollectionStep extends AbstractExecutable {
 
     private static final Logger logger = LoggerFactory.getLogger(HDFSPathGarbageCollectionStep.class);
 
-
     public static final String TO_DELETE_PATHS = "toDeletePaths";
     private StringBuffer output;
     private JobEngineConfig config;
@@ -61,7 +60,7 @@ public class HDFSPathGarbageCollectionStep extends AbstractExecutable {
             config = new JobEngineConfig(context.getConfig());
             List<String> toDeletePaths = getDeletePaths();
             dropHdfsPathOnCluster(toDeletePaths, FileSystem.get(HadoopUtil.getCurrentConfiguration()));
-            
+
             if (StringUtils.isNotEmpty(context.getConfig().getHBaseClusterFs())) {
                 dropHdfsPathOnCluster(toDeletePaths, FileSystem.get(HBaseConnection.getCurrentHBaseConfiguration()));
             }

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
index a1377f2..5b2441c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/MergeGCStep.java
@@ -25,7 +25,6 @@ import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.common.KylinConfig;
@@ -63,9 +62,9 @@ public class MergeGCStep extends AbstractExecutable {
         } catch (InterruptedException e) {
             logger.warn("Thread interrupted");
         }
-        
+
         logger.info("Start doing merge gc work");
-        
+
         StringBuffer output = new StringBuffer();
         List<String> oldTables = getOldHTables();
         if (oldTables != null && oldTables.size() > 0) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
index ee3b19c..2876e3e 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionJob.java
@@ -35,7 +35,6 @@ import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 import org.apache.kylin.engine.mr.common.BatchConstants;
-import org.apache.kylin.metadata.model.DataModelDesc;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -97,7 +96,6 @@ public class RangeKeyDistributionJob extends AbstractHadoopJob {
 
             this.deletePath(job.getConfiguration(), output);
 
-
             float hfileSizeGB = kylinConfig.getHBaseHFileSizeGB();
             float regionSplitSize = kylinConfig.getKylinHBaseRegionCut();
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
index d58f8e8..c2190fb 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionMapper.java
@@ -38,7 +38,6 @@ public class RangeKeyDistributionMapper extends KylinMapper<Text, Text, Text, Lo
 
     private Text lastKey;
 
-
     @Override
     protected void setup(Context context) throws IOException {
         super.bindCurrentConfiguration(context.getConfiguration());

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
index f9dfa49..a4b7956 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RangeKeyDistributionReducer.java
@@ -77,8 +77,7 @@ public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable
             maxRegionCount = Integer.valueOf(context.getConfiguration().get(BatchConstants.CFG_REGION_NUMBER_MAX));
         }
 
-        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount
-            + ", min region count=" + minRegionCount + ", hfile size=" + hfileSizeGB);
+        logger.info("Chosen cut for htable is " + cut + ", max region count=" + maxRegionCount + ", min region count=" + minRegionCount + ", hfile size=" + hfileSizeGB);
 
         // add empty key at position 0
         gbPoints.add(new Text());
@@ -116,9 +115,7 @@ public class RangeKeyDistributionReducer extends KylinReducer<Text, LongWritable
         System.out.println(hfilePerRegion + " hfile per region");
 
         Path hfilePartitionFile = new Path(output + "/part-r-00000_hfile");
-        SequenceFile.Writer hfilePartitionWriter = new SequenceFile.Writer(
-                hfilePartitionFile.getFileSystem(context.getConfiguration()),
-                context.getConfiguration(), hfilePartitionFile, ImmutableBytesWritable.class, NullWritable.class);
+        SequenceFile.Writer hfilePartitionWriter = new SequenceFile.Writer(hfilePartitionFile.getFileSystem(context.getConfiguration()), context.getConfiguration(), hfilePartitionFile, ImmutableBytesWritable.class, NullWritable.class);
         int hfileCountInOneRegion = 0;
         for (int i = hfileSizeGB; i < gbPoints.size(); i += hfileSizeGB) {
             hfilePartitionWriter.append(new ImmutableBytesWritable(gbPoints.get(i).getBytes()), NullWritable.get());
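
Context for the hunk above: the joined line still uses the deprecated SequenceFile.Writer constructor, and the output is a SequenceFile of (ImmutableBytesWritable, NullWritable) pairs, the format TotalOrderPartitioner consumes in CubeHFileJob. A self-contained sketch of writing such a partition file; the helper name and byte[][] input are illustrative:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.SequenceFile;

    public class PartitionFileWriter {
        // One appended key per future region boundary.
        static void write(Configuration conf, Path file, byte[][] splitKeys) throws IOException {
            SequenceFile.Writer writer = new SequenceFile.Writer(file.getFileSystem(conf), conf, file, ImmutableBytesWritable.class, NullWritable.class);
            try {
                for (byte[] key : splitKeys) {
                    writer.append(new ImmutableBytesWritable(key), NullWritable.get());
                }
            } finally {
                writer.close();
            }
        }
    }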

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RowValueDecoder.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RowValueDecoder.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RowValueDecoder.java
index e1e8f8c..2aa285c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RowValueDecoder.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/RowValueDecoder.java
@@ -121,7 +121,7 @@ public class RowValueDecoder implements Cloneable {
     }
 
     // result is in order of <code>CubeDesc.getMeasures()</code>
-    public void loadCubeMeasureArray(Object result[]) {
+    public void loadCubeMeasureArray(Object[] result) {
         int[] measureIndex = hbaseColumn.getMeasureIndex();
         for (int i = 0; i < measureIndex.length; i++) {
             result[measureIndex[i]] = values[i];

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
index 801f8e4..8f2fc80 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SequenceFileCuboidWriter.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.storage.hbase.steps;
 
+import java.io.IOException;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,8 +34,6 @@ import org.apache.kylin.engine.mr.steps.KVGTRecordWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-
 /**
  */
 public class SequenceFileCuboidWriter extends KVGTRecordWriter {
@@ -46,7 +46,7 @@ public class SequenceFileCuboidWriter extends KVGTRecordWriter {
         try {
             initiate();
         } catch (IOException e) {
-           throw new RuntimeException(e);
+            throw new RuntimeException(e);
         }
     }
 
@@ -75,7 +75,7 @@ public class SequenceFileCuboidWriter extends KVGTRecordWriter {
 
     @Override
     protected void writeAsKeyValue(ByteArrayWritable key, ByteArrayWritable value) throws IOException {
-       
+
         Text outputValue = new Text();
         Text outputKey = new Text();
         outputKey.set(key.array(), key.offset(), key.length());

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
index 1ba96e9..9e30a06 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CleanHtableCLI.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.util.ToolRunner;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
index d33975e..3f67e5c 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCLI.java
@@ -18,12 +18,25 @@
 
 package org.apache.kylin.storage.hbase.util;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
-import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.JsonSerializer;
 import org.apache.kylin.common.persistence.RawResource;
@@ -51,11 +64,6 @@ import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
 /**
  * <p/>
  * This tool serves for the purpose of migrating cubes. e.g. upgrade cube from
@@ -150,7 +158,7 @@ public class CubeMigrationCLI {
         moveCube(KylinConfig.createInstanceFromUri(srcCfgUri), KylinConfig.createInstanceFromUri(dstCfgUri), cubeName, projectName, copyAcl, purgeAndDisable, overwriteIfExists, realExecute);
     }
 
-    public static void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException{
+    public static void checkMigrationSuccess(KylinConfig kylinConfig, String cubeName, Boolean ifFix) throws IOException {
         CubeMigrationCheckCLI checkCLI = new CubeMigrationCheckCLI(kylinConfig, ifFix);
         checkCLI.execute(cubeName);
     }
@@ -225,7 +233,6 @@ public class CubeMigrationCLI {
         operations.add(new Opt(OptType.ADD_INTO_PROJECT, new Object[] { cubeName, projectName }));
     }
 
-
     private static void purgeAndDisable(String cubeName) throws IOException {
         operations.add(new Opt(OptType.PURGE_AND_DISABLE, new Object[] { cubeName }));
     }
@@ -305,169 +312,176 @@ public class CubeMigrationCLI {
         }
     }
 
+    @SuppressWarnings("checkstyle:methodlength")
     private static void doOpt(Opt opt) throws IOException, InterruptedException {
         logger.info("Executing operation: " + opt.toString());
 
         switch (opt.type) {
-            case CHANGE_HTABLE_HOST: {
-                String tableName = (String) opt.params[0];
-                HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
-                hbaseAdmin.disableTable(tableName);
-                desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
-                hbaseAdmin.modifyTable(tableName, desc);
-                hbaseAdmin.enableTable(tableName);
-                logger.info("CHANGE_HTABLE_HOST is completed");
-                break;
-            }
-            case COPY_FILE_IN_META: {
-                String item = (String) opt.params[0];
-                RawResource res = srcStore.getResource(item);
-                dstStore.putResource(item, res.inputStream, res.timestamp);
-                res.inputStream.close();
-                logger.info("Item " + item + " is copied");
-                break;
-            }
-            case COPY_DICT_OR_SNAPSHOT: {
-                String item = (String) opt.params[0];
-
-                if (item.toLowerCase().endsWith(".dict")) {
-                    DictionaryManager dstDictMgr = DictionaryManager.getInstance(dstConfig);
-                    DictionaryManager srcDicMgr = DictionaryManager.getInstance(srcConfig);
-                    DictionaryInfo dictSrc = srcDicMgr.getDictionaryInfo(item);
-
-                    long ts = dictSrc.getLastModified();
-                    dictSrc.setLastModified(0);//to avoid resource store write conflict
-                    Dictionary dictObj = dictSrc.getDictionaryObject().copyToAnotherMeta(srcConfig, dstConfig);
-                    DictionaryInfo dictSaved = dstDictMgr.trySaveNewDict(dictObj, dictSrc);
-                    dictSrc.setLastModified(ts);
-
-                    if (dictSaved == dictSrc) {
-                        //no dup found, already saved to dest
-                        logger.info("Item " + item + " is copied");
-                    } else {
-                        //dictSrc is rejected because of duplication
-                        //modify cube's dictionary path
-                        String cubeName = (String) opt.params[1];
-                        String cubeResPath = CubeInstance.concatResourcePath(cubeName);
-                        Serializer<CubeInstance> cubeSerializer = new JsonSerializer<CubeInstance>(CubeInstance.class);
-                        CubeInstance cube = dstStore.getResource(cubeResPath, CubeInstance.class, cubeSerializer);
-                        for (CubeSegment segment : cube.getSegments()) {
-                            for (Map.Entry<String, String> entry : segment.getDictionaries().entrySet()) {
-                                if (entry.getValue().equalsIgnoreCase(item)) {
-                                    entry.setValue(dictSaved.getResourcePath());
-                                }
+        case CHANGE_HTABLE_HOST: {
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+            hbaseAdmin.disableTable(tableName);
+            desc.setValue(IRealizationConstants.HTableTag, dstConfig.getMetadataUrlPrefix());
+            hbaseAdmin.modifyTable(tableName, desc);
+            hbaseAdmin.enableTable(tableName);
+            logger.info("CHANGE_HTABLE_HOST is completed");
+            break;
+        }
+        case COPY_FILE_IN_META: {
+            String item = (String) opt.params[0];
+            RawResource res = srcStore.getResource(item);
+            dstStore.putResource(item, res.inputStream, res.timestamp);
+            res.inputStream.close();
+            logger.info("Item " + item + " is copied");
+            break;
+        }
+        case COPY_DICT_OR_SNAPSHOT: {
+            String item = (String) opt.params[0];
+
+            if (item.toLowerCase().endsWith(".dict")) {
+                DictionaryManager dstDictMgr = DictionaryManager.getInstance(dstConfig);
+                DictionaryManager srcDicMgr = DictionaryManager.getInstance(srcConfig);
+                DictionaryInfo dictSrc = srcDicMgr.getDictionaryInfo(item);
+
+                long ts = dictSrc.getLastModified();
+                dictSrc.setLastModified(0);//to avoid resource store write conflict
+                Dictionary dictObj = dictSrc.getDictionaryObject().copyToAnotherMeta(srcConfig, dstConfig);
+                DictionaryInfo dictSaved = dstDictMgr.trySaveNewDict(dictObj, dictSrc);
+                dictSrc.setLastModified(ts);
+
+                if (dictSaved == dictSrc) {
+                    //no dup found, already saved to dest
+                    logger.info("Item " + item + " is copied");
+                } else {
+                    //dictSrc is rejected because of duplication
+                    //modify cube's dictionary path
+                    String cubeName = (String) opt.params[1];
+                    String cubeResPath = CubeInstance.concatResourcePath(cubeName);
+                    Serializer<CubeInstance> cubeSerializer = new JsonSerializer<CubeInstance>(CubeInstance.class);
+                    CubeInstance cube = dstStore.getResource(cubeResPath, CubeInstance.class, cubeSerializer);
+                    for (CubeSegment segment : cube.getSegments()) {
+                        for (Map.Entry<String, String> entry : segment.getDictionaries().entrySet()) {
+                            if (entry.getValue().equalsIgnoreCase(item)) {
+                                entry.setValue(dictSaved.getResourcePath());
                             }
                         }
-                        dstStore.putResource(cubeResPath, cube, cubeSerializer);
-                        logger.info("Item " + item + " is dup, instead " + dictSaved.getResourcePath() + " is reused");
                     }
+                    dstStore.putResource(cubeResPath, cube, cubeSerializer);
+                    logger.info("Item " + item + " is dup, instead " + dictSaved.getResourcePath() + " is reused");
+                }
+
+            } else if (item.toLowerCase().endsWith(".snapshot")) {
+                SnapshotManager dstSnapMgr = SnapshotManager.getInstance(dstConfig);
+                SnapshotManager srcSnapMgr = SnapshotManager.getInstance(srcConfig);
+                SnapshotTable snapSrc = srcSnapMgr.getSnapshotTable(item);
 
-                } else if (item.toLowerCase().endsWith(".snapshot")) {
-                    SnapshotManager dstSnapMgr = SnapshotManager.getInstance(dstConfig);
-                    SnapshotManager srcSnapMgr = SnapshotManager.getInstance(srcConfig);
-                    SnapshotTable snapSrc = srcSnapMgr.getSnapshotTable(item);
-
-                    long ts = snapSrc.getLastModified();
-                    snapSrc.setLastModified(0);
-                    SnapshotTable snapSaved = dstSnapMgr.trySaveNewSnapshot(snapSrc);
-                    snapSrc.setLastModified(ts);
-
-                    if (snapSaved == snapSrc) {
-                        //no dup found, already saved to dest
-                        logger.info("Item " + item + " is copied");
-
-                    } else {
-                        String cubeName = (String) opt.params[1];
-                        String cubeResPath = CubeInstance.concatResourcePath(cubeName);
-                        Serializer<CubeInstance> cubeSerializer = new JsonSerializer<CubeInstance>(CubeInstance.class);
-                        CubeInstance cube = dstStore.getResource(cubeResPath, CubeInstance.class, cubeSerializer);
-                        for (CubeSegment segment : cube.getSegments()) {
-                            for (Map.Entry<String, String> entry : segment.getSnapshots().entrySet()) {
-                                if (entry.getValue().equalsIgnoreCase(item)) {
-                                    entry.setValue(snapSaved.getResourcePath());
-                                }
+                long ts = snapSrc.getLastModified();
+                snapSrc.setLastModified(0);
+                SnapshotTable snapSaved = dstSnapMgr.trySaveNewSnapshot(snapSrc);
+                snapSrc.setLastModified(ts);
+
+                if (snapSaved == snapSrc) {
+                    //no dup found, already saved to dest
+                    logger.info("Item " + item + " is copied");
+
+                } else {
+                    String cubeName = (String) opt.params[1];
+                    String cubeResPath = CubeInstance.concatResourcePath(cubeName);
+                    Serializer<CubeInstance> cubeSerializer = new JsonSerializer<CubeInstance>(CubeInstance.class);
+                    CubeInstance cube = dstStore.getResource(cubeResPath, CubeInstance.class, cubeSerializer);
+                    for (CubeSegment segment : cube.getSegments()) {
+                        for (Map.Entry<String, String> entry : segment.getSnapshots().entrySet()) {
+                            if (entry.getValue().equalsIgnoreCase(item)) {
+                                entry.setValue(snapSaved.getResourcePath());
                             }
                         }
-                        dstStore.putResource(cubeResPath, cube, cubeSerializer);
-                        logger.info("Item " + item + " is dup, instead " + snapSaved.getResourcePath() + " is reused");
-
                     }
+                    dstStore.putResource(cubeResPath, cube, cubeSerializer);
+                    logger.info("Item " + item + " is dup, instead " + snapSaved.getResourcePath() + " is reused");
 
-                } else {
-                    logger.error("unknown item found: " + item);
-                    logger.info("ignore it");
                 }
 
-                break;
+            } else {
+                logger.error("unknown item found: " + item);
+                logger.info("ignore it");
             }
-            case RENAME_FOLDER_IN_HDFS: {
-                String srcPath = (String) opt.params[0];
-                String dstPath = (String) opt.params[1];
-                hdfsFS.rename(new Path(srcPath), new Path(dstPath));
-                logger.info("HDFS Folder renamed from " + srcPath + " to " + dstPath);
-                break;
-            }
-            case ADD_INTO_PROJECT: {
-                String cubeName = (String) opt.params[0];
-                String projectName = (String) opt.params[1];
-                String projectResPath = ProjectInstance.concatResourcePath(projectName);
-                Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
-                ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
-                project.removeRealization(RealizationType.CUBE, cubeName);
-                project.addRealizationEntry(RealizationType.CUBE, cubeName);
-                dstStore.putResource(projectResPath, project, projectSerializer);
-                logger.info("Project instance for " + projectName + " is corrected");
-                break;
-            }
-            case COPY_ACL: {
-                String cubeId = (String) opt.params[0];
-                String modelId = (String) opt.params[1];
-                String projectName = (String) opt.params[2];
-                String projectResPath = ProjectInstance.concatResourcePath(projectName);
-                Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
-                ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
-                String projUUID = project.getUuid();
-                HTableInterface srcAclHtable = null;
-                HTableInterface destAclHtable = null;
-                try {
-                    srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
-                    destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
-
-                    // cube acl
-                    Result result  = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
-                    if (result.listCells() != null) {
-                        for (Cell cell : result.listCells()) {
-                            byte[] family = CellUtil.cloneFamily(cell);
-                            byte[] column = CellUtil.cloneQualifier(cell);
-                            byte[] value = CellUtil.cloneValue(cell);
-
-                            // use the target project uuid as the parent
-                            if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
-                                String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
-                                value = Bytes.toBytes(valueString);
-                            }
-                            Put put = new Put(Bytes.toBytes(cubeId));
-                            put.add(family, column, value);
-                            destAclHtable.put(put);
+
+            break;
+        }
+        case RENAME_FOLDER_IN_HDFS: {
+            String srcPath = (String) opt.params[0];
+            String dstPath = (String) opt.params[1];
+            hdfsFS.rename(new Path(srcPath), new Path(dstPath));
+            logger.info("HDFS Folder renamed from " + srcPath + " to " + 
dstPath);
+            break;
+        }
+        case ADD_INTO_PROJECT: {
+            String cubeName = (String) opt.params[0];
+            String projectName = (String) opt.params[1];
+            String projectResPath = ProjectInstance.concatResourcePath(projectName);
+            Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
+            ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
+            project.removeRealization(RealizationType.CUBE, cubeName);
+            project.addRealizationEntry(RealizationType.CUBE, cubeName);
+            dstStore.putResource(projectResPath, project, projectSerializer);
+            logger.info("Project instance for " + projectName + " is corrected");
+            break;
+        }
+        case COPY_ACL: {
+            String cubeId = (String) opt.params[0];
+            String modelId = (String) opt.params[1];
+            String projectName = (String) opt.params[2];
+            String projectResPath = ProjectInstance.concatResourcePath(projectName);
+            Serializer<ProjectInstance> projectSerializer = new JsonSerializer<ProjectInstance>(ProjectInstance.class);
+            ProjectInstance project = dstStore.getResource(projectResPath, ProjectInstance.class, projectSerializer);
+            String projUUID = project.getUuid();
+            HTableInterface srcAclHtable = null;
+            HTableInterface destAclHtable = null;
+            try {
+                srcAclHtable = HBaseConnection.get(srcConfig.getStorageUrl()).getTable(srcConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+
+                // cube acl
+                Result result = srcAclHtable.get(new Get(Bytes.toBytes(cubeId)));
+                if (result.listCells() != null) {
+                    for (Cell cell : result.listCells()) {
+                        byte[] family = CellUtil.cloneFamily(cell);
+                        byte[] column = CellUtil.cloneQualifier(cell);
+                        byte[] value = CellUtil.cloneValue(cell);
+
+                        // use the target project uuid as the parent
+                        if (Bytes.toString(family).equals(ACL_INFO_FAMILY) && Bytes.toString(column).equals(ACL_INFO_FAMILY_PARENT_COLUMN)) {
+                            String valueString = "{\"id\":\"" + projUUID + "\",\"type\":\"org.apache.kylin.metadata.project.ProjectInstance\"}";
+                            value = Bytes.toBytes(valueString);
                         }
+                        Put put = new Put(Bytes.toBytes(cubeId));
+                        put.add(family, column, value);
+                        destAclHtable.put(put);
                     }
-                    destAclHtable.flushCommits();
-                } finally {
-                    IOUtils.closeQuietly(srcAclHtable);
-                    IOUtils.closeQuietly(destAclHtable);
                 }
-                break;
-            }
-            case PURGE_AND_DISABLE:{
-                String cubeName = (String) opt.params[0];
-                String cubeResPath = CubeInstance.concatResourcePath(cubeName);
-                Serializer<CubeInstance> cubeSerializer = new JsonSerializer<CubeInstance>(CubeInstance.class);
-                CubeInstance cube = srcStore.getResource(cubeResPath, CubeInstance.class, cubeSerializer);
-                cube.getSegments().clear();
-                cube.setStatus(RealizationStatusEnum.DISABLED);
-                srcStore.putResource(cubeResPath, cube, cubeSerializer);
-                logger.info("Cube " + cubeName + " is purged and disabled in " + srcConfig.getMetadataUrl());
+                destAclHtable.flushCommits();
+            } finally {
+                IOUtils.closeQuietly(srcAclHtable);
+                IOUtils.closeQuietly(destAclHtable);
             }
+            break;
+        }
+        case PURGE_AND_DISABLE: {
+            String cubeName = (String) opt.params[0];
+            String cubeResPath = CubeInstance.concatResourcePath(cubeName);
+            Serializer<CubeInstance> cubeSerializer = new JsonSerializer<CubeInstance>(CubeInstance.class);
+            CubeInstance cube = srcStore.getResource(cubeResPath, CubeInstance.class, cubeSerializer);
+            cube.getSegments().clear();
+            cube.setStatus(RealizationStatusEnum.DISABLED);
+            srcStore.putResource(cubeResPath, cube, cubeSerializer);
+            logger.info("Cube " + cubeName + " is purged and disabled in " + srcConfig.getMetadataUrl());
+
+            break;
+        }
+        default: {
+            //do nothing
+            break;
+        }
         }
     }
 
@@ -475,58 +489,63 @@ public class CubeMigrationCLI {
         logger.info("Undo operation: " + opt.toString());
 
         switch (opt.type) {
-            case CHANGE_HTABLE_HOST: {
-                String tableName = (String) opt.params[0];
-                HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
-                hbaseAdmin.disableTable(tableName);
-                desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
-                hbaseAdmin.modifyTable(tableName, desc);
-                hbaseAdmin.enableTable(tableName);
-                break;
-            }
-            case COPY_FILE_IN_META: {
-                // no harm
-                logger.info("Undo for COPY_FILE_IN_META is ignored");
-                break;
-            }
-            case COPY_DICT_OR_SNAPSHOT: {
-                // no harm
-                logger.info("Undo for COPY_DICT_OR_SNAPSHOT is ignored");
-                break;
-            }
-            case RENAME_FOLDER_IN_HDFS: {
-                String srcPath = (String) opt.params[1];
-                String dstPath = (String) opt.params[0];
-
-                if (hdfsFS.exists(new Path(srcPath)) && !hdfsFS.exists(new Path(dstPath))) {
-                    hdfsFS.rename(new Path(srcPath), new Path(dstPath));
-                    logger.info("HDFS Folder renamed from " + srcPath + " to " + dstPath);
-                }
-                break;
-            }
-            case ADD_INTO_PROJECT: {
-                logger.info("Undo for ADD_INTO_PROJECT is ignored");
-                break;
-            }
-            case COPY_ACL: {
-                String cubeId = (String) opt.params[0];
-                String modelId = (String) opt.params[1];
-                HTableInterface destAclHtable = null;
-                try {
-                    destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+        case CHANGE_HTABLE_HOST: {
+            String tableName = (String) opt.params[0];
+            HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(tableName));
+            hbaseAdmin.disableTable(tableName);
+            desc.setValue(IRealizationConstants.HTableTag, srcConfig.getMetadataUrlPrefix());
+            hbaseAdmin.modifyTable(tableName, desc);
+            hbaseAdmin.enableTable(tableName);
+            break;
+        }
+        case COPY_FILE_IN_META: {
+            // no harm
+            logger.info("Undo for COPY_FILE_IN_META is ignored");
+            break;
+        }
+        case COPY_DICT_OR_SNAPSHOT: {
+            // no harm
+            logger.info("Undo for COPY_DICT_OR_SNAPSHOT is ignored");
+            break;
+        }
+        case RENAME_FOLDER_IN_HDFS: {
+            String srcPath = (String) opt.params[1];
+            String dstPath = (String) opt.params[0];
 
-                    destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
-                    destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
-                    destAclHtable.flushCommits();
-                } finally {
-                    IOUtils.closeQuietly(destAclHtable);
-                }
-                break;
+            if (hdfsFS.exists(new Path(srcPath)) && !hdfsFS.exists(new Path(dstPath))) {
+                hdfsFS.rename(new Path(srcPath), new Path(dstPath));
+                logger.info("HDFS Folder renamed from " + srcPath + " to " + dstPath);
             }
-            case PURGE_AND_DISABLE: {
-                logger.info("Undo for PURGE_AND_DISABLE is not supported");
-                break;
+            break;
+        }
+        case ADD_INTO_PROJECT: {
+            logger.info("Undo for ADD_INTO_PROJECT is ignored");
+            break;
+        }
+        case COPY_ACL: {
+            String cubeId = (String) opt.params[0];
+            String modelId = (String) opt.params[1];
+            HTableInterface destAclHtable = null;
+            try {
+                destAclHtable = HBaseConnection.get(dstConfig.getStorageUrl()).getTable(dstConfig.getMetadataUrlPrefix() + ACL_TABLE_NAME);
+
+                destAclHtable.delete(new Delete(Bytes.toBytes(cubeId)));
+                destAclHtable.delete(new Delete(Bytes.toBytes(modelId)));
+                destAclHtable.flushCommits();
+            } finally {
+                IOUtils.closeQuietly(destAclHtable);
             }
+            break;
+        }
+        case PURGE_AND_DISABLE: {
+            logger.info("Undo for PURGE_AND_DISABLE is not supported");
+            break;
+        }
+        default:
+        {
+            //do nothing
+            break;
+        }
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
index f1fc8e1..295750a 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/CubeMigrationCheckCLI.java
@@ -18,13 +18,15 @@
 
 package org.apache.kylin.storage.hbase.util;
 
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -38,9 +40,7 @@ import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
+import com.google.common.collect.Lists;
 
 /**
  * <p/>
@@ -59,13 +59,12 @@ public class CubeMigrationCheckCLI {
 
     private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("The name of cube migrated").create("cube");
 
-
     private KylinConfig dstCfg;
     private HBaseAdmin hbaseAdmin;
 
     private List<String> issueExistHTables;
     private List<String> inconsistentHTables;
-    
+
     private boolean ifFix = false;
 
     public static void main(String[] args) throws ParseException, IOException {
@@ -101,9 +100,9 @@ public class CubeMigrationCheckCLI {
         }
 
         KylinConfig kylinConfig;
-        if(dstCfgUri==null){
+        if (dstCfgUri == null) {
             kylinConfig = KylinConfig.getInstanceFromEnv();
-        }else{
+        } else {
             kylinConfig = KylinConfig.createInstanceFromUri(dstCfgUri);
         }
 
@@ -111,21 +110,21 @@ public class CubeMigrationCheckCLI {
         checkCLI.execute(cubeName);
     }
 
-    public void execute() throws IOException{
+    public void execute() throws IOException {
         execute(null);
     }
 
-    public void execute(String cubeName) throws IOException{
-        if(cubeName==null){
+    public void execute(String cubeName) throws IOException {
+        if (cubeName == null) {
             checkAll();
-        }else {
+        } else {
             checkCube(cubeName);
         }
         fixInconsistent();
         printIssueExistingHTables();
     }
 
-    public CubeMigrationCheckCLI(KylinConfig kylinConfig, Boolean isFix) throws IOException{
+    public CubeMigrationCheckCLI(KylinConfig kylinConfig, Boolean isFix) throws IOException {
         this.dstCfg = kylinConfig;
         this.ifFix = isFix;
 
@@ -140,12 +139,12 @@ public class CubeMigrationCheckCLI {
         List<String> segFullNameList = Lists.newArrayList();
 
         CubeInstance cube = CubeManager.getInstance(dstCfg).getCube(cubeName);
-        addHTableNamesForCube(cube,segFullNameList);
+        addHTableNamesForCube(cube, segFullNameList);
 
         check(segFullNameList);
     }
 
-    public void checkAll(){
+    public void checkAll() {
         List<String> segFullNameList = Lists.newArrayList();
 
         CubeManager cubeMgr = CubeManager.getInstance(dstCfg);
@@ -156,58 +155,58 @@ public class CubeMigrationCheckCLI {
         check(segFullNameList);
     }
 
-    public void addHTableNamesForCube(CubeInstance cube, List<String> segFullNameList){
+    public void addHTableNamesForCube(CubeInstance cube, List<String> segFullNameList) {
         for (CubeSegment seg : cube.getSegments()) {
             String tableName = seg.getStorageLocationIdentifier();
-            segFullNameList.add(tableName+","+cube.getName());
+            segFullNameList.add(tableName + "," + cube.getName());
         }
     }
 
-    public void check(List<String> segFullNameList){
+    public void check(List<String> segFullNameList) {
         issueExistHTables = Lists.newArrayList();
         inconsistentHTables = Lists.newArrayList();
 
-        for(String segFullName:segFullNameList){
+        for (String segFullName : segFullNameList) {
             String[] sepNameList = segFullName.split(",");
             try {
                 HTableDescriptor hTableDescriptor = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
                 String host = hTableDescriptor.getValue(IRealizationConstants.HTableTag);
-                if(!dstCfg.getMetadataUrlPrefix().equalsIgnoreCase(host)){
+                if (!dstCfg.getMetadataUrlPrefix().equalsIgnoreCase(host)) {
                     inconsistentHTables.add(segFullName);
                 }
-            }catch (IOException e){
+            } catch (IOException e) {
                 issueExistHTables.add(segFullName);
                 continue;
             }
         }
     }
 
-    public void fixInconsistent() throws IOException{
-        if(ifFix == true){
-            for(String segFullName : inconsistentHTables){
+    public void fixInconsistent() throws IOException {
+        if (ifFix == true) {
+            for (String segFullName : inconsistentHTables) {
                 String[] sepNameList = segFullName.split(",");
                 HTableDescriptor desc = hbaseAdmin.getTableDescriptor(TableName.valueOf(sepNameList[0]));
-                logger.info("Change the host of htable "+sepNameList[0]+"belonging to cube "+sepNameList[1]+" from "+desc.getValue(IRealizationConstants.HTableTag)+" to "+dstCfg.getMetadataUrlPrefix());
+                logger.info("Change the host of htable " + sepNameList[0] + "belonging to cube " + sepNameList[1] + " from " + desc.getValue(IRealizationConstants.HTableTag) + " to " + dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.disableTable(sepNameList[0]);
                 desc.setValue(IRealizationConstants.HTableTag, dstCfg.getMetadataUrlPrefix());
                 hbaseAdmin.modifyTable(sepNameList[0], desc);
                 hbaseAdmin.enableTable(sepNameList[0]);
             }
-        }else{
+        } else {
             logger.info("------ Inconsistent HTables Needed To Be Fixed ------");
             for (String hTable : inconsistentHTables) {
                 String[] sepNameList = hTable.split(",");
-                logger.info(sepNameList[0]+" belonging to cube "+sepNameList[1]);
+                logger.info(sepNameList[0] + " belonging to cube " + sepNameList[1]);
            }
            logger.info("----------------------------------------------------");
         }
     }
 
-    public void printIssueExistingHTables(){
+    public void printIssueExistingHTables() {
         logger.info("------ HTables exist issues in hbase : not existing, metadata broken ------");
-        for(String segFullName : issueExistHTables){
+        for (String segFullName : issueExistHTables) {
             String[] sepNameList = segFullName.split(",");
-            logger.error(sepNameList[0]+" belonging to cube "+sepNameList[1]+" has some issues and cannot be read successfully!!!");
+            logger.error(sepNameList[0] + " belonging to cube " + sepNameList[1] + " has some issues and cannot be read successfully!!!");
         }
         logger.info("----------------------------------------------------");
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
index 8a88b6d..86ba22f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/GridTableHBaseBenchmark.java
@@ -267,6 +267,7 @@ public class GridTableHBaseBenchmark {
                 hbase.getTableDescriptor(TableName.valueOf(tableName));
                 tableExist = true;
             } catch (TableNotFoundException e) {
+                //do nothing?
             }
 
             if (tableExist) {
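
Context for the hunk above: the added //do nothing? comment documents an intentionally empty catch. The benchmark probes for table existence by calling getTableDescriptor and treating TableNotFoundException as absent. A compact sketch of that idiom, with an illustrative helper name:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.TableNotFoundException;
    import org.apache.hadoop.hbase.client.HBaseAdmin;

    public class TableProbe {
        // TableNotFoundException is the expected signal for "table absent",
        // so swallowing it is deliberate, not an oversight.
        static boolean exists(HBaseAdmin hbase, String tableName) throws IOException {
            try {
                hbase.getTableDescriptor(TableName.valueOf(tableName));
                return true;
            } catch (TableNotFoundException e) {
                return false;
            }
        }
    }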

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
index 4ce0dac..b6958d6 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseClean.java
@@ -22,12 +22,10 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 
-import com.google.common.collect.Lists;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.util.ToolRunner;
@@ -37,6 +35,8 @@ import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
 /**
  * clean hbase tables by tag
  */
@@ -63,7 +63,7 @@ public class HBaseClean extends AbstractHadoopJob {
             parseOptions(options, args);
 
             logger.info("options: '" + getOptionsAsString() + "'");
-            
+
             tag = getOptionValue(OPTION_TAG);
             delete = Boolean.parseBoolean(getOptionValue(OPTION_DELETE));
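The import changes in HBaseClean (and in EndpointAggregationTest further down)
only regroup imports into the project's convention: java.*, then org.*, then
other third-party packages such as com.google.*, with a blank line between
groups and no wildcards. A sketch of the resulting layout, using the same
imports as the file above:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.commons.cli.Option;
    import org.apache.kylin.storage.hbase.HBaseConnection;
    import org.slf4j.Logger;

    import com.google.common.collect.Lists;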
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
index 0a791b7..266f7e7 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HBaseUsage.java
@@ -24,7 +24,6 @@ import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.kylin.metadata.realization.IRealizationConstants;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
index deb8da1..f30f2c9 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HbaseStreamingInput.java
@@ -28,7 +28,6 @@ import java.util.Random;
 import java.util.concurrent.Semaphore;
 
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
index f1ca4de..c435f34 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HiveCmdBuilder.java
@@ -54,41 +54,41 @@ public class HiveCmdBuilder {
         StringBuffer buf = new StringBuffer();
 
         switch (clientMode) {
-            case CLI:
-                buf.append("hive -e \"");
+        case CLI:
+            buf.append("hive -e \"");
+            for (String statement : statements) {
+                buf.append(statement).append("\n");
+            }
+            buf.append("\"");
+            break;
+        case BEELINE:
+            BufferedWriter bw = null;
+            try {
+                File tmpHql = File.createTempFile("beeline_", ".hql");
+                StringBuffer hqlBuf = new StringBuffer();
+                bw = new BufferedWriter(new FileWriter(tmpHql));
                 for (String statement : statements) {
-                    buf.append(statement).append("\n");
-                }
-                buf.append("\"");
-                break;
-            case BEELINE:
-                BufferedWriter bw = null;
-                try {
-                    File tmpHql = File.createTempFile("beeline_", ".hql");
-                    StringBuffer hqlBuf = new StringBuffer();
-                    bw = new BufferedWriter(new FileWriter(tmpHql));
-                    for (String statement : statements) {
-                        bw.write(statement);
-                        bw.newLine();
-
-                        hqlBuf.append(statement).append("\n");
-                    }
-                    buf.append("beeline ");
-                    buf.append(kylinConfig.getHiveBeelineParams());
-                    buf.append(" -f ");
-                    buf.append(tmpHql.getAbsolutePath());
-                    buf.append(";rm -f ");
-                    buf.append(tmpHql.getAbsolutePath());
-
-                    logger.info("The statements to execute in beeline: \n" + 
hqlBuf);
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                } finally {
-                    IOUtils.closeQuietly(bw);
+                    bw.write(statement);
+                    bw.newLine();
+
+                    hqlBuf.append(statement).append("\n");
                 }
-                break;
-            default:
-                throw new RuntimeException("Hive client cannot be recognized: 
" + clientMode);
+                buf.append("beeline ");
+                buf.append(kylinConfig.getHiveBeelineParams());
+                buf.append(" -f ");
+                buf.append(tmpHql.getAbsolutePath());
+                buf.append(";rm -f ");
+                buf.append(tmpHql.getAbsolutePath());
+
+                logger.info("The statements to execute in beeline: \n" + 
hqlBuf);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            } finally {
+                IOUtils.closeQuietly(bw);
+            }
+            break;
+        default:
+            throw new RuntimeException("Hive client cannot be recognized: " + 
clientMode);
         }
 
         return buf.toString();
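This hunk only re-indents the switch to the project's case-at-switch-level
style; the BEELINE branch still closes its writer in a finally block via
IOUtils.closeQuietly. On Java 7+ the same cleanup could be expressed with
try-with-resources; a minimal sketch of just the file-writing part (an
alternative, not what the commit changes):

    File tmpHql = File.createTempFile("beeline_", ".hql");
    // the writer is closed automatically, even if a write throws
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(tmpHql))) {
        for (String statement : statements) {
            bw.write(statement);
            bw.newLine();
        }
    }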

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
index 2b4a9a7..4db183b 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/HtableAlterMetadataCLI.java
@@ -24,7 +24,6 @@ import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index b1a5ba4..01edb1f 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTableInterface;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index ac35ccf..af64df7 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -24,7 +24,12 @@ import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.concurrent.*;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 23357f5..26ee055 100644
--- 
a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ 
b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -63,7 +63,7 @@ public class UpdateHTableHostCLI {
         this.kylinConfig = KylinConfig.getInstanceFromEnv();
     }
 
-    public static void main(String args[]) throws Exception {
+    public static void main(String[] args) throws Exception {
         if (args.length < 1) {
             printUsageAndExit();
         }

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
index 86f895a..e516bc2 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/FuzzyValueCombinationTest.java
@@ -18,7 +18,7 @@
 
 package org.apache.kylin.storage.hbase.common;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.util.Arrays;
 import java.util.HashSet;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
index 19ba688..f8e2644 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/AggregateRegionObserverTest.java
@@ -18,7 +18,8 @@
 
 package org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.math.BigDecimal;
@@ -89,7 +90,7 @@ public class AggregateRegionObserverTest {
 
     private Cell newCell(byte[] key, HCol col, String decimal, int number) {
         Object[] values = number == Integer.MIN_VALUE ? //
-        new Object[] { new BigDecimal(decimal) } //
+                new Object[] { new BigDecimal(decimal) } //
                 : new Object[] { new BigDecimal(decimal), new 
LongMutable(number) };
         ByteBuffer buf = col.measureCodec.encode(values);
 
@@ -202,7 +203,7 @@ public class AggregateRegionObserverTest {
         t.setDatabase("DEFAULT");
         TblColRef[] cols = new TblColRef[] { newCol(1, "A", t), newCol(2, "B", 
t), newCol(3, "C", t), newCol(4, "D", t) };
         int[] sizes = new int[] { 1, 1, 1, 1 };
-        return new CoprocessorRowType(cols, sizes,0);
+        return new CoprocessorRowType(cols, sizes, 0);
     }
 
     private TblColRef newCol(int i, String name, TableDesc t) {

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/RowAggregatorsTest.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/RowAggregatorsTest.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/RowAggregatorsTest.java
index 7881688..d0a0710 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/RowAggregatorsTest.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/coprocessor/observer/RowAggregatorsTest.java
@@ -34,7 +34,7 @@ public class RowAggregatorsTest {
     @Test
     public void testSerialize() {
         ObserverAggregators.HCol[] hcols = new ObserverAggregators.HCol[] { //
-        newHCol("f", "c1", new String[] { "SUM", "COUNT" }, new String[] { 
"decimal", "long" }), //
+                newHCol("f", "c1", new String[] { "SUM", "COUNT" }, new 
String[] { "decimal", "long" }), //
                 newHCol("f", "c2", new String[] { "SUM", "SUM" }, new String[] 
{ "long", "long" }) };
         ObserverAggregators sample = new ObserverAggregators(hcols);
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2.java
index 4547896..74b2112 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/cube/v1/filter/TestFuzzyRowFilterV2.java
@@ -33,9 +33,10 @@ public class TestFuzzyRowFilterV2 {
 
         Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.YES, 
FuzzyRowFilterV2.satisfiesNoUnsafe(false, new byte[] { 1, 2, 1, 3, 3 }, 0, 5, 
new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 }));
 
-        Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilterV2.satisfiesNoUnsafe(false, new byte[] { 1, 1, 1, 3, 0 }, // row 
to check
-                0, 5, new byte[] { 1, 2, 0, 3 }, // fuzzy row
-                new byte[] { 0, 0, 1, 0 })); // mask
+        Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.NEXT_EXISTS,
+                FuzzyRowFilterV2.satisfiesNoUnsafe(false, new byte[] { 1, 1, 
1, 3, 0 }, // row to check
+                        0, 5, new byte[] { 1, 2, 0, 3 }, // fuzzy row
+                        new byte[] { 0, 0, 1, 0 })); // mask
 
         Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilterV2.satisfiesNoUnsafe(false, new byte[] { 1, 1, 1, 3, 0 }, 0, 5, 
new byte[] { 1, (byte) 245, 0, 3 }, new byte[] { 0, 0, 1, 0 }));
 
@@ -51,9 +52,10 @@ public class TestFuzzyRowFilterV2 {
 
         Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.YES, 
FuzzyRowFilterV2.satisfies(false, new byte[] { 1, 2, 1, 3, 3 }, new byte[] { 1, 
2, 0, 3 }, new byte[] { -1, -1, 0, -1 }));
 
-        Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilterV2.satisfies(false, new byte[] { 1, 1, 1, 3, 0 }, // row to check
-                new byte[] { 1, 2, 0, 3 }, // fuzzy row
-                new byte[] { -1, -1, 0, -1 })); // mask
+        Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.NEXT_EXISTS,
+                FuzzyRowFilterV2.satisfies(false, new byte[] { 1, 1, 1, 3, 0 
}, // row to check
+                        new byte[] { 1, 2, 0, 3 }, // fuzzy row
+                        new byte[] { -1, -1, 0, -1 })); // mask
 
         Assert.assertEquals(FuzzyRowFilterV2.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilterV2.satisfies(false, new byte[] { 1, 1, 1, 3, 0 }, new byte[] { 1, 
(byte) 245, 0, 3 }, new byte[] { -1, -1, 0, -1 }));
 
@@ -172,11 +174,11 @@ public class TestFuzzyRowFilterV2 {
                 new byte[] { 1, 1, 0, 2, 0 }); // expected next
 
         assertNext(true, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 }, new 
byte[] { 1, (byte) 128, 2, 0, 1 },
-        // TODO: should be {1, (byte) 128, 2} ?
+                // TODO: should be {1, (byte) 128, 2} ?
                 new byte[] { 1, (byte) 128, 1, (byte) 0xFF, (byte) 0xFF });
 
         assertNext(true, new byte[] { 0, 1, 0, 1 }, new byte[] { 0, -1, 0, -1 
}, new byte[] { 5, 1, 0, 2, 1 },
-        // TODO: should be {5, 1, 0, 2} ?
+                // TODO: should be {5, 1, 0, 2} ?
                 new byte[] { 5, 1, 0, 1, (byte) 0xFF });
 
         assertNext(true, new byte[] { 0, 1, 0, 0 }, // fuzzy row
@@ -195,7 +197,7 @@ public class TestFuzzyRowFilterV2 {
                 new byte[] { 5, 1, (byte) 254, 1 }); // expected next
 
         assertNext(true, new byte[] { 1, 1, 0, 0 }, new byte[] { -1, -1, 0, 0 
}, new byte[] { 2, 1, 3, 2 },
-        // TODO: should be {1, 0} ?
+                // TODO: should be {1, 0} ?
                 new byte[] { 1, 1, 0, 0 });
 
         assertNext(true, new byte[] { 1, 0, 1 }, // fuzzy row
@@ -205,35 +207,35 @@ public class TestFuzzyRowFilterV2 {
                 new byte[] { 1, 0, 1, (byte) 0xFF, (byte) 0xFF });
 
         assertNext(true, new byte[] { 1, 1, 0, 3 }, new byte[] { -1, -1, 0, -1 
}, new byte[] { 1, (byte) 245, 1, 3, 0 },
-        // TODO: should be {1, 1, (byte) 255, 4} ?
+                // TODO: should be {1, 1, (byte) 255, 4} ?
                 new byte[] { 1, 1, 0, 3, (byte) 0xFF });
 
         assertNext(true, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 
}, new byte[] { 1, 3, 1, 3, 0 },
-        // TODO: should be 1, 2, (byte) 255, 4 ?
+                // TODO: should be 1, 2, (byte) 255, 4 ?
                 new byte[] { 1, 2, 0, 3, (byte) 0xFF });
 
         assertNext(true, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 
}, new byte[] { 2, 1, 1, 1, 0 },
-        // TODO: should be {1, 2, (byte) 255, 4} ?
+                // TODO: should be {1, 2, (byte) 255, 4} ?
                 new byte[] { 1, 2, 0, 3, (byte) 0xFF });
 
         assertNext(true,
-        // TODO: should be null?
+                // TODO: should be null?
                 new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 }, new byte[] { 
1, (byte) 128, 2 }, new byte[] { 1, (byte) 128, 1 });
 
         assertNext(true,
-        // TODO: should be null?
+                // TODO: should be null?
                 new byte[] { 0, 1, 0, 1 }, new byte[] { 0, -1, 0, -1 }, new 
byte[] { 5, 1, 0, 2 }, new byte[] { 5, 1, 0, 1 });
 
         assertNext(true,
-        // TODO: should be null?
+                // TODO: should be null?
                 new byte[] { 5, 1, 1, 0 }, new byte[] { -1, -1, 0, 0 }, new 
byte[] { 5, 1, (byte) 0xFF, 1 }, new byte[] { 5, 1, (byte) 0xFF, 0 });
 
         assertNext(true,
-        // TODO: should be null?
+                // TODO: should be null?
                 new byte[] { 1, 1, 1, 1 }, new byte[] { -1, -1, 0, 0 }, new 
byte[] { 1, 1, 2, 2 }, new byte[] { 1, 1, 2, 1 });
 
         assertNext(true,
-        // TODO: should be null?
+                // TODO: should be null?
                 new byte[] { 1, 1, 1, 1 }, new byte[] { 0, 0, 0, 0 }, new 
byte[] { 1, 1, 2, 3 }, new byte[] { 1, 1, 2, 2 });
 
         Assert.assertNull(FuzzyRowFilterV2.getNextForFuzzyRule(true, new 
byte[] { 1, 1, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 
}));
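As the paired test cases suggest, the two entry points encode masks
differently: satisfiesNoUnsafe takes the raw form (0 = fixed byte, 1 = fuzzy
byte) while satisfies takes the preprocessed form (-1 = fixed, 0 = fuzzy). A
small illustrative helper for the fixed-byte comparison underlying the
YES/NEXT_EXISTS codes, assuming the raw encoding (a sketch, not the filter's
real implementation):

    // true when every fixed position (mask byte 0) matches the fuzzy row;
    // fuzzy positions (mask byte 1) are ignored
    static boolean fixedBytesMatch(byte[] row, byte[] fuzzyRow, byte[] mask) {
        for (int i = 0; i < fuzzyRow.length && i < row.length; i++) {
            if (mask[i] == 0 && row[i] != fuzzyRow[i]) {
                return false;
            }
        }
        return true;
    }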

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
index 04e1f7f..ac9e995 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
@@ -18,7 +18,16 @@
 
 package org.apache.kylin.storage.hbase.ii.coprocessor.endpoint;
 
-import com.google.common.collect.Lists;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
 
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.invertedindex.IIInstance;
@@ -37,11 +46,7 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.*;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import com.google.common.collect.Lists;
 
 /**
  *

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CreateHTableTest.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CreateHTableTest.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CreateHTableTest.java
index 6868533..f994886 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CreateHTableTest.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CreateHTableTest.java
@@ -43,7 +43,7 @@ public class CreateHTableTest extends 
LocalFileMetadataTestCase {
         conf.set("fs.default.name", "file:///");
         conf.set("mapreduce.framework.name", "local");
         conf.set("mapreduce.application.framework.path", "");
-        
+
         this.createTestMetadata();
 
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
index dbf39e7..c1fd2e2 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/CubeHFileMapper2Test.java
@@ -21,7 +21,6 @@ package org.apache.kylin.storage.hbase.steps;
 import static org.junit.Assert.assertTrue;
 
 import java.io.File;
-import java.nio.ByteBuffer;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -32,7 +31,6 @@ import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.LocalFileMetadataTestCase;
 import org.apache.kylin.cube.CubeManager;
-import org.apache.kylin.cube.kv.RowConstants;
 import org.apache.kylin.cube.model.CubeDesc;
 import org.apache.kylin.engine.mr.HadoopUtil;
 import org.apache.kylin.measure.MeasureDecoder;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
index 6475bad..81ac32f 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/RowValueDecoderTest.java
@@ -18,7 +18,7 @@
 
 package org.apache.kylin.storage.hbase.steps;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
----------------------------------------------------------------------
diff --git 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
index e49640e..3d6ac57 100644
--- 
a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
+++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java
@@ -54,10 +54,10 @@ public class SandboxMetastoreCLI {
 
         if ("reset".equalsIgnoreCase(args[0])) {
             ResourceTool.main(new String[] { "reset" });
-        }else if ("download".equalsIgnoreCase(args[0])) {
+        } else if ("download".equalsIgnoreCase(args[0])) {
             ResourceTool.main(new String[] { "download", args[1] });
         } else if ("fetch".equalsIgnoreCase(args[0])) {
-            ResourceTool.main(new String[] { "fetch", args[1], args[2]});
+            ResourceTool.main(new String[] { "fetch", args[1], args[2] });
         } else if ("upload".equalsIgnoreCase(args[0])) {
             ResourceTool.main(new String[] { "upload", args[1] });
         } else {
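The fetch branch indexes args[1] and args[2] directly, so a caller passing too
few arguments would hit an ArrayIndexOutOfBoundsException before ResourceTool
runs. A hedged guard sketch (the usage text is hypothetical, not from this
commit):

    if ("fetch".equalsIgnoreCase(args[0])) {
        if (args.length < 3) {
            System.out.println("Usage: fetch <arg1> <arg2>"); // hypothetical message
            return;
        }
        ResourceTool.main(new String[] { "fetch", args[1], args[2] });
    }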

http://git-wip-us.apache.org/repos/asf/kylin/blob/5d4982e2/tool/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/tool/.settings/org.eclipse.core.resources.prefs 
b/tool/.settings/org.eclipse.core.resources.prefs
new file mode 100644
index 0000000..365bbd6
--- /dev/null
+++ b/tool/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,5 @@
+eclipse.preferences.version=1
+encoding//src/main/java=UTF-8
+encoding//src/main/resources=UTF-8
+encoding//src/test/java=UTF-8
+encoding/<project>=UTF-8
