Repository: kylin
Updated Branches:
  refs/heads/orderedbytes 5064b6b1a -> 164288ccb


KYLIN-1726 fix BuildCubeWithStream


Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/164288cc
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/164288cc
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/164288cc

Branch: refs/heads/orderedbytes
Commit: 164288ccb1c7ff529703402f687181d818b2c3fc
Parents: 5064b6b
Author: Hongbin Ma <mahong...@apache.org>
Authored: Sat Oct 8 14:08:41 2016 +0800
Committer: Hongbin Ma <mahong...@apache.org>
Committed: Sat Oct 8 14:08:41 2016 +0800

----------------------------------------------------------------------
 .../kylin/provision/BuildCubeWithStream.java    | 27 +++++++-------------
 1 file changed, 9 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/164288cc/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
----------------------------------------------------------------------
diff --git a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
index f8805a6..d431005 100644
--- a/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
+++ b/kylin-it/src/test/java/org/apache/kylin/provision/BuildCubeWithStream.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.provision;
 
+import static java.lang.Thread.sleep;
+
 import java.io.File;
 import java.io.IOException;
 import java.text.ParseException;
@@ -32,10 +34,8 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.FutureTask;
 import java.util.concurrent.TimeUnit;
 
-import com.google.common.collect.Lists;
 import org.I0Itec.zkclient.ZkConnection;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.util.ToolRunner;
 import org.apache.kafka.common.requests.MetadataResponse;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.ClassUtil;
@@ -65,7 +65,7 @@ import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static java.lang.Thread.sleep;
+import com.google.common.collect.Lists;
 
 /**
  *  for streaming cubing case "test_streaming_table"
@@ -205,7 +205,7 @@ public class BuildCubeWithStream {
         for (int i = 0; i < futures.size(); i++) {
             ExecutableState result = futures.get(i).get(20, TimeUnit.MINUTES);
             logger.info("Checking building task " + i + " whose state is " + result);
-            Assert.assertTrue(result == null || result == ExecutableState.SUCCEED || result == ExecutableState.DISCARDED );
+            Assert.assertTrue(result == null || result == ExecutableState.SUCCEED || result == ExecutableState.DISCARDED);
             if (result == ExecutableState.SUCCEED)
                 succeedBuild++;
         }
@@ -214,7 +214,6 @@ public class BuildCubeWithStream {
         List<CubeSegment> segments = cubeManager.getCube(cubeName).getSegments(SegmentStatusEnum.READY);
         Assert.assertTrue(segments.size() == succeedBuild);
 
-
         if (fastBuildMode == false) {
             //empty build
             ExecutableState result = buildSegment(cubeName, 0, Long.MAX_VALUE);
@@ -238,7 +237,6 @@ public class BuildCubeWithStream {
         logger.info("Build is done");
     }
 
-
     private ExecutableState mergeSegment(String cubeName, long startOffset, long endOffset) throws Exception {
         CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), 0, 0, startOffset, endOffset, false);
         DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
@@ -279,14 +277,12 @@ public class BuildCubeWithStream {
         HBaseMetadataTestCase.staticCreateTestMetadata(HBaseMetadataTestCase.SANDBOX_TEST_DATA);
     }
 
-    public static void afterClass() throws Exception {
-        cleanupOldStorage();
-        HBaseMetadataTestCase.staticCleanupTestMetadata();
-    }
-
     public void after() {
         kafkaServer.stop();
         DefaultScheduler.destroyInstance();
+
+        cleanupOldStorage();
+        HBaseMetadataTestCase.staticCleanupTestMetadata();
     }
 
     protected void waitForJob(String jobId) {
@@ -304,7 +300,7 @@ public class BuildCubeWithStream {
         }
     }
 
-    private static void cleanupOldStorage() throws Exception {
+    protected void cleanupOldStorage() {
         String[] args = { "--delete", "true" };
         StorageCleanupJob cli = new StorageCleanupJob();
         cli.execute(args);
@@ -317,17 +313,12 @@ public class BuildCubeWithStream {
             buildCubeWithStream = new BuildCubeWithStream();
             buildCubeWithStream.before();
             buildCubeWithStream.build();
+            buildCubeWithStream.after();
             logger.info("Going to exit");
             System.exit(0);
         } catch (Throwable e) {
             logger.error("error", e);
             System.exit(1);
-        } finally {
-            if (buildCubeWithStream != null) {
-                buildCubeWithStream.after();
-            }
-            afterClass();
         }
-
     }
 }

Reply via email to