This is an automated email from the ASF dual-hosted git repository.

nju_yaho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/master by this push:
     new b780759  KYLIN-3740 fix NPE during step of Extract Fact Table Distinct Columns
b780759 is described below

commit b780759c6641f0dfbd57f91d4f3d568b8dbb46f6
Author: kyotoYaho <nju_y...@apache.org>
AuthorDate: Mon Jan 7 09:48:59 2019 +0800

    KYLIN-3740 fix NPE during step of Extract Fact Table Distinct Columns
---
 .../engine/mr/steps/FactDistinctColumnsJob.java    | 22 +++++++++++++---------
 1 file changed, 13 insertions(+), 9 deletions(-)

diff --git a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
index 8f5d176..6f78de1 100755
--- a/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
+++ b/engine-mr/src/main/java/org/apache/kylin/engine/mr/steps/FactDistinctColumnsJob.java
@@ -78,6 +78,19 @@ public class FactDistinctColumnsJob extends AbstractHadoopJob {
             // add metadata to distributed cache
             CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
             CubeInstance cube = cubeMgr.getCube(cubeName);
+            CubeSegment segment = cube.getSegmentById(segmentID);
+            if (segment == null) {
+                logger.warn("Failed to find segment {} in cube {}", segmentID, cube);
+                cube = cubeMgr.reloadCubeQuietly(cubeName);
+                segment = cube.getSegmentById(segmentID);
+            }
+            if (segment == null) {
+                logger.error("Failed to find {} in cube {}", segmentID, cube);
+                for (CubeSegment s : cube.getSegments()) {
+                    logger.error(s.getName() + " with status " + s.getStatus());
+                }
+                throw new IllegalStateException();
+            }
 
             job.getConfiguration().set(BatchConstants.CFG_CUBE_NAME, cubeName);
             job.getConfiguration().set(BatchConstants.CFG_CUBE_SEGMENT_ID, segmentID);
@@ -88,15 +101,6 @@ public class FactDistinctColumnsJob extends AbstractHadoopJob {
 
             setJobClasspath(job, cube.getConfig());
 
-            CubeSegment segment = cube.getSegmentById(segmentID);
-            if (segment == null) {
-                logger.error("Failed to find {} in cube {}", segmentID, cube);
-                for (CubeSegment s : cube.getSegments()) {
-                logger.error(s.getName() + " with status " + s.getStatus());
-                }
-                throw new IllegalStateException();
-            }
-
             setupMapper(segment);
             setupReducer(output, segment);
 

Reply via email to