This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this push:
     new 698af2c  KYLIN-4723 Set the configurations about shard by to cube level
698af2c is described below

commit 698af2ca3401f64c4d296be88a1a14621d03d0c6
Author: rupengwang <wangrup...@live.cn>
AuthorDate: Thu Aug 27 18:29:16 2020 +0800

    KYLIN-4723 Set the configurations about shard by to cube level
---
 .../main/scala/org/apache/kylin/engine/spark/job/CubeBuildJob.java    | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/kylin-spark-project/kylin-spark-engine/src/main/scala/org/apache/kylin/engine/spark/job/CubeBuildJob.java b/kylin-spark-project/kylin-spark-engine/src/main/scala/org/apache/kylin/engine/spark/job/CubeBuildJob.java
index 80b0674..3a44d84 100644
--- a/kylin-spark-project/kylin-spark-engine/src/main/scala/org/apache/kylin/engine/spark/job/CubeBuildJob.java
+++ b/kylin-spark-project/kylin-spark-engine/src/main/scala/org/apache/kylin/engine/spark/job/CubeBuildJob.java
@@ -75,6 +75,7 @@ public class CubeBuildJob extends SparkApplication {
     protected static String TEMP_DIR_SUFFIX = "_temp";
 
     private CubeManager cubeManager;
+    private CubeInstance cubeInstance;
     private BuildLayoutWithUpdate buildLayoutWithUpdate;
     private Map<Long, Short> cuboidShardNum = Maps.newHashMap();
     public static void main(String[] args) {
@@ -90,6 +91,7 @@ public class CubeBuildJob extends SparkApplication {
         buildLayoutWithUpdate = new BuildLayoutWithUpdate();
         Set<String> segmentIds = Sets.newHashSet(StringUtils.split(getParam(MetadataConstants.P_SEGMENT_IDS)));
         cubeManager = CubeManager.getInstance(config);
+        cubeInstance = cubeManager.getCubeByUuid(getParam(MetadataConstants.P_CUBE_ID));
         List<String> persistedFlatTable = new ArrayList<>();
         List<String> persistedViewFactTable = new ArrayList<>();
         Path shareDir = config.getJobTmpShareDir(project, jobId);
@@ -353,7 +355,7 @@ public class CubeBuildJob extends SparkApplication {
             layout.setRows(rowCount);
             
layout.setSourceRows(metrics.getMetrics(Metrics.SOURCE_ROWS_CNT()));
         }
-        int shardNum = BuildUtils.repartitionIfNeed(layout, storage, path, tempPath, config, ss);
+        int shardNum = BuildUtils.repartitionIfNeed(layout, storage, path, tempPath, cubeInstance.getConfig(), ss);
         layout.setShardNum(shardNum);
         cuboidShardNum.put(layoutId, (short)shardNum);
         
ss.sparkContext().setLocalProperty(QueryExecutionCache.N_EXECUTION_ID_KEY(), 
null);

Reply via email to