pfzhan commented on code in PR #2089:
URL: https://github.com/apache/kylin/pull/2089#discussion_r1110716640


##########
src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkCubingJob.java:
##########
@@ -422,4 +431,53 @@ static class NSparkCubingJobStep {
         private final AbstractExecutable secondStorage;
         private final AbstractExecutable cleanUpTransactionalTable;
     }
+
+    private static void enableCostBasedPlannerIfNeed(NDataflow df, 
Set<NDataSegment> segments, NSparkCubingJob job) {
+        // need run the cost based planner:
+        // 1. config enable the cube planner
+        // 2. the model does not have the `layout_cost_based_pruned_list`
+        // 3. rule index has agg group
+        // 4. only one segment is to be built/refreshed (other cases will throw an 
exception)
+        IndexPlan indexPlan = df.getIndexPlan();
+        KylinConfig kylinConfig = indexPlan.getConfig();
+        boolean needCostRecommendIndex = indexPlan.getRuleBasedIndex() != null
+                && indexPlan.getRuleBasedIndex().getLayoutsOfCostBasedList() 
== null
+                && 
!indexPlan.getRuleBasedIndex().getAggregationGroups().isEmpty();
+        if (kylinConfig.enableCostBasedIndexPlanner() && needCostRecommendIndex
+                && canEnablePlannerJob(job.getJobType())) {
+            // must run the cost based planner
+            if (segments.size() == 1) {
+                if (noBuildingSegmentExist(df.getProject(), 
job.getTargetSubject(), kylinConfig)) {
+                    // check the count of rowkey:
+                    // if the count of row key exceed the 63, throw exception
+                    if 
(indexPlan.getRuleBasedIndex().countOfIncludeDimension() > (Long.SIZE - 1)) {
+                        throw new RuntimeException(String.format(
+                                "The count of row key %d can't be larger than 
63, when use the cube planner",
+                                
indexPlan.getRuleBasedIndex().countOfIncludeDimension()));
+                    }
+                    // Add the parameter `P_JOB_ENABLE_PLANNER` which is used 
to decide whether to use the  cube planner
+                    job.setParam(NBatchConstants.P_JOB_ENABLE_PLANNER, 
Boolean.TRUE.toString());
+                } else {
+                    throw new RuntimeException(
+                            "There are running job for this model when submit 
the build job with cost based planner, "
+                                    + "please wait for other jobs to finish or 
cancel them");
+                }
+            } else {
+                throw new RuntimeException("The number of segments to be built 
or refreshed must be 1, "

Review Comment:
   同上 (Same as above — see the earlier comment.)



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to