HIVE-10506: CBO (Calcite Return Path): Disallow return path to be enabled if CBO is off (Jesus Camacho Rodriguez via Laljo John Pullokkaran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/93995c8b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/93995c8b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/93995c8b

Branch: refs/heads/beeline-cli
Commit: 93995c8be3dedc8785ced64939c608ae2433d4af
Parents: ecde4ae
Author: John Pullokkaran <jpullokka...@hortonworks.com>
Authored: Wed May 6 18:15:33 2015 -0700
Committer: jpullokk <jpull...@apache.org>
Committed: Wed May 6 18:21:19 2015 -0700

----------------------------------------------------------------------
 ql/src/java/org/apache/hadoop/hive/ql/Context.java      | 10 +++++++++-
 .../hive/ql/optimizer/IdentityProjectRemover.java       | 12 ++++++++++++
 .../hive/ql/optimizer/NonBlockingOpDeDupProc.java       | 11 +++++++++++
 .../org/apache/hadoop/hive/ql/optimizer/Optimizer.java  |  8 +++-----
 .../calcite/translator/HiveOpConverterPostProc.java     | 10 ++++++++++
 .../org/apache/hadoop/hive/ql/parse/CalcitePlanner.java |  1 +
 6 files changed, 46 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
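
For context: the patch threads a new cboSucceeded flag through Context and has each affected Transform evaluate the same three conditions before doing any work. Below is a minimal sketch of that shared check as a hypothetical helper (ReturnPathGuard is illustration only; the patch inlines the logic in each Transform rather than sharing a utility class):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.ParseContext;

// Illustration only, not part of this patch.
public final class ReturnPathGuard {

  private ReturnPathGuard() {
  }

  // True only when CBO is enabled, the Calcite return path is enabled,
  // and CBO actually produced the current plan (Context.isCboSucceeded()).
  public static boolean isReturnPathActive(ParseContext pctx) {
    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(),
        HiveConf.ConfVars.HIVE_CBO_ENABLED);
    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(),
        HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
    return cboEnabled && returnPathEnabled && cboSucceeded;
  }
}

In the diffs below, HiveOpConverterPostProc bails out when this condition is false, while IdentityProjectRemover and NonBlockingOpDeDupProc bail out when it is true, so each of them runs on exactly one of the two plan-generation paths.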


http://git-wip-us.apache.org/repos/asf/hive/blob/93995c8b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 9692738..a74bbbe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -23,7 +23,6 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
@@ -86,6 +85,7 @@ public class Context {
   protected int pathid = 10000;
   protected boolean explain = false;
   protected String cboInfo;
+  protected boolean cboSucceeded;
   protected boolean explainLogical = false;
   protected String cmd = "";
   // number of previous attempts
@@ -706,4 +706,12 @@ public class Context {
     this.cboInfo = cboInfo;
   }
 
+  public boolean isCboSucceeded() {
+    return cboSucceeded;
+  }
+
+  public void setCboSucceeded(boolean cboSucceeded) {
+    this.cboSucceeded = cboSucceeded;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/93995c8b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
index 433699b..e3d3ce6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/IdentityProjectRemover.java
@@ -26,8 +26,10 @@ import java.util.Stack;
 
 import com.google.common.base.Predicates;
 import com.google.common.collect.Iterators;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -69,6 +71,16 @@ public class IdentityProjectRemover implements Transform {
   private static final Log LOG = LogFactory.getLog(IdentityProjectRemover.class);
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
+    // 0. We check the conditions to apply this transformation,
+    //    if we do not meet them we bail out
+    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
+    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
+    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
+    if(cboEnabled && returnPathEnabled && cboSucceeded) {
+      return pctx;
+    }
+
+    // 1. We apply the transformation
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
     opRules.put(new RuleRegExp("R1",
       "(" + SelectOperator.getOperatorName() + "%)"), new ProjectRemover());

http://git-wip-us.apache.org/repos/asf/hive/blob/93995c8b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
index 95c2b0b..3006a6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/NonBlockingOpDeDupProc.java
@@ -28,6 +28,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
@@ -57,6 +58,16 @@ public class NonBlockingOpDeDupProc implements Transform {
 
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
+    // 0. We check the conditions to apply this transformation,
+    //    if we do not meet them we bail out
+    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
+    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
+    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
+    if(cboEnabled && returnPathEnabled && cboSucceeded) {
+      return pctx;
+    }
+
+    // 1. We apply the transformation
     String SEL = SelectOperator.getOperatorName();
     String FIL = FilterOperator.getOperatorName();
     Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();

http://git-wip-us.apache.org/repos/asf/hive/blob/93995c8b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
index 58f8afe..a7cf8b7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
@@ -62,11 +62,9 @@ public class Optimizer {
 
     transformations = new ArrayList<Transform>();
 
-    // If we are translating Calcite operators into Hive operators, we need
-    // additional postprocessing
-    if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) {
-      transformations.add(new HiveOpConverterPostProc());
-    }
+    // Add the additional postprocessing transformations needed if
+    // we are translating Calcite operators into Hive operators.
+    transformations.add(new HiveOpConverterPostProc());
 
     // Add the transformation that computes the lineage information.
     transformations.add(new Generator());

http://git-wip-us.apache.org/repos/asf/hive/blob/93995c8b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
index cdd7c7e..e7c8342 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/HiveOpConverterPostProc.java
@@ -26,6 +26,7 @@ import java.util.Stack;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -55,6 +56,15 @@ public class HiveOpConverterPostProc implements Transform {
 
   @Override
   public ParseContext transform(ParseContext pctx) throws SemanticException {
+    // 0. We check the conditions to apply this transformation,
+    //    if we do not meet them we bail out
+    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
+    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
+    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
+    if(!(cboEnabled && returnPathEnabled && cboSucceeded)) {
+      return pctx;
+    }
+
     // 1. Initialize aux data structures
     this.pctx = pctx;
     this.aliasToOpInfo = new HashMap<String, Operator<? extends OperatorDesc>>();

http://git-wip-us.apache.org/repos/asf/hive/blob/93995c8b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 49ad6ad..48f488f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -265,6 +265,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
           sinkOp = genPlan(getQB());
           LOG.info("CBO Succeeded; optimized logical plan.");
           this.ctx.setCboInfo("Plan optimized by CBO.");
+          this.ctx.setCboSucceeded(true);
           LOG.debug(newAST.dump());
           } 
         } catch (Exception e) {
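
To exercise the new behavior, both flags must be on and CBO must actually succeed; if CBO is off, the return-path post-processing is now skipped regardless of HIVE_CBO_RETPATH_HIVEOP. A minimal sketch of setting the flags programmatically (illustration only, not part of this patch; assumes a default HiveConf on the classpath):

import org.apache.hadoop.hive.conf.HiveConf;

// Illustration only, not part of this patch.
public class ReturnPathConfigExample {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // The return path only takes effect when CBO itself is enabled; with
    // HIVE_CBO_ENABLED off, the legacy optimizer transforms run instead.
    HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED, true);
    HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP, true);

    System.out.println("CBO enabled: "
        + HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED));
    System.out.println("Return path enabled: "
        + HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP));
  }
}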
