[ 
https://issues.apache.org/jira/browse/HIVE-23893?focusedWorklogId=463597&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-463597
 ]

ASF GitHub Bot logged work on HIVE-23893:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 27/Jul/20 11:32
            Start Date: 27/Jul/20 11:32
    Worklog Time Spent: 10m 
      Work Description: dengzhhu653 commented on a change in pull request #1322:
URL: https://github.com/apache/hive/pull/1322#discussion_r460826125



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
##########
@@ -782,6 +790,89 @@ protected ExprWalkerInfo mergeChildrenPred(Node nd, 
OpWalkerInfo owi,
     }
   }
 
+  protected static Object splitFilter(FilterOperator op,
+      ExprWalkerInfo ewi, OpWalkerInfo owi) throws SemanticException {
+
+    RowSchema inputRS = op.getSchema();
+
+    Map<String, List<ExprNodeDesc>> pushDownPreds = ewi.getFinalCandidates();
+    Map<String, List<ExprNodeDesc>> unPushDownPreds = 
ewi.getNonFinalCandidates();
+
+    // combine all deterministic predicates into a single expression
+    List<ExprNodeDesc> deterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator1 = pushDownPreds.values().iterator();
+    while (iterator1.hasNext()) {
+      for (ExprNodeDesc pred : iterator1.next()) {
+        deterministicPreds = ExprNodeDescUtils.split(pred, deterministicPreds);
+      }
+    }
+
+    if (deterministicPreds.isEmpty()) {
+      return null;
+    }
+
+    List<ExprNodeDesc> nondeterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator2 = 
unPushDownPreds.values().iterator();
+    while (iterator2.hasNext()) {
+      for (ExprNodeDesc pred : iterator2.next()) {
+        nondeterministicPreds = ExprNodeDescUtils.split(pred, 
nondeterministicPreds);
+      }
+    }
+
+    assert !nondeterministicPreds.isEmpty();
+
+    ExprNodeDesc deterministicCondn = 
ExprNodeDescUtils.mergePredicates(deterministicPreds);
+    ExprNodeDesc nondeterministicCondn = 
ExprNodeDescUtils.mergePredicates(nondeterministicPreds);
+
+    Operator<FilterDesc> deterministicFilter =
+        OperatorFactory.get(new FilterDesc(deterministicCondn, false), new 
RowSchema(inputRS.getSignature()));
+
+    deterministicFilter.setChildOperators(new ArrayList<Operator<? extends 
OperatorDesc>>());
+    deterministicFilter.getChildOperators().add(op);
+
+    List<Operator<? extends OperatorDesc>> originalParents = op
+        .getParentOperators();
+    for (Operator<? extends OperatorDesc> parent : originalParents) {
+      List<Operator<? extends OperatorDesc>> childOperators = parent
+          .getChildOperators();
+      int pos = childOperators.indexOf(op);
+      childOperators.remove(pos);
+      childOperators.add(pos, deterministicFilter);
+
+      int pPos = op.getParentOperators().indexOf(parent);
+      deterministicFilter.getParentOperators().add(pPos, parent);
+    }
+
+    op.getParentOperators().clear();
+    op.getParentOperators().add(deterministicFilter);
+    op.getConf().setPredicate(nondeterministicCondn);
+
+    if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+        HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+      // remove the candidate filter ops
+      for (FilterOperator fop : owi.getCandidateFilterOps()) {
+        List<Operator<? extends OperatorDesc>> children = 
fop.getChildOperators();
+        List<Operator<? extends OperatorDesc>> parents = 
fop.getParentOperators();
+        for (Operator<? extends OperatorDesc> parent : parents) {
+          parent.getChildOperators().addAll(children);
+          parent.removeChild(fop);
+        }
+        for (Operator<? extends OperatorDesc> child : children) {
+          child.getParentOperators().addAll(parents);
+          child.removeParent(fop);
+        }
+      }
+      owi.getCandidateFilterOps().clear();
+    }
+
+    ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op,
+        deterministicFilter.getConf().getPredicate());
+
+    owi.putPrunedPreds(deterministicFilter, ewi);

Review comment:
       the deterministicFilter should be added to OpWalkerInfo.candidateFilterOps, 
and there is no need to call extractPushdownPreds again

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
##########
@@ -782,6 +790,89 @@ protected ExprWalkerInfo mergeChildrenPred(Node nd, 
OpWalkerInfo owi,
     }
   }
 
+  protected static Object splitFilter(FilterOperator op,
+      ExprWalkerInfo ewi, OpWalkerInfo owi) throws SemanticException {
+
+    RowSchema inputRS = op.getSchema();
+
+    Map<String, List<ExprNodeDesc>> pushDownPreds = ewi.getFinalCandidates();
+    Map<String, List<ExprNodeDesc>> unPushDownPreds = 
ewi.getNonFinalCandidates();
+
+    // combine all deterministic predicates into a single expression
+    List<ExprNodeDesc> deterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator1 = pushDownPreds.values().iterator();
+    while (iterator1.hasNext()) {
+      for (ExprNodeDesc pred : iterator1.next()) {
+        deterministicPreds = ExprNodeDescUtils.split(pred, deterministicPreds);
+      }
+    }
+
+    if (deterministicPreds.isEmpty()) {
+      return null;
+    }
+
+    List<ExprNodeDesc> nondeterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator2 = 
unPushDownPreds.values().iterator();
+    while (iterator2.hasNext()) {
+      for (ExprNodeDesc pred : iterator2.next()) {
+        nondeterministicPreds = ExprNodeDescUtils.split(pred, 
nondeterministicPreds);
+      }
+    }
+
+    assert !nondeterministicPreds.isEmpty();
+
+    ExprNodeDesc deterministicCondn = 
ExprNodeDescUtils.mergePredicates(deterministicPreds);
+    ExprNodeDesc nondeterministicCondn = 
ExprNodeDescUtils.mergePredicates(nondeterministicPreds);
+
+    Operator<FilterDesc> deterministicFilter =
+        OperatorFactory.get(new FilterDesc(deterministicCondn, false), new 
RowSchema(inputRS.getSignature()));
+
+    deterministicFilter.setChildOperators(new ArrayList<Operator<? extends 
OperatorDesc>>());
+    deterministicFilter.getChildOperators().add(op);
+
+    List<Operator<? extends OperatorDesc>> originalParents = op
+        .getParentOperators();
+    for (Operator<? extends OperatorDesc> parent : originalParents) {
+      List<Operator<? extends OperatorDesc>> childOperators = parent
+          .getChildOperators();
+      int pos = childOperators.indexOf(op);
+      childOperators.remove(pos);
+      childOperators.add(pos, deterministicFilter);
+
+      int pPos = op.getParentOperators().indexOf(parent);
+      deterministicFilter.getParentOperators().add(pPos, parent);
+    }
+
+    op.getParentOperators().clear();
+    op.getParentOperators().add(deterministicFilter);
+    op.getConf().setPredicate(nondeterministicCondn);
+
+    if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+        HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+      // remove the candidate filter ops
+      for (FilterOperator fop : owi.getCandidateFilterOps()) {

Review comment:
       why do we remove the candidate filter operations here?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
##########
@@ -782,6 +790,89 @@ protected ExprWalkerInfo mergeChildrenPred(Node nd, 
OpWalkerInfo owi,
     }
   }
 
+  protected static Object splitFilter(FilterOperator op,
+      ExprWalkerInfo ewi, OpWalkerInfo owi) throws SemanticException {
+
+    RowSchema inputRS = op.getSchema();
+
+    Map<String, List<ExprNodeDesc>> pushDownPreds = ewi.getFinalCandidates();
+    Map<String, List<ExprNodeDesc>> unPushDownPreds = 
ewi.getNonFinalCandidates();
+
+    // combine all deterministic predicates into a single expression
+    List<ExprNodeDesc> deterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator1 = pushDownPreds.values().iterator();
+    while (iterator1.hasNext()) {
+      for (ExprNodeDesc pred : iterator1.next()) {
+        deterministicPreds = ExprNodeDescUtils.split(pred, deterministicPreds);
+      }
+    }
+
+    if (deterministicPreds.isEmpty()) {
+      return null;
+    }
+
+    List<ExprNodeDesc> nondeterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator2 = 
unPushDownPreds.values().iterator();
+    while (iterator2.hasNext()) {
+      for (ExprNodeDesc pred : iterator2.next()) {
+        nondeterministicPreds = ExprNodeDescUtils.split(pred, 
nondeterministicPreds);
+      }
+    }
+
+    assert !nondeterministicPreds.isEmpty();
+
+    ExprNodeDesc deterministicCondn = 
ExprNodeDescUtils.mergePredicates(deterministicPreds);
+    ExprNodeDesc nondeterministicCondn = 
ExprNodeDescUtils.mergePredicates(nondeterministicPreds);
+
+    Operator<FilterDesc> deterministicFilter =
+        OperatorFactory.get(new FilterDesc(deterministicCondn, false), new 
RowSchema(inputRS.getSignature()));
+
+    deterministicFilter.setChildOperators(new ArrayList<Operator<? extends 
OperatorDesc>>());

Review comment:
       using OpProcFactory.createFilter may be better?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
##########
@@ -782,6 +790,89 @@ protected ExprWalkerInfo mergeChildrenPred(Node nd, 
OpWalkerInfo owi,
     }
   }
 
+  protected static Object splitFilter(FilterOperator op,
+      ExprWalkerInfo ewi, OpWalkerInfo owi) throws SemanticException {
+
+    RowSchema inputRS = op.getSchema();
+
+    Map<String, List<ExprNodeDesc>> pushDownPreds = ewi.getFinalCandidates();
+    Map<String, List<ExprNodeDesc>> unPushDownPreds = 
ewi.getNonFinalCandidates();
+
+    // combine all deterministic predicates into a single expression
+    List<ExprNodeDesc> deterministicPreds = new ArrayList<ExprNodeDesc>();
+    Iterator<List<ExprNodeDesc>> iterator1 = pushDownPreds.values().iterator();
+    while (iterator1.hasNext()) {
+      for (ExprNodeDesc pred : iterator1.next()) {
+        deterministicPreds = ExprNodeDescUtils.split(pred, deterministicPreds);

Review comment:
       The ```ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate)``` 
 has already split the pred, so maybe there is no need to split it again

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
##########
@@ -417,9 +417,17 @@ public Object process(Node nd, Stack<Node> stack, 
NodeProcessorCtx procCtx,
         if (!ewi.isDeterministic()) {
           /* predicate is not deterministic */
           if (op.getChildren() != null && op.getChildren().size() == 1) {
+            ExprWalkerInfo prunedPreds = owi.getPrunedPreds((Operator<? 
extends OperatorDesc>) (op
+                .getChildren().get(0)));
+            //resolve of HIVE-23893
+            if (!(prunedPreds != null && prunedPreds.hasAnyCandidates())

Review comment:
       why could the pruned predicates of the child not be pushed down? 




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 463597)
    Time Spent: 0.5h  (was: 20m)

> Extract deterministic conditions for pdd when the predicate contains 
> non-deterministic function
> -----------------------------------------------------------------------------------------------
>
>                 Key: HIVE-23893
>                 URL: https://issues.apache.org/jira/browse/HIVE-23893
>             Project: Hive
>          Issue Type: Improvement
>          Components: Logical Optimizer
>            Reporter: Zhihua Deng
>            Assignee: zhishui
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 0.5h
>  Remaining Estimate: 0h
>
> Taken the following query for example, assume unix_timestamp is 
> non-deterministic before version 1.3.0:
>   
>  {{SELECT}}
>  {{        from_unixtime(unix_timestamp(a.first_dt), 'yyyyMMdd') AS ft,}}
>  {{        b.game_id AS game_id,}}
>  {{        b.game_name AS game_name,}}
>  {{        count(DISTINCT a.sha1_imei) uv}}
>  {{FROM}}
>  {{        gamesdk_userprofile a}}
>  {{        JOIN game_info_all b ON a.appid = b.dev_app_id}}
>  {{WHERE}}
>  {{        a.date = 20200704}}
>  {{        AND from_unixtime(unix_timestamp(a.first_dt), 'yyyyMMdd') = 
> 20200704}}
>  {{        AND b.date = 20200704}}
>  {{GROUP BY}}
>  {{        from_unixtime(unix_timestamp(a.first_dt), 'yyyyMMdd'),}}
>  {{        b.game_id,}}
>  {{        b.game_name}}
>  {{ORDER BY}}
>  {{        uv DESC}}
>  {{LIMIT 200;}}
>   
> The predicates (a.date = 20200704, b.date = 20200704) are unable to push down 
> to the join op, making the optimizer unable to prune partitions, which may result 
> in a full scan on tables gamesdk_userprofile and game_info_all.



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to