beliefer commented on a change in pull request #27058: [SPARK-30395][SQL] When one or more DISTINCT aggregate expressions operate on the same field, the DISTINCT aggregate expression allows the use of the FILTER clause
URL: https://github.com/apache/spark/pull/27058#discussion_r365235877
 
 

 ##########
 File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RewriteDistinctAggregates.scala
 ##########
 @@ -316,6 +362,86 @@ object RewriteDistinctAggregates extends Rule[LogicalPlan] {
         }.asInstanceOf[NamedExpression]
       }
       Aggregate(groupByAttrs, patchedAggExpressions, firstAggregate)
+    } else if (distinctAggGroups.size == 1) {
+      val (distinctAggExpressions, regularAggExpressions) = 
aggExpressions.partition(_.isDistinct)
+      if (distinctAggExpressions.exists(_.filter.isDefined)) {
+        val regularAggExprs = regularAggExpressions.filter(e => e.children.exists(!_.foldable))
+        val regularFunChildren = regularAggExprs
+          .flatMap(_.aggregateFunction.children.filter(!_.foldable))
+        val regularFilterAttrs = regularAggExprs.flatMap(_.filterAttributes)
+        val regularAggChildren = (regularFunChildren ++ regularFilterAttrs).distinct
+        val regularAggChildAttrMap = regularAggChildren.map(expressionAttributePair)
+        val regularAggChildAttrLookup = regularAggChildAttrMap.toMap
+        val regularOperatorMap = regularAggExprs.map {
+          case ae @ AggregateExpression(af, _, _, filter, _) =>
+            val newChildren = af.children.map(c => regularAggChildAttrLookup.getOrElse(c, c))
+            val raf = af.withNewChildren(newChildren).asInstanceOf[AggregateFunction]
+            val filterOpt = filter.map(_.transform {
+              case a: Attribute => regularAggChildAttrLookup.getOrElse(a, a)
+            })
+            val aggExpr = ae.copy(aggregateFunction = raf, filter = filterOpt)
+            (ae, aggExpr)
+        }
+        val distinctAggExprs = distinctAggExpressions.filter(e => e.children.exists(!_.foldable))
+        val rewriteDistinctOperatorMap = distinctAggExprs.zipWithIndex.map {
+          case (ae @ AggregateExpression(af, _, _, filter, _), i) =>
+            // Why do we need to construct a phantom id?
+            // First, to reduce cost, it is better to handle the filter clause locally.
+            // e.g. for COUNT (DISTINCT a) FILTER (WHERE id > 1), evaluate the expression
+            // If(id > 1, 'a, null) first and use its result as the aggregate input.
+            // Second, if more than one DISTINCT aggregate expression uses the same column,
+            // we need to construct phantom attributes so that no output is lost.
+            // e.g. SUM (DISTINCT a), COUNT (DISTINCT a) FILTER (WHERE id > 1) will output
+            // the attributes 'phantom1-a and 'phantom2-a instead of two 'a.
+            // Note: we only create phantoms for the expressions that carry a filter clause.
+            // This phantom mechanism may make multiple distinct aggregations use different
+            // columns, so we still need to call `rewrite`.
+            val phantomId = i + 1
+            val unfoldableChildren = af.children.filter(!_.foldable)
+            val exprAttrs = unfoldableChildren.map { e =>
+              (e, AttributeReference(s"phantom$phantomId-${e.sql}", e.dataType, nullable = true)())
 
 Review comment:
   OK. Thanks!
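
For readers following along, here is a minimal, self-contained sketch (not part of this PR) of the query shape the new branch targets and of the "evaluate If(id > 1, 'a, null) first" idea from the comment above, written against the stable DataFrame API. The object name `DistinctFilterSketch`, the column names `id` and `a`, and the sample rows are made up for illustration; `when` without `otherwise` yields null, and the distinct aggregate skips nulls, which mirrors the local filter handling described in the diff.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.functions.{col, countDistinct, sumDistinct, when}

    object DistinctFilterSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("distinct-filter-sketch")
          .getOrCreate()
        import spark.implicits._

        // Hypothetical sample data, only for illustration.
        val df = Seq((1, 10), (2, 10), (3, 20)).toDF("id", "a")

        // SQL shape this PR enables (FILTER on a DISTINCT aggregate):
        //   SELECT SUM(DISTINCT a),
        //          COUNT(DISTINCT a) FILTER (WHERE id > 1)
        //   FROM t
        //
        // Manual equivalent of handling the filter locally: push the predicate
        // into the aggregated column; rows that fail it become null and are
        // ignored by the aggregate, like If(id > 1, 'a, null) in the comment.
        df.agg(
          sumDistinct(col("a")),
          countDistinct(when(col("id") > 1, col("a")))
        ).show()

        spark.stop()
      }
    }

Both forms agree because aggregate functions skip null inputs, which appears to be the same observation the rewrite exploits per aggregate expression.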
