Github user gengliangwang commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19451#discussion_r143870225
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala ---
    @@ -1242,6 +1243,54 @@ object ReplaceIntersectWithSemiJoin extends Rule[LogicalPlan] {
     }
     
     /**
    + * If one or both of the datasets in the logical [[Except]] operator are purely transformed using
    + * [[Filter]], this rule will replace the logical [[Except]] operator with a [[Filter]] operator by
    + * flipping the filter condition of the right child.
    + * {{{
    + *   SELECT a1, a2 FROM Tab1 WHERE a2 = 12 EXCEPT SELECT a1, a2 FROM Tab1 WHERE a1 = 5
    + *   ==>  SELECT DISTINCT a1, a2 FROM Tab1 WHERE a2 = 12 AND a1 <> 5
    + * }}}
    + *
    + * Note:
    + * 1. We should combine all the [[Filter]]s of the right node before flipping it using the NOT operator.
    + */
    +object ReplaceExceptWithNotFilter extends Rule[LogicalPlan] {
    +
    +  import scala.language.implicitConversions
    +  implicit def nodeToFilter(node: LogicalPlan): Filter = 
node.asInstanceOf[Filter]
    +
    +  def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    +    case Except(left, right) if isEligible(left, right) =>
    +      Distinct(
    +        Filter(Not(replaceAttributesIn(combineFilters(right).condition, left)), left)
    +      )
    +  }
    +
    +  def isEligible(left: LogicalPlan, right: LogicalPlan): Boolean = (left, right) match {
    +    case (left, right: Filter) => child(left).sameResult(child(right))
    +    case _ => false
    +  }
    +
    +  def child(plan: LogicalPlan): LogicalPlan = plan match {
    +    case _ : Filter => child(plan.child)
    +    case x => x
    +  }
    +
    +  def combineFilters(plan: LogicalPlan): LogicalPlan = CombineFilters(plan) match {
    --- End diff --
    
    I think we can change this to a loop or a tail-recursive call without `match`.
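    
    For reference, a minimal sketch of the tail-recursive version I have in mind (just a sketch; it assumes `CombineFilters` converges to a fixed point, which `fastEquals` detects):
    
    ```scala
    import scala.annotation.tailrec
    
    @tailrec
    private def combineFilters(plan: LogicalPlan): LogicalPlan = {
      // Apply CombineFilters once; stop when the plan no longer changes.
      val combined = CombineFilters(plan)
      if (combined fastEquals plan) plan else combineFilters(combined)
    }
    ```
    
    `@tailrec` makes the compiler verify the call is in tail position, so this compiles down to a loop.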


---
