Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22899#discussion_r238483571
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 ---
    @@ -880,21 +880,38 @@ class Analyzer(
           }
         }
     
    -    private def resolve(e: Expression, q: LogicalPlan): Expression = e 
match {
    -      case f: LambdaFunction if !f.bound => f
    -      case u @ UnresolvedAttribute(nameParts) =>
    -        // Leave unchanged if resolution fails. Hopefully will be resolved 
next round.
    -        val result =
    -          withPosition(u) {
    -            q.resolveChildren(nameParts, resolver)
    -              .orElse(resolveLiteralFunction(nameParts, u, q))
    -              .getOrElse(u)
    -          }
    -        logDebug(s"Resolving $u to $result")
    -        result
    -      case UnresolvedExtractValue(child, fieldExpr) if child.resolved =>
    -        ExtractValue(child, fieldExpr, resolver)
    -      case _ => e.mapChildren(resolve(_, q))
    +    /**
     +     * Resolves the attribute and extract value expression(s) by 
traversing the
     +     * input expression in a top-down manner. The traversal is done 
top-down as
     +     * we need to skip over unbound lambda function expressions. The 
lambda expressions are
     +     * resolved in a different rule, [[ResolveLambdaVariables]].
    +     *
     +     * Example:
     +     * SELECT transform(array(1, 2, 3), (x, i) -> x + i)
    +     *
     +     * In the case above, x and i are resolved as lambda variables in 
[[ResolveLambdaVariables]].
    +     *
     +     * Note: In this routine, the unresolved attributes are resolved from 
the input plan's
     +     * children's attributes.
    +     */
    +    private def resolveExpressionTopDown(e: Expression, q: LogicalPlan): 
Expression = {
    +      if (e.resolved) return e
    --- End diff --
    
    A good catch!


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to