This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new 65326a5  [SPARK-33992][SQL] override transformUpWithNewOutput to add 
allowInvokingTransformsInAnalyzer
65326a5 is described below

commit 65326a52c14b1105ad110c1ca8957303c9b51848
Author: Kent Yao <y...@apache.org>
AuthorDate: Tue Jan 5 05:34:11 2021 +0000

    [SPARK-33992][SQL] override transformUpWithNewOutput to add 
allowInvokingTransformsInAnalyzer
    
    ### What changes were proposed in this pull request?
    
    In https://github.com/apache/spark/pull/29643, we moved the plan rewriting 
methods to QueryPlan. We need to override transformUpWithNewOutput to add 
allowInvokingTransformsInAnalyzer
    because it and resolveOperatorsUpWithNewOutput are called in the analyzer.
    For example,
    
    PaddingAndLengthCheckForCharVarchar could fail a query when calling 
resolveOperatorsUpWithNewOutput,
    failing with
    ```log
    [info] - char/varchar resolution in sub query  *** FAILED *** (367 
milliseconds)
    [info]   java.lang.RuntimeException: This method should not be called in 
the analyzer
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.assertNotAnalysisRule(AnalysisHelper.scala:150)
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.assertNotAnalysisRule$(AnalysisHelper.scala:146)
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.assertNotAnalysisRule(LogicalPlan.scala:29)
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:161)
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:160)
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
    [info]   at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
    [info]   at 
org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$updateOuterReferencesInSubquery(QueryPlan.scala:267)
    ```
    ### Why are the changes needed?
    
    trivial bugfix
    ### Does this PR introduce _any_ user-facing change?
    
    no
    ### How was this patch tested?
    
    new tests
    
    Closes #31013 from yaooqinn/SPARK-33992.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit f0ffe0cd652188873f2ec007e4e282744717a0b3)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../spark/sql/catalyst/plans/logical/AnalysisHelper.scala |  9 +++++++++
 .../scala/org/apache/spark/sql/CharVarcharTestSuite.scala | 15 +++++++++++++++
 2 files changed, 24 insertions(+)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/AnalysisHelper.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/AnalysisHelper.scala
index ffd1f78..54b0141 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/AnalysisHelper.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/AnalysisHelper.scala
@@ -133,6 +133,15 @@ trait AnalysisHelper extends QueryPlan[LogicalPlan] { 
self: LogicalPlan =>
     }
   }
 
+  override def transformUpWithNewOutput(
+      rule: PartialFunction[LogicalPlan, (LogicalPlan, Seq[(Attribute, 
Attribute)])],
+      skipCond: LogicalPlan => Boolean,
+      canGetOutput: LogicalPlan => Boolean): LogicalPlan = {
+    AnalysisHelper.allowInvokingTransformsInAnalyzer {
+      super.transformUpWithNewOutput(rule, skipCond, canGetOutput)
+    }
+  }
+
   /**
    * Recursively transforms the expressions of a tree, skipping nodes that 
have already
    * been analyzed.
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala
index 62d0f51..d20cee0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala
@@ -451,6 +451,21 @@ trait CharVarcharTestSuite extends QueryTest with 
SQLTestUtils {
         Seq(Row("char(5)"), Row("varchar(3)")))
     }
   }
+
+  test("SPARK-33992: char/varchar resolution in correlated sub query") {
+    withTable("t1", "t2") {
+      sql(s"CREATE TABLE t1(v VARCHAR(3), c CHAR(5)) USING $format")
+      sql(s"CREATE TABLE t2(v VARCHAR(3), c CHAR(5)) USING $format")
+      sql("INSERT INTO t1 VALUES ('c', 'b')")
+      sql("INSERT INTO t2 VALUES ('a', 'b')")
+
+      checkAnswer(sql(
+        """
+          |SELECT v FROM t1
+          |WHERE 'a' IN (SELECT v FROM t2 WHERE t1.c = t2.c )""".stripMargin),
+        Row("c"))
+    }
+  }
 }
 
 // Some basic char/varchar tests which doesn't rely on table implementation.


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to