This is an automated email from the ASF dual-hosted git repository. wenchen pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new c1d3fc1de30e [SPARK-46481] Execute immediate VariableReference foldable c1d3fc1de30e is described below commit c1d3fc1de30e6c3e453592cac485e674a864692c Author: milastdbx <milan.stefano...@databricks.com> AuthorDate: Mon Dec 25 22:28:17 2023 +0800 [SPARK-46481] Execute immediate VariableReference foldable ### What changes were proposed in this pull request? As part of the EXECUTE IMMEDIATE statement, we are doing variable resolution, and the [previous PR ](https://github.com/apache/spark/pull/44093) introduced a copy/paste issue from SET variable `canFold = false`. This is fine for the SET command, but parameters should be foldable to match regular query behaviour with the same pattern. ### Why are the changes needed? To align parameterized and non-parameterized queries ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Manually Closes #44450 from milastdbx/dev/milast/executeImmediateFoldVarReference. 
Authored-by: milastdbx <milan.stefano...@databricks.com> Signed-off-by: Wenchen Fan <wenc...@databricks.com> --- .../spark/sql/catalyst/analysis/executeImmediate.scala | 2 +- .../scala/org/apache/spark/sql/ParametersSuite.scala | 18 +++++++++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala index 8fc373b71f25..7cc496616128 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/executeImmediate.scala @@ -177,7 +177,7 @@ class SubstituteExecuteImmediate(val catalogManager: CatalogManager) private def getVariableReference(expr: Expression, nameParts: Seq[String]): VariableReference = { lookupVariable(nameParts) match { - case Some(variable) => variable.copy(canFold = false) + case Some(variable) => variable case _ => throw QueryCompilationErrors .unresolvedVariableError( diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala index 974def7f3b85..2801948f6837 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala @@ -21,11 +21,12 @@ import java.time.{Instant, LocalDate, LocalDateTime, ZoneId} import org.apache.spark.sql.catalyst.expressions.Literal import org.apache.spark.sql.catalyst.parser.ParseException +import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.functions.{array, call_function, lit, map, map_from_arrays, map_from_entries, str_to_map, struct} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession -class ParametersSuite extends QueryTest with SharedSparkSession { +class 
ParametersSuite extends QueryTest with SharedSparkSession with PlanTest { test("bind named parameters") { val sqlText = @@ -607,4 +608,19 @@ class ParametersSuite extends QueryTest with SharedSparkSession { callSitePattern = getCurrentClassCallSitePattern) ) } + + test("SPARK-46481: Test variable folding") { + sql("DECLARE a INT = 1") + sql("SET VAR a = 1") + val expected = sql("SELECT 42 WHERE 1 = 1").queryExecution.optimizedPlan + val variableDirectly = sql("SELECT 42 WHERE 1 = a").queryExecution.optimizedPlan + val parameterizedSpark = + spark.sql("SELECT 42 WHERE 1 = ?", Array(1)).queryExecution.optimizedPlan + val parameterizedSql = + spark.sql("EXECUTE IMMEDIATE 'SELECT 42 WHERE 1 = ?' USING a").queryExecution.optimizedPlan + + comparePlans(expected, variableDirectly) + comparePlans(expected, parameterizedSpark) + comparePlans(expected, parameterizedSql) + } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org