This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b63674ea5f7 [SPARK-39078][SQL] Fix a bug in UPDATE commands with DEFAULT values
b63674ea5f7 is described below

commit b63674ea5f746306a96ab8c39c23a230a6cb9566
Author: Daniel Tenedorio <daniel.tenedo...@databricks.com>
AuthorDate: Fri May 13 00:46:29 2022 +0800

    [SPARK-39078][SQL] Fix a bug in UPDATE commands with DEFAULT values
    
    ### What changes were proposed in this pull request?
    
    Fix a bug in UPDATE commands with DEFAULT values.
    
    Specifically, the previous implementation of the
    `mapStructFieldNamesToExpressions` method paired column names with
    default-value expressions in the iteration order of the keys of a
    `Map[String, StructField]`. Since that key order is not guaranteed to
    match the column order of the target table schema, this PR updates the
    method to consume the columns in the order they actually appear in the
    target table schema instead.
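
    A minimal sketch of the ordering hazard, using plain Scala stand-ins for
    Spark's `StructField` and `Expression` (hypothetical types, for
    illustration only):

    ```scala
    // Stand-ins for Spark's StructField and Expression (hypothetical).
    case class Field(name: String)
    case class Expr(sql: String)

    // Columns in target-table order, one expression per column in that order.
    val schemaFields = Seq(Field("a"), Field("b"), Field("c"), Field("d"), Field("e"))
    val expressions  = schemaFields.map(f => Expr(s"default for ${f.name}"))

    // Old approach: build a Map first, then zip its keys with the expressions.
    // Scala's immutable Map switches to hashing above four entries, so the key
    // iteration order need not match the insertion (schema) order.
    val namesToFields: Map[String, Field] = schemaFields.map(f => f.name -> f).toMap
    val possiblyMisaligned = namesToFields.keys.toSeq.zip(expressions).toMap

    // Fixed approach: zip the schema fields directly, preserving schema order.
    val aligned = schemaFields.zip(expressions).map { case (f, e) => f.name -> e }.toMap
    ```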
    
    ### Why are the changes needed?
    
    Without this change, UPDATE commands with DEFAULT values could pair a
    column with the wrong default-value expression whenever the map's key
    order diverged from the schema's column order; this PR fixes that bug.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    An extra unit test exercises a case that failed before this fix and now
    passes, as sketched below.
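
    For reference, the failing scenario resembles the following (assuming a
    SparkSession `spark`; the suite mocks the `defaultvalues2` table rather
    than creating one):

    ```scala
    // Hypothetical repro sketch. Think of a table shaped like:
    //   defaultvalues2(i STRING, e STRING DEFAULT 'abc')
    // Before this fix, resolution could pair column `i` with the wrong entry
    // of the name-to-default map and fail; afterwards, i=DEFAULT correctly
    // resolves to NULL because `i` declares no default.
    spark.sql("UPDATE defaultvalues2 SET i = DEFAULT")
    ```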
    
    Closes #36514 from dtenedor/default-update-fix-bug.
    
    Authored-by: Daniel Tenedorio <daniel.tenedo...@databricks.com>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../catalyst/analysis/ResolveDefaultColumns.scala  |  7 +++---
 .../execution/command/PlanResolutionSuite.scala    | 26 ++++++++++++++++++++++
 2 files changed, 30 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveDefaultColumns.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveDefaultColumns.scala
index 9612713f593..80861962f06 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveDefaultColumns.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveDefaultColumns.scala
@@ -431,9 +431,10 @@ case class ResolveDefaultColumns(
   private def mapStructFieldNamesToExpressions(
       schema: StructType,
       expressions: Seq[Expression]): Map[String, Expression] = {
-    val namesToFields: Map[String, StructField] = mapStructFieldNamesToFields(schema)
-    val namesAndExpressions: Seq[(String, Expression)] = namesToFields.keys.toSeq.zip(expressions)
-    namesAndExpressions.toMap
+    schema.fields.zip(expressions).map {
+      case (field: StructField, expression: Expression) =>
+        normalizeFieldName(field.name) -> expression
+    }.toMap
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index 84b900f4cd7..7e86bd01912 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -101,6 +101,19 @@ class PlanResolutionSuite extends AnalysisTest {
     t
   }
 
+  private val defaultValues2: Table = {
+    val t = mock(classOf[Table])
+    when(t.schema()).thenReturn(
+      new StructType()
+        .add("i", StringType)
+        .add("e", StringType, true,
+          new MetadataBuilder()
+            .putString(ResolveDefaultColumns.CURRENT_DEFAULT_COLUMN_METADATA_KEY, "'abc'")
+            .putString(ResolveDefaultColumns.EXISTS_DEFAULT_COLUMN_METADATA_KEY, "'abc'").build()))
+    when(t.partitioning()).thenReturn(Array.empty[Transform])
+    t
+  }
+
   private val v1Table: V1Table = {
     val t = mock(classOf[CatalogTable])
     when(t.schema).thenReturn(new StructType()
@@ -137,6 +150,7 @@ class PlanResolutionSuite extends AnalysisTest {
         case "tab2" => table2
         case "charvarchar" => charVarcharTable
         case "defaultvalues" => defaultValues
+        case "defaultvalues2" => defaultValues2
         case name => throw new NoSuchTableException(name)
       }
     })
@@ -998,6 +1012,7 @@ class PlanResolutionSuite extends AnalysisTest {
       val sql6 = s"UPDATE $tblName SET i=DEFAULT, s=DEFAULT"
       val sql7 = s"UPDATE defaultvalues SET i=DEFAULT, s=DEFAULT"
       val sql8 = s"UPDATE $tblName SET name='Robert', age=32 WHERE p=DEFAULT"
+      val sql9 = s"UPDATE defaultvalues2 SET i=DEFAULT"
 
       val parsed1 = parseAndResolve(sql1)
       val parsed2 = parseAndResolve(sql2)
@@ -1006,6 +1021,7 @@ class PlanResolutionSuite extends AnalysisTest {
       val parsed5 = parseAndResolve(sql5)
       val parsed6 = parseAndResolve(sql6)
       val parsed7 = parseAndResolve(sql7, true)
+      val parsed9 = parseAndResolve(sql9, true)
 
       parsed1 match {
         case UpdateTable(
@@ -1109,6 +1125,16 @@ class PlanResolutionSuite extends AnalysisTest {
         parseAndResolve(sql8)
      }.getMessage.contains(
        QueryCompilationErrors.defaultReferencesNotAllowedInUpdateWhereClause().getMessage))
+
+      parsed9 match {
+        case UpdateTable(
+        _,
+        Seq(Assignment(i: AttributeReference, AnsiCast(Literal(null, _), StringType, _))),
+        None) =>
+          assert(i.name == "i")
+
+        case _ => fail("Expect UpdateTable, but got:\n" + parsed9.treeString)
+      }
     }
 
     val sql1 = "UPDATE non_existing SET id=1"

