This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e2cd71a4cd54 [SPARK-47059][SQL] Attach error context for ALTER COLUMN v1 command
e2cd71a4cd54 is described below

commit e2cd71a4cd54bbdf5af76d3edfbb2fc8c1b067b6
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Thu Feb 15 18:36:11 2024 +0300

    [SPARK-47059][SQL] Attach error context for ALTER COLUMN v1 command
    
    ### What changes were proposed in this pull request?
    
    This is a small fix to improve the error message for ALTER COLUMN. We attach
    the error context for the v1 command as well, making it consistent with the
    v2 command.
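
    For illustration, a sketch of the user-visible effect, taken from the
    updated golden files below (the error class, fragment, and indexes all
    come from the test output):

    ```sql
    -- On a v1 table, changing a column's type is not supported:
    ALTER TABLE test_change CHANGE a TYPE STRING;
    -- The resulting AnalysisException ([NOT_SUPPORTED_CHANGE_COLUMN]) now
    -- carries a queryContext entry pointing at the offending statement:
    --   fragment   = "ALTER TABLE test_change CHANGE a TYPE STRING"
    --   startIndex = 1, stopIndex = 44
    ```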
    
    ### Why are the changes needed?
    
    Better error messages: the attached query context points users to the exact
    SQL fragment that triggered the failure.
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    Updated existing tests (golden SQL test files and CharVarcharDDLTestBase).
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #45121 from cloud-fan/context.
    
    Authored-by: Wenchen Fan <wenc...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/errors/QueryCompilationErrors.scala  |  7 +++++--
 .../apache/spark/sql/execution/command/ddl.scala   |  2 +-
 .../analyzer-results/change-column.sql.out         |  9 +++++++-
 .../sql-tests/analyzer-results/charvarchar.sql.out |  9 +++++++-
 .../sql-tests/results/change-column.sql.out        |  9 +++++++-
 .../sql-tests/results/charvarchar.sql.out          |  9 +++++++-
 .../execution/command/CharVarcharDDLTestBase.scala | 24 ++++++----------------
 7 files changed, 44 insertions(+), 25 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 46028817e8eb..53338f38ed6d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2637,7 +2637,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
   def alterTableChangeColumnNotSupportedForColumnTypeError(
       tableName: String,
       originColumn: StructField,
-      newColumn: StructField): Throwable = {
+      newColumn: StructField,
+      origin: Origin): Throwable = {
     new AnalysisException(
       errorClass = "NOT_SUPPORTED_CHANGE_COLUMN",
       messageParameters = Map(
@@ -2645,7 +2646,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         "originName" -> toSQLId(originColumn.name),
         "originType" -> toSQLType(originColumn.dataType),
         "newName" -> toSQLId(newColumn.name),
-        "newType"-> toSQLType(newColumn.dataType)))
+        "newType"-> toSQLType(newColumn.dataType)),
+      origin = origin
+    )
   }
 
   def cannotAlterPartitionColumn(
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index dc1c5b3fd580..a5e48784ada1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -390,7 +390,7 @@ case class AlterTableChangeColumnCommand(
     // Throw an AnalysisException if the column name/dataType is changed.
     if (!columnEqual(originColumn, newColumn, resolver)) {
       throw QueryCompilationErrors.alterTableChangeColumnNotSupportedForColumnTypeError(
-        toSQLId(table.identifier.nameParts), originColumn, newColumn)
+        toSQLId(table.identifier.nameParts), originColumn, newColumn, this.origin)
     }
 
     val newDataSchema = table.dataSchema.fields.map { field =>
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/change-column.sql.out
index a3d4388ab84f..07edfa5e95e1 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/change-column.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/change-column.sql.out
@@ -69,7 +69,14 @@ org.apache.spark.sql.AnalysisException
     "originName" : "`a`",
     "originType" : "\"INT\"",
     "table" : "`spark_catalog`.`default`.`test_change`"
-  }
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 44,
+    "fragment" : "ALTER TABLE test_change CHANGE a TYPE STRING"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
index 4f556d6dbc0b..02f09e0831d2 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/charvarchar.sql.out
@@ -133,7 +133,14 @@ org.apache.spark.sql.AnalysisException
     "originName" : "`c`",
     "originType" : "\"CHAR(5)\"",
     "table" : "`spark_catalog`.`default`.`char_tbl1`"
-  }
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 50,
+    "fragment" : "alter table char_tbl1 change column c type char(6)"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
index 820834092cde..b1c0094bcfa9 100644
--- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
@@ -89,7 +89,14 @@ org.apache.spark.sql.AnalysisException
     "originName" : "`a`",
     "originType" : "\"INT\"",
     "table" : "`spark_catalog`.`default`.`test_change`"
-  }
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 44,
+    "fragment" : "ALTER TABLE test_change CHANGE a TYPE STRING"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
index 3ad363abd31b..8ff586516887 100644
--- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
@@ -268,7 +268,14 @@ org.apache.spark.sql.AnalysisException
     "originName" : "`c`",
     "originType" : "\"CHAR(5)\"",
     "table" : "`spark_catalog`.`default`.`char_tbl1`"
-  }
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 50,
+    "fragment" : "alter table char_tbl1 change column c type char(6)"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CharVarcharDDLTestBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CharVarcharDDLTestBase.scala
index 12d5870309f0..9c7f37027812 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CharVarcharDDLTestBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CharVarcharDDLTestBase.scala
@@ -47,11 +47,11 @@ trait CharVarcharDDLTestBase extends QueryTest with SQLTestUtils {
   test("not allow to change column for char(x) to char(y), x != y") {
     withTable("t") {
       sql(s"CREATE TABLE t(i STRING, c CHAR(4)) USING $format")
-      val sql1 = "ALTER TABLE t CHANGE COLUMN c TYPE CHAR(5)"
+      val alterSQL = "ALTER TABLE t CHANGE COLUMN c TYPE CHAR(5)"
       val table = getTableName("t")
       checkError(
           exception = intercept[AnalysisException] {
-            sql(sql1)
+            sql(alterSQL)
           },
           errorClass = "NOT_SUPPORTED_CHANGE_COLUMN",
           parameters = Map(
@@ -60,10 +60,7 @@ trait CharVarcharDDLTestBase extends QueryTest with SQLTestUtils {
             "newName" -> "`c`",
             "originName" -> "`c`",
             "table" -> table),
-          queryContext = table match {
-            case "`spark_catalog`.`default`.`t`" => Array.empty
-            case _ => Array(ExpectedContext(fragment = sql1, start = 0, stop = 41))
-          }
+          queryContext = Array(ExpectedContext(fragment = alterSQL, start = 0, stop = 41))
       )
     }
   }
@@ -84,10 +81,7 @@ trait CharVarcharDDLTestBase extends QueryTest with SQLTestUtils {
             "newName" -> "`c`",
             "originName" -> "`c`",
             "table" -> table),
-          queryContext = table match {
-            case "`spark_catalog`.`default`.`t`" => Array.empty
-            case _ => Array(ExpectedContext(fragment = sql1, start = 0, stop = 41))
-          }
+          queryContext = Array(ExpectedContext(fragment = sql1, start = 0, stop = 41))
       )
     }
   }
@@ -108,10 +102,7 @@ trait CharVarcharDDLTestBase extends QueryTest with SQLTestUtils {
             "newName" -> "`i`",
             "originName" -> "`i`",
             "table" -> table),
-          queryContext = table match {
-            case "`spark_catalog`.`default`.`t`" => Array.empty
-            case _ => Array(ExpectedContext(fragment = sql1, start = 0, stop = 41))
-          }
+          queryContext = Array(ExpectedContext(fragment = sql1, start = 0, stop = 41))
       )
     }
   }
@@ -140,10 +131,7 @@ trait CharVarcharDDLTestBase extends QueryTest with SQLTestUtils {
             "newName" -> "`c`",
             "originName" -> "`c`",
             "table" -> table),
-          queryContext = table match {
-            case "`spark_catalog`.`default`.`t`" => Array.empty
-            case _ => Array(ExpectedContext(fragment = sql1, start = 0, stop = 44))
-          }
+          queryContext = Array(ExpectedContext(fragment = sql1, start = 0, stop = 44))
       )
     }
   }

