This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f373df8a757 [SPARK-42158][SQL] Integrate `_LEGACY_ERROR_TEMP_1003` into `FIELD_NOT_FOUND`
f373df8a757 is described below

commit f373df8a757e36ea84275c637087045d6cca3939
Author: itholic <haejoon....@databricks.com>
AuthorDate: Fri Jan 27 10:40:47 2023 +0300

    [SPARK-42158][SQL] Integrate `_LEGACY_ERROR_TEMP_1003` into `FIELD_NOT_FOUND`
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to integrate `_LEGACY_ERROR_TEMP_1003` into `FIELD_NOT_FOUND`.
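
    For illustration, the user-visible message changes roughly as follows (a
    sketch: the statement and column names are taken from the updated tests
    below, and the new text assumes the existing `FIELD_NOT_FOUND` template
    "No such struct field <fieldName> in <fields>."):

        sql("ALTER TABLE t ADD COLUMN c string AFTER non_exist")
        // before: Couldn't find the reference column for AFTER non_exist at root.
        // after:  [FIELD_NOT_FOUND] No such struct field `c` in a, point, b.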
    
    ### Why are the changes needed?
    
    We should deduplicate similar error classes into a single error class by merging them.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Fixed existing UTs.
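
    For example, the migrated assertions check the structured error class and
    its message parameters instead of matching raw message text (a sketch drawn
    from the AlterTableTests diff below; `checkError` is the structured-error
    assertion helper used in Spark's SQL tests):

        val e1 = intercept[AnalysisException](
          sql(s"ALTER TABLE $t ADD COLUMN c string AFTER non_exist"))
        checkError(
          exception = e1,
          errorClass = "FIELD_NOT_FOUND",
          parameters = Map("fieldName" -> "`c`", "fields" -> "a, point, b"))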
    
    Closes #39706 from itholic/LEGACY_1003.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |  5 --
 .../spark/sql/catalyst/analysis/Analyzer.scala     |  3 +-
 .../spark/sql/errors/QueryCompilationErrors.scala  |  8 ++-
 .../spark/sql/connector/AlterTableTests.scala      | 18 +++++-
 .../connector/V2CommandsCaseSensitivitySuite.scala | 64 +++++++++++++++-------
 5 files changed, 65 insertions(+), 33 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 5d2e184874a..e6876751a22 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -2031,11 +2031,6 @@
       "Try moving this class out of its parent class."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1003" : {
-    "message" : [
-      "Couldn't find the reference column for <after> at <parentName>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1004" : {
     "message" : [
       "Window specification <windowName> is not defined in the WINDOW clause."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index f0c22471afa..6f27c97ddf9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -4053,9 +4053,8 @@ class Analyzer(override val catalogManager: CatalogManager)
                   case Some(colName) =>
                     ResolvedFieldPosition(ColumnPosition.after(colName))
                   case None =>
-                    val name = if (resolvedParentName.isEmpty) "root" else resolvedParentName.quoted
                     throw QueryCompilationErrors.referenceColNotFoundForAlterTableChangesError(
-                      after, name)
+                      col.colName, allFields)
                 }
               case _ => ResolvedFieldPosition(u.position)
             }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index c415fb91c5d..1a8c42b599e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -295,10 +295,12 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def referenceColNotFoundForAlterTableChangesError(
-      after: TableChange.After, parentName: String): Throwable = {
+      fieldName: String, fields: Array[String]): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1003",
-      messageParameters = Map("after" -> after.toString, "parentName" -> parentName))
+      errorClass = "FIELD_NOT_FOUND",
+      messageParameters = Map(
+        "fieldName" -> toSQLId(fieldName),
+        "fields" -> fields.mkString(", ")))
   }
 
   def windowSpecificationNotDefinedError(windowName: String): Throwable = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala
index b69a0628f3e..2047212a4ea 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTableTests.scala
@@ -160,7 +160,11 @@ trait AlterTableTests extends SharedSparkSession with QueryErrorsBase {
 
       val e1 = intercept[AnalysisException](
         sql(s"ALTER TABLE $t ADD COLUMN c string AFTER non_exist"))
-      assert(e1.getMessage().contains("Couldn't find the reference column"))
+      checkError(
+        exception = e1,
+        errorClass = "FIELD_NOT_FOUND",
+        parameters = Map("fieldName" -> "`c`", "fields" -> "a, point, b")
+      )
 
       sql(s"ALTER TABLE $t ADD COLUMN point.y int FIRST")
       assert(getTableMetadata(tableName).schema == new StructType()
@@ -181,7 +185,11 @@ trait AlterTableTests extends SharedSparkSession with QueryErrorsBase {
 
       val e2 = intercept[AnalysisException](
         sql(s"ALTER TABLE $t ADD COLUMN point.x2 int AFTER non_exist"))
-      assert(e2.getMessage().contains("Couldn't find the reference column"))
+      checkError(
+        exception = e2,
+        errorClass = "FIELD_NOT_FOUND",
+        parameters = Map("fieldName" -> "`x2`", "fields" -> "y, x, z")
+      )
     }
   }
 
@@ -218,7 +226,11 @@ trait AlterTableTests extends SharedSparkSession with QueryErrorsBase {
       // The new column being referenced should come before being referenced.
       val e = intercept[AnalysisException](
         sql(s"ALTER TABLE $t ADD COLUMNS (yy int AFTER xx, xx int)"))
-      assert(e.getMessage().contains("Couldn't find the reference column for AFTER xx at root"))
+      checkError(
+        exception = e,
+        errorClass = "FIELD_NOT_FOUND",
+        parameters = Map("fieldName" -> "`yy`", "fields" -> "a, x, y, z, b, point")
+      )
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/V2CommandsCaseSensitivitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/V2CommandsCaseSensitivitySuite.scala
index ffe8bbb2833..44cd4f0f9b3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/V2CommandsCaseSensitivitySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/V2CommandsCaseSensitivitySuite.scala
@@ -160,8 +160,7 @@ class V2CommandsCaseSensitivitySuite
 
   test("AlterTable: add column resolution - positional") {
     Seq("ID", "iD").foreach { ref =>
-      alterTableTest(
-        AddColumns(
+      val alter = AddColumns(
           table,
           Seq(QualifiedColType(
             None,
@@ -170,15 +169,21 @@ class V2CommandsCaseSensitivitySuite
             true,
             None,
             Some(UnresolvedFieldPosition(ColumnPosition.after(ref))),
-            None))),
-        Seq("reference column", ref)
-      )
+            None)))
+      Seq(true, false).foreach { caseSensitive =>
+        withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
+          assertAnalysisErrorClass(
+            inputPlan = alter,
+            expectedErrorClass = "FIELD_NOT_FOUND",
+            expectedMessageParameters = Map("fieldName" -> "`f`", "fields" -> "id, data, point")
+          )
+        }
+      }
     }
   }
 
   test("AlterTable: add column resolution - column position referencing new 
column") {
-    alterTableTest(
-      AddColumns(
+    val alter = AddColumns(
         table,
         Seq(QualifiedColType(
           None,
@@ -195,15 +200,21 @@ class V2CommandsCaseSensitivitySuite
           true,
           None,
           Some(UnresolvedFieldPosition(ColumnPosition.after("X"))),
-          None))),
-      Seq("Couldn't find the reference column for AFTER X at root")
-    )
+          None)))
+    Seq(true, false).foreach { caseSensitive =>
+      withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
+        assertAnalysisErrorClass(
+          inputPlan = alter,
+          expectedErrorClass = "FIELD_NOT_FOUND",
+          expectedMessageParameters = Map("fieldName" -> "`y`", "fields" -> "id, data, point, x")
+        )
+      }
+    }
   }
 
   test("AlterTable: add column resolution - nested positional") {
     Seq("X", "Y").foreach { ref =>
-      alterTableTest(
-        AddColumns(
+      val alter = AddColumns(
           table,
           Seq(QualifiedColType(
             Some(UnresolvedFieldName(Seq("point"))),
@@ -212,15 +223,21 @@ class V2CommandsCaseSensitivitySuite
             true,
             None,
             Some(UnresolvedFieldPosition(ColumnPosition.after(ref))),
-            None))),
-        Seq("reference column", ref)
-      )
+            None)))
+      Seq(true, false).foreach { caseSensitive =>
+        withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
+          assertAnalysisErrorClass(
+            inputPlan = alter,
+            expectedErrorClass = "FIELD_NOT_FOUND",
+            expectedMessageParameters = Map("fieldName" -> "`z`", "fields" -> "x, y")
+          )
+        }
+      }
     }
   }
 
   test("AlterTable: add column resolution - column position referencing new 
nested column") {
-    alterTableTest(
-      AddColumns(
+    val alter = AddColumns(
         table,
         Seq(QualifiedColType(
           Some(UnresolvedFieldName(Seq("point"))),
@@ -237,9 +254,16 @@ class V2CommandsCaseSensitivitySuite
           true,
           None,
           Some(UnresolvedFieldPosition(ColumnPosition.after("Z"))),
-          None))),
-      Seq("Couldn't find the reference column for AFTER Z at point")
-    )
+          None)))
+    Seq(true, false).foreach { caseSensitive =>
+      withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
+        assertAnalysisErrorClass(
+          inputPlan = alter,
+          expectedErrorClass = "FIELD_NOT_FOUND",
+          expectedMessageParameters = Map("fieldName" -> "`zz`", "fields" -> "x, y, z")
+        )
+      }
+    }
   }
 
   test("SPARK-36372: Adding duplicate columns should not be allowed") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
