This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new c421b519438 [3.4][SPARK-41488][SQL] Assign name to _LEGACY_ERROR_TEMP_1176 (and 1177)
c421b519438 is described below

commit c421b51943857a2a50ffbf7fac952b4c53ffae87
Author: itholic <haejoon....@databricks.com>
AuthorDate: Wed Feb 1 18:53:11 2023 +0300

    [3.4][SPARK-41488][SQL] Assign name to _LEGACY_ERROR_TEMP_1176 (and 1177)
    
    ### What changes were proposed in this pull request?
    
    Backport of https://github.com/apache/spark/pull/39705
    
    ### Why are the changes needed?
    
    We want to include PRs related to error classes in Spark 3.4.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    The CI should pass.
    
    Closes #39833 from itholic/41488-3.4.
    
    Lead-authored-by: itholic <haejoon....@databricks.com>
    Co-authored-by: Runyao Chen <runyao.c...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |  40 ++++++---
 .../spark/sql/catalyst/analysis/Analyzer.scala     |   2 +-
 .../spark/sql/catalyst/parser/AstBuilder.scala     |   2 +
 .../spark/sql/errors/QueryCompilationErrors.scala  |  11 +--
 .../spark/sql/errors/QueryParsingErrors.scala      |  21 +++++
 .../sql-tests/inputs/columnresolution-negative.sql |  14 +++
 .../results/columnresolution-negative.sql.out      | 100 +++++++++++++++++++++
 .../spark/sql/errors/QueryParsingErrorsSuite.scala |  72 +++++++++++++++
 .../spark/sql/execution/SQLViewTestSuite.scala     |   6 +-
 9 files changed, 248 insertions(+), 20 deletions(-)
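
For context, an illustrative sketch (not part of the commit) of how the renamed error class surfaces after this change. It assumes an active SparkSession `spark` on branch-3.4; the table and view names are hypothetical:

    // Recreate the incompatible-schema-change scenario from the new SQL tests.
    import org.apache.spark.sql.AnalysisException

    spark.sql("CREATE TABLE t(i1 INT) USING parquet")
    spark.sql("CREATE VIEW v AS SELECT * FROM t")
    spark.sql("DROP TABLE t")
    spark.sql("CREATE TABLE t USING parquet AS SELECT 1 AS i2")
    try {
      spark.sql("SELECT * FROM v")
    } catch {
      case e: AnalysisException =>
        // Expected: INCOMPATIBLE_VIEW_SCHEMA_CHANGE (was _LEGACY_ERROR_TEMP_1176)
        println(e.getErrorClass)
    }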

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 1cad00ad417..89eeeea8c2a 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -581,6 +581,35 @@
       "Detected an incompatible DataSourceRegister. Please remove the 
incompatible library from classpath or upgrade it. Error: <message>"
     ]
   },
+  "INCOMPATIBLE_VIEW_SCHEMA_CHANGE" : {
+    "message" : [
+      "The SQL query of view <viewName> has an incompatible schema change and 
column <colName> cannot be resolved. Expected <expectedNum> columns named 
<colName> but got <actualCols>.",
+      "Please try to re-create the view by running: <suggestion>."
+    ]
+  },
+  "INCOMPLETE_TYPE_DEFINITION" : {
+    "message" : [
+      "Incomplete complex type:"
+    ],
+    "subClass" : {
+      "ARRAY" : {
+        "message" : [
+          "The definition of \"ARRAY\" type is incomplete. You must provide an 
element type. For example: \"ARRAY<elementType>\"."
+        ]
+      },
+      "MAP" : {
+        "message" : [
+          "The definition of \"MAP\" type is incomplete. You must provide a 
key type and a value type. For example: \"MAP<TIMESTAMP, INT>\"."
+        ]
+      },
+      "STRUCT" : {
+        "message" : [
+          "The definition of \"STRUCT\" type is incomplete. You must provide 
at least one field type. For example: \"STRUCT<Field1: INT>\"."
+        ]
+      }
+    },
+    "sqlState" : "42K01"
+  },
   "INCONSISTENT_BEHAVIOR_CROSS_VERSION" : {
     "message" : [
       "You may get a different result due to the upgrading to"
@@ -2773,17 +2802,6 @@
       "Unsupported data type <dataType>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1176" : {
-    "message" : [
-      "The SQL query of view <viewName> has an incompatible schema change and 
column <colName> cannot be resolved. Expected <expectedNum> columns named 
<colName> but got <actualCols>.",
-      "Please try to re-create the view by running: <viewDDL>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1177" : {
-    "message" : [
-      "The SQL query of view <viewName> has an incompatible schema change and 
column <colName> cannot be resolved. Expected <expectedNum> columns named 
<colName> but got <actualCols>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1178" : {
     "message" : [
       "The number of partitions can't be specified with unspecified 
distribution. Invalid writer requirements detected."
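
A side note (illustrative, not part of the diff): for error classes with sub-classes, the rendered message is the base message followed by the matching sub-class message, so the ARRAY case above is reported to the user roughly as:

    [INCOMPLETE_TYPE_DEFINITION.ARRAY] Incomplete complex type:
    The definition of "ARRAY" type is incomplete. You must provide an element type. For example: "ARRAY<elementType>".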
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 7a92c46577d..bf66afffdde 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -2092,7 +2092,7 @@ class Analyzer(override val catalogManager: CatalogManager)
           val attrCandidates = getAttrCandidates()
           val matched = attrCandidates.filter(a => resolver(a.name, colName))
           if (matched.length != expectedNumCandidates) {
-            throw QueryCompilationErrors.incompatibleViewSchemaChange(
+            throw QueryCompilationErrors.incompatibleViewSchemaChangeError(
               viewName, colName, expectedNumCandidates, matched, viewDDL)
           }
           matched(ordinal)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index c6e50f3f514..d2a1cb1eb16 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2889,6 +2889,8 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
       case ("interval", Nil) => CalendarIntervalType
       case (dt @ ("character" | "char" | "varchar"), Nil) =>
         throw QueryParsingErrors.charTypeMissingLengthError(dt, ctx)
+      case (dt @ ("array" | "struct" | "map"), Nil) =>
+        throw QueryParsingErrors.nestedTypeMissingElementTypeError(dt, ctx)
       case (dt, params) =>
        val dtStr = if (params.nonEmpty) s"$dt(${params.mkString(",")})" else dt
         throw QueryParsingErrors.dataTypeUnsupportedError(dtStr, ctx)
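
A hedged illustration (not from the patch) of queries that now reach the new branch; the assumed session is `spark`, and each statement, run on its own, fails with the named sub-class instead of the generic unsupported-data-type parse error:

    spark.sql("SELECT CAST(array(1, 2, 3) AS ARRAY)") // INCOMPLETE_TYPE_DEFINITION.ARRAY
    spark.sql("SELECT CAST(struct(1, 2) AS STRUCT)")  // INCOMPLETE_TYPE_DEFINITION.STRUCT
    spark.sql("SELECT CAST(map(1, 'a') AS MAP)")      // INCOMPLETE_TYPE_DEFINITION.MAP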
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index ff53588a215..ae094e7f577 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1756,7 +1756,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       messageParameters = Map("dataType" -> field.dataType.catalogString))
   }
 
-  def incompatibleViewSchemaChange(
+  def incompatibleViewSchemaChangeError(
       viewName: String,
       colName: String,
       expectedNum: Int,
@@ -1764,21 +1764,22 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       viewDDL: Option[String]): Throwable = {
     viewDDL.map { v =>
       new AnalysisException(
-        errorClass = "_LEGACY_ERROR_TEMP_1176",
+        errorClass = "INCOMPATIBLE_VIEW_SCHEMA_CHANGE",
         messageParameters = Map(
           "viewName" -> viewName,
           "colName" -> colName,
           "expectedNum" -> expectedNum.toString,
           "actualCols" -> actualCols.map(_.name).mkString("[", ",", "]"),
-          "viewDDL" -> v))
+          "suggestion" -> v))
     }.getOrElse {
       new AnalysisException(
-        errorClass = "_LEGACY_ERROR_TEMP_1177",
+        errorClass = "INCOMPATIBLE_VIEW_SCHEMA_CHANGE",
         messageParameters = Map(
           "viewName" -> viewName,
           "colName" -> colName,
           "expectedNum" -> expectedNum.toString,
-          "actualCols" -> actualCols.map(_.name).mkString("[", ",", "]")))
+          "actualCols" -> actualCols.map(_.name).mkString("[", ",", "]"),
+          "suggestion" -> "CREATE OR REPLACE TEMPORARY VIEW"))
     }
   }
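
Both legacy codes now map to the single INCOMPATIBLE_VIEW_SCHEMA_CHANGE class; the `suggestion` parameter carries either the reconstructed view DDL or the fixed `CREATE OR REPLACE TEMPORARY VIEW` hint. A minimal sketch of the two branches (hypothetical argument values; the builder is private[sql], so this only compiles inside the org.apache.spark.sql package):

    import org.apache.spark.sql.errors.QueryCompilationErrors

    // Persisted view: the caller has a reconstructed DDL to suggest.
    val persisted = QueryCompilationErrors.incompatibleViewSchemaChangeError(
      "spark_catalog.mydb1.v1", "i1", expectedNum = 1, actualCols = Seq.empty,
      viewDDL = Some("CREATE OR REPLACE VIEW spark_catalog.mydb1.v1 AS SELECT * FROM t1"))

    // Temp view: no DDL is available, so a generic suggestion is substituted.
    val temp = QueryCompilationErrors.incompatibleViewSchemaChangeError(
      "v2", "i1", expectedNum = 1, actualCols = Seq.empty, viewDDL = None)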
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 29766251abd..e54bbb9c9d1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -292,6 +292,27 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase {
       ctx)
   }
 
+  def nestedTypeMissingElementTypeError(
+      dataType: String, ctx: PrimitiveDataTypeContext): Throwable = {
+    dataType match {
+      case "array" =>
+        new ParseException(
+          errorClass = "INCOMPLETE_TYPE_DEFINITION.ARRAY",
+          messageParameters = Map("elementType" -> "<INT>"),
+          ctx)
+      case "struct" =>
+        new ParseException(
+          errorClass = "INCOMPLETE_TYPE_DEFINITION.STRUCT",
+          messageParameters = Map.empty,
+          ctx)
+      case "map" =>
+        new ParseException(
+          errorClass = "INCOMPLETE_TYPE_DEFINITION.MAP",
+          messageParameters = Map.empty,
+          ctx)
+    }
+  }
+
   def partitionTransformNotExpectedError(
      name: String, describe: String, ctx: ApplyTransformContext): Throwable = {
     new ParseException(
diff --git a/sql/core/src/test/resources/sql-tests/inputs/columnresolution-negative.sql b/sql/core/src/test/resources/sql-tests/inputs/columnresolution-negative.sql
index 1caa45c6674..d100023b4ee 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/columnresolution-negative.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/columnresolution-negative.sql
@@ -31,6 +31,20 @@ SELECT t1 FROM mydb1.t1;
 USE mydb2;
 SELECT mydb1.t1.i1 FROM t1;
 
+-- Negative tests: view cannot resolve column after incompatible schema change
+USE mydb1;
+CREATE VIEW v1 AS SELECT * FROM t1;
+DROP TABLE t1;
+CREATE TABLE t1 USING parquet AS SELECT 1 AS i2;
+SELECT * FROM v1;
+
+-- Negative tests: temp view cannot resolve column after incompatible schema change
+USE mydb2;
+CREATE TEMP VIEW v2 AS SELECT * FROM t1;
+DROP TABLE t1;
+CREATE TABLE t1 USING parquet AS SELECT 1 AS i2;
+SELECT * FROM v2;
+
 -- reset
 DROP DATABASE mydb1 CASCADE;
 DROP DATABASE mydb2 CASCADE;
diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
index 773b5c3be8f..2f47b14d538 100644
--- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
@@ -416,6 +416,106 @@ org.apache.spark.sql.AnalysisException
 }
 
 
+-- !query
+USE mydb1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE VIEW v1 AS SELECT * FROM t1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DROP TABLE t1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TABLE t1 USING parquet AS SELECT 1 AS i2
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT * FROM v1
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INCOMPATIBLE_VIEW_SCHEMA_CHANGE",
+  "messageParameters" : {
+    "actualCols" : "[]",
+    "colName" : "i1",
+    "expectedNum" : "1",
+    "suggestion" : "CREATE OR REPLACE VIEW spark_catalog.mydb1.v1  AS SELECT * 
FROM t1",
+    "viewName" : "`spark_catalog`.`mydb1`.`v1`"
+  }
+}
+
+
+-- !query
+USE mydb2
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TEMP VIEW v2 AS SELECT * FROM t1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DROP TABLE t1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TABLE t1 USING parquet AS SELECT 1 AS i2
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT * FROM v2
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "INCOMPATIBLE_VIEW_SCHEMA_CHANGE",
+  "messageParameters" : {
+    "actualCols" : "[]",
+    "colName" : "i1",
+    "expectedNum" : "1",
+    "suggestion" : "CREATE OR REPLACE TEMPORARY VIEW",
+    "viewName" : "`v2`"
+  }
+}
+
+
 -- !query
 DROP DATABASE mydb1 CASCADE
 -- !query schema
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 71483534d40..b30998b6aa0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -546,4 +546,76 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
         start = 0,
         stop = 124))
   }
+
+  test("INCOMPLETE_TYPE_DEFINITION: array type definition is incomplete") {
+    // Cast simple array without specifying element type
+    checkError(
+      exception = parseException("SELECT CAST(array(1,2,3) AS ARRAY)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.ARRAY",
+      sqlState = "42K01",
+      parameters = Map("elementType" -> "<INT>"),
+      context = ExpectedContext(fragment = "ARRAY", start = 28, stop = 32))
+    // Cast array of array without specifying element type for inner array
+    checkError(
+      exception = parseException("SELECT CAST(array(array(3)) AS 
ARRAY<ARRAY>)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.ARRAY",
+      sqlState = "42K01",
+      parameters = Map("elementType" -> "<INT>"),
+      context = ExpectedContext(fragment = "ARRAY", start = 37, stop = 41))
+    // Create column of array type without specifying element type
+    checkError(
+      exception = parseException("CREATE TABLE tbl_120691 (col1 ARRAY)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.ARRAY",
+      sqlState = "42K01",
+      parameters = Map("elementType" -> "<INT>"),
+      context = ExpectedContext(fragment = "ARRAY", start = 30, stop = 34))
+  }
+
+  test("INCOMPLETE_TYPE_DEFINITION: struct type definition is incomplete") {
+    // Cast simple struct without specifying field type
+    checkError(
+      exception = parseException("SELECT CAST(struct(1,2,3) AS STRUCT)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.STRUCT",
+      sqlState = "42K01",
+      context = ExpectedContext(fragment = "STRUCT", start = 29, stop = 34))
+    // Cast array of struct without specifying field type in struct
+    checkError(
+      exception = parseException("SELECT CAST(array(struct(1,2)) AS 
ARRAY<STRUCT>)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.STRUCT",
+      sqlState = "42K01",
+      context = ExpectedContext(fragment = "STRUCT", start = 40, stop = 45))
+    // Create column of struct type without specifying field type
+    checkError(
+      exception = parseException("CREATE TABLE tbl_120691 (col1 STRUCT)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.STRUCT",
+      sqlState = "42K01",
+      context = ExpectedContext(fragment = "STRUCT", start = 30, stop = 35))
+    // Invalid syntax `STRUCT<INT>` without field name
+    checkError(
+      exception = parseException("SELECT CAST(struct(1,2,3) AS STRUCT<INT>)"),
+      errorClass = "PARSE_SYNTAX_ERROR",
+      sqlState = "42601",
+      parameters = Map("error" -> "'>'", "hint" -> ""))
+  }
+
+  test("INCOMPLETE_TYPE_DEFINITION: map type definition is incomplete") {
+    // Cast simple map without specifying element type
+    checkError(
+      exception = parseException("SELECT CAST(map(1,'2') AS MAP)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.MAP",
+      sqlState = "42K01",
+      context = ExpectedContext(fragment = "MAP", start = 26, stop = 28))
+    // Create column of map type without specifying key/value types
+    checkError(
+      exception = parseException("CREATE TABLE tbl_120691 (col1 MAP)"),
+      errorClass = "INCOMPLETE_TYPE_DEFINITION.MAP",
+      sqlState = "42K01",
+      context = ExpectedContext(fragment = "MAP", start = 30, stop = 32))
+    // Invalid syntax `MAP<String>` with only key type
+    checkError(
+      exception = parseException("SELECT CAST(map('1',2) AS MAP<STRING>)"),
+      errorClass = "PARSE_SYNTAX_ERROR",
+      sqlState = "42601",
+      parameters = Map("error" -> "'>'", "hint" -> ""))
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
index 15d14714e7d..1d4c52d3ae5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
@@ -586,15 +586,15 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
         }
         checkError(
           exception = e,
-          errorClass = "_LEGACY_ERROR_TEMP_1176",
+          errorClass = "INCOMPATIBLE_VIEW_SCHEMA_CHANGE",
           parameters = Map(
             "viewName" -> "`spark_catalog`.`default`.`test_view`",
-            "viewDDL" ->
+            "suggestion" ->
               "CREATE OR REPLACE VIEW spark_catalog.default.test_view  AS 
SELECT * FROM t",
             "actualCols" -> "[]", "colName" -> "col_j",
             "expectedNum" -> "1")
         )
-        val ddl = e.getMessageParameters.get("viewDDL")
+        val ddl = e.getMessageParameters.get("suggestion")
         sql(ddl)
         checkAnswer(sql("select * FROM test_view"), Row(1))
       }
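
As the updated test shows, the `suggestion` parameter is directly executable for persisted views. A minimal end-user recovery sketch (assumed session `spark`; the view name is hypothetical):

    import org.apache.spark.sql.AnalysisException

    try {
      spark.sql("SELECT * FROM test_view")
    } catch {
      case e: AnalysisException if e.getErrorClass == "INCOMPATIBLE_VIEW_SCHEMA_CHANGE" =>
        // Re-create the view from the DDL carried in the error, then retry.
        spark.sql(e.getMessageParameters.get("suggestion"))
    }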


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
