This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 074444bd71f [SPARK-41179][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1092
074444bd71f is described below

commit 074444bd71f088d1a5acb6f2ecf92d71ed06ef21
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Thu Nov 24 09:17:45 2022 +0300

    [SPARK-41179][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1092
    
    ### What changes were proposed in this pull request?
    In this PR, I propose to assign the name `INVALID_SCHEMA` to the error class `_LEGACY_ERROR_TEMP_1092`.
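    
    For illustration, a minimal standalone reproducer now fails with the `INVALID_SCHEMA` error class instead of the temporary one (a sketch based on the test cases added below; the object name, app name and local-mode session are assumptions for the example only):
    
    ```scala
    import org.apache.spark.sql.{AnalysisException, SparkSession}

    object InvalidSchemaExample {
      def main(args: Array[String]): Unit = {
        // Local session solely for this sketch.
        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("invalid-schema-example")
          .getOrCreate()
        try {
          // The schema argument of from_json must be a string literal (or the
          // output of schema_of_json); the integer literal 1 fails analysis.
          spark.sql("""select from_json('{"a":1}', 1)""").show()
        } catch {
          case e: AnalysisException =>
            // Reported as _LEGACY_ERROR_TEMP_1092 before this change.
            println(e.getErrorClass) // INVALID_SCHEMA
            println(e.getMessage)    // [INVALID_SCHEMA] The expression "1" is not a valid schema string. ...
        } finally {
          spark.stop()
        }
      }
    }
    ```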
    
    ### Why are the changes needed?
    Proper names of error classes should improve user experience with Spark SQL.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA (GitHub Actions).
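    
    The renamed error class is also covered by the new `checkError` assertions in the touched suites; below is a condensed, self-contained sketch of that check (the suite and test names are illustrative, and an empty options map stands in for the suite's local `options` value):
    
    ```scala
    import java.util.Collections

    import org.apache.spark.sql.{AnalysisException, QueryTest}
    import org.apache.spark.sql.functions.{from_csv, lit}
    import org.apache.spark.sql.test.SharedSparkSession

    // Illustrative suite; the real assertions live in CsvFunctionsSuite,
    // JsonFunctionsSuite and DataFrameFunctionsSuite (see the diff below).
    class InvalidSchemaCheckSuite extends QueryTest with SharedSparkSession {
      import testImplicits._

      test("a non-string schema literal is rejected with INVALID_SCHEMA") {
        checkError(
          exception = intercept[AnalysisException] {
            // Passing lit(1) instead of a schema string triggers the error.
            Seq("1").toDF("csv")
              .select(from_csv($"csv", lit(1), Collections.emptyMap[String, String]()))
              .collect()
          },
          errorClass = "INVALID_SCHEMA",
          parameters = Map("expr" -> "\"1\""))
      }
    }
    ```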
    
    Closes #38710 from panbingkun/SPARK-41179.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json        | 10 +++++-----
 .../spark/sql/errors/QueryCompilationErrors.scala       |  4 ++--
 .../resources/sql-tests/results/csv-functions.sql.out   |  4 ++--
 .../resources/sql-tests/results/json-functions.sql.out  |  4 ++--
 .../scala/org/apache/spark/sql/CsvFunctionsSuite.scala  | 11 +++++++----
 .../org/apache/spark/sql/DataFrameFunctionsSuite.scala  | 17 +++++++++++++++++
 .../scala/org/apache/spark/sql/JsonFunctionsSuite.scala | 17 +++++++++++++----
 7 files changed, 48 insertions(+), 19 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index a89fffde51d..c58f9b9fb38 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -756,6 +756,11 @@
       "<protobufClassName> is not a Protobuf message type"
     ]
   },
+  "INVALID_SCHEMA" : {
+    "message" : [
+      "The expression <expr> is not a valid schema string."
+    ]
+  },
   "INVALID_SQL_SYNTAX" : {
     "message" : [
       "Invalid SQL syntax: <inputString>"
@@ -2170,11 +2175,6 @@
       "Cannot read table property '<key>' as it's corrupted.<details>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1092" : {
-    "message" : [
-      "The expression '<expr>' is not a valid schema string."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1093" : {
     "message" : [
       "Schema should be specified in DDL format as a string literal or output 
of the schema_of_json/schema_of_csv functions instead of <expr>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index f52a0345bce..7772dd5e9a3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -995,8 +995,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def invalidSchemaStringError(exp: Expression): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1092",
-      messageParameters = Map("expr" -> exp.sql))
+      errorClass = "INVALID_SCHEMA",
+      messageParameters = Map("expr" -> toSQLExpr(exp)))
   }
 
   def schemaNotFoldableError(exp: Expression): Throwable = {
diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index c2be9ed7d0b..0b5a63c28e4 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -22,9 +22,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1092",
+  "errorClass" : "INVALID_SCHEMA",
   "messageParameters" : {
-    "expr" : "1"
+    "expr" : "\"1\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index 3c98cc6e856..ab1465350d8 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -148,9 +148,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1092",
+  "errorClass" : "INVALID_SCHEMA",
   "messageParameters" : {
-    "expr" : "1"
+    "expr" : "\"1\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
index 0c3703ee89e..2a3058d9395 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CsvFunctionsSuite.scala
@@ -363,10 +363,13 @@ class CsvFunctionsSuite extends QueryTest with SharedSparkSession {
     }.getMessage
     assert(errMsg.contains("Schema should be specified in DDL format as a string literal"))
 
-    val errMsg2 = intercept[AnalysisException] {
-      Seq("1").toDF("csv").select(from_csv($"csv", lit(1), options)).collect()
-    }.getMessage
-    assert(errMsg2.contains("The expression '1' is not a valid schema string"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        Seq("1").toDF("csv").select(from_csv($"csv", lit(1), options)).collect()
+      },
+      errorClass = "INVALID_SCHEMA",
+      parameters = Map("expr" -> "\"1\"")
+    )
   }
 
   test("schema_of_csv - infers the schema of foldable CSV string") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 61f671437f0..6baca3a4beb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -5200,6 +5200,23 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       Seq(Row(Map("a" -> Map("a" -> 6, "b" -> 8), "b" -> Map("a" -> 8, "b" -> 10))))
     )
   }
+
+  test("from_json - invalid schema string") {
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select from_json('{\"a\":1}', 1)")
+      },
+      errorClass = "INVALID_SCHEMA",
+      parameters = Map(
+        "expr" -> "\"1\""
+      ),
+      context = ExpectedContext(
+        fragment = "from_json('{\"a\":1}', 1)",
+        start = 7,
+        stop = 29
+      )
+    )
+  }
 }
 
 object DataFrameFunctionsSuite {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
index 0a84e9000c3..d2c6055fe36 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
@@ -424,10 +424,19 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
         "from_json(value, 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy 
HH:mm'))"),
       Row(Row(java.sql.Timestamp.valueOf("2015-08-26 18:00:00.0"))))
 
-    val errMsg1 = intercept[AnalysisException] {
-      df3.selectExpr("from_json(value, 1)")
-    }
-    assert(errMsg1.getMessage.startsWith("The expression '1' is not a valid schema string"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df3.selectExpr("from_json(value, 1)")
+      },
+      errorClass = "INVALID_SCHEMA",
+      parameters = Map("expr" -> "\"1\""),
+      context = ExpectedContext(
+        fragment = "from_json(value, 1)",
+        start = 0,
+        stop = 18
+      )
+    )
+
     val errMsg2 = intercept[AnalysisException] {
       df3.selectExpr("""from_json(value, 'time InvalidType')""")
     }

