This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0ae82d99d13 [SPARK-41181][SQL] Migrate the map options errors onto 
error classes
0ae82d99d13 is described below

commit 0ae82d99d13988086a297920d45a766115a70578
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Fri Nov 25 09:03:49 2022 +0300

    [SPARK-41181][SQL] Migrate the map options errors onto error classes
    
    ### What changes were proposed in this pull request?
    The PR aims to migrate the map options errors onto error classes.
    
    ### Why are the changes needed?
    The changes improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA.
    
    Closes #38730 from panbingkun/SPARK-41181.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 27 +++++----
 .../spark/sql/errors/QueryCompilationErrors.scala  |  6 +-
 .../sql-tests/results/csv-functions.sql.out        | 13 +++--
 .../sql-tests/results/json-functions.sql.out       | 12 ++--
 .../org/apache/spark/sql/JsonFunctionsSuite.scala  | 66 ++++++++++++++++------
 5 files changed, 81 insertions(+), 43 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 55a56712554..1246e870e0d 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -735,6 +735,23 @@
       "The <joinType> JOIN with LATERAL correlation is not allowed because an 
OUTER subquery cannot correlate to its join partner. Remove the LATERAL 
correlation or use an INNER JOIN, or LEFT OUTER JOIN instead."
     ]
   },
+  "INVALID_OPTIONS" : {
+    "message" : [
+      "Invalid options:"
+    ],
+    "subClass" : {
+      "NON_MAP_FUNCTION" : {
+        "message" : [
+          "Must use the `map()` function for options."
+        ]
+      },
+      "NON_STRING_TYPE" : {
+        "message" : [
+          "A type of keys and values in `map()` must be string, but got 
<mapType>."
+        ]
+      }
+    }
+  },
   "INVALID_PANDAS_UDF_PLACEMENT" : {
     "message" : [
       "The group aggregate pandas UDF <functionList> cannot be invoked 
together with as other, non-pandas aggregate functions."
@@ -2190,16 +2207,6 @@
       "Schema should be struct type but got <dataType>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1095" : {
-    "message" : [
-      "A type of keys and values in map() must be string, but got <map>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1096" : {
-    "message" : [
-      "Must use a map() function for options."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1097" : {
     "message" : [
       "The field for corrupt records must be string type and nullable."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index fa22c36f841..486bd21b844 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1013,13 +1013,13 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
 
   def keyValueInMapNotStringError(m: CreateMap): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1095",
-      messageParameters = Map("map" -> m.dataType.catalogString))
+      errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
+      messageParameters = Map("mapType" -> toSQLType(m.dataType)))
   }
 
   def nonMapFunctionNotAllowedError(): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1096",
+      errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
       messageParameters = Map.empty)
   }
 
diff --git 
a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index 0b5a63c28e4..200ddd837e1 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -66,7 +66,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -84,9 +84,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -222,7 +222,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -233,6 +233,7 @@ org.apache.spark.sql.AnalysisException
 }
 
 
+
 -- !query
 select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1))
 -- !query schema
@@ -240,9 +241,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index ab1465350d8..a9c4dd0b9fd 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -70,7 +70,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -88,9 +88,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -192,7 +192,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -210,9 +210,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
index d2c6055fe36..56bdefc98ba 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
@@ -395,16 +395,31 @@ class JsonFunctionsSuite extends QueryTest with 
SharedSparkSession {
       df2.selectExpr("to_json(a, map('timestampFormat', 'dd/MM/yyyy HH:mm'))"),
       Row("""{"_1":"26/08/2015 18:00"}""") :: Nil)
 
-    val errMsg1 = intercept[AnalysisException] {
-      df2.selectExpr("to_json(a, named_struct('a', 1))")
-    }
-    assert(errMsg1.getMessage.startsWith("Must use a map() function for 
options"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df2.selectExpr("to_json(a, named_struct('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
+      parameters = Map.empty,
+      context = ExpectedContext(
+        fragment = "to_json(a, named_struct('a', 1))",
+        start = 0,
+        stop = 31
+      )
+    )
 
-    val errMsg2 = intercept[AnalysisException] {
-      df2.selectExpr("to_json(a, map('a', 1))")
-    }
-    assert(errMsg2.getMessage.startsWith(
-      "A type of keys and values in map() must be string, but got"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df2.selectExpr("to_json(a, map('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
+      parameters = Map("mapType" -> "\"MAP<STRING, INT>\""),
+      context = ExpectedContext(
+        fragment = "to_json(a, map('a', 1))",
+        start = 0,
+        stop = 22
+      )
+    )
   }
 
   test("SPARK-19967 Support from_json in SQL") {
@@ -441,15 +456,30 @@ class JsonFunctionsSuite extends QueryTest with 
SharedSparkSession {
       df3.selectExpr("""from_json(value, 'time InvalidType')""")
     }
     assert(errMsg2.getMessage.contains("DataType invalidtype is not 
supported"))
-    val errMsg3 = intercept[AnalysisException] {
-      df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 
1))")
-    }
-    assert(errMsg3.getMessage.startsWith("Must use a map() function for 
options"))
-    val errMsg4 = intercept[AnalysisException] {
-      df3.selectExpr("from_json(value, 'time Timestamp', map('a', 1))")
-    }
-    assert(errMsg4.getMessage.startsWith(
-      "A type of keys and values in map() must be string, but got"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 
1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
+      parameters = Map.empty,
+      context = ExpectedContext(
+        fragment = "from_json(value, 'time Timestamp', named_struct('a', 1))",
+        start = 0,
+        stop = 55
+      )
+    )
+    checkError(
+      exception = intercept[AnalysisException] {
+        df3.selectExpr("from_json(value, 'time Timestamp', map('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
+      parameters = Map("mapType" -> "\"MAP<STRING, INT>\""),
+      context = ExpectedContext(
+        fragment = "from_json(value, 'time Timestamp', map('a', 1))",
+        start = 0,
+        stop = 46
+      )
+    )
   }
 
   test("SPARK-24027: from_json - map<string, int>") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to