This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 32d42bbe98d [SPARK-43649][SPARK-43650][SPARK-43651][SQL] Assign names 
to the error class _LEGACY_ERROR_TEMP_240[1-3]
32d42bbe98d is described below

commit 32d42bbe98da9a7e8c38b9c3187c75dbbaaaafbb
Author: Jiaan Geng <belie...@163.com>
AuthorDate: Tue May 23 12:41:06 2023 +0300

    [SPARK-43649][SPARK-43650][SPARK-43651][SQL] Assign names to the error 
class _LEGACY_ERROR_TEMP_240[1-3]
    
    ### What changes were proposed in this pull request?
    This PR aims to assign names to the error classes _LEGACY_ERROR_TEMP_240[1-3].
    
    ### Why are the changes needed?
    Improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    
    ### How was this patch tested?
    Existing test cases.
    
    Closes #41252 from beliefer/offset-limit-error-improve.
    
    Authored-by: Jiaan Geng <belie...@163.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 49 ++++++++------
 .../sql/catalyst/analysis/CheckAnalysis.scala      | 18 +++---
 .../sql/catalyst/analysis/AnalysisErrorSuite.scala | 74 ++++++++++++++++++----
 .../sql-tests/analyzer-results/limit.sql.out       | 24 ++++---
 .../analyzer-results/postgreSQL/limit.sql.out      |  8 +--
 .../test/resources/sql-tests/results/limit.sql.out | 24 ++++---
 .../sql-tests/results/postgreSQL/limit.sql.out     |  8 +--
 7 files changed, 136 insertions(+), 69 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index af0471199b7..5d19d180053 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1052,6 +1052,33 @@
     ],
     "sqlState" : "42613"
   },
+  "INVALID_LIMIT_LIKE_EXPRESSION" : {
+    "message" : [
+      "The limit like expression <expr> is invalid."
+    ],
+    "subClass" : {
+      "DATA_TYPE" : {
+        "message" : [
+          "The <name> expression must be integer type, but got <dataType>."
+        ]
+      },
+      "IS_NEGATIVE" : {
+        "message" : [
+          "The <name> expression must be equal to or greater than 0, but got 
<v>."
+        ]
+      },
+      "IS_NULL" : {
+        "message" : [
+          "The evaluated <name> expression must not be null."
+        ]
+      },
+      "IS_UNFOLDABLE" : {
+        "message" : [
+          "The <name> expression must evaluate to a constant value."
+        ]
+      }
+    }
+  },
   "INVALID_OPTIONS" : {
     "message" : [
       "Invalid options:"
@@ -1230,11 +1257,6 @@
       }
     }
   },
-  "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE" : {
-    "message" : [
-      "The <name> expression must evaluate to a constant value, but got 
<limitExpr>."
-    ]
-  },
   "LOCATION_ALREADY_EXISTS" : {
     "message" : [
       "Cannot name the managed table as <identifier>, as its associated 
location <location> already exists. Please pick a different table name, or 
remove the existing location first."
@@ -5260,21 +5282,6 @@
       "failed to evaluate expression <sqlExpr>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2401" : {
-    "message" : [
-      "The <name> expression must be integer type, but got <dataType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2402" : {
-    "message" : [
-      "The evaluated <name> expression must not be null, but got <limitExpr>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2403" : {
-    "message" : [
-      "The <name> expression must be equal to or greater than 0, but got <v>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2404" : {
     "message" : [
       "Table <name> is not partitioned."
@@ -5673,4 +5680,4 @@
       "Failed to get block <blockId>, which is not a shuffle block"
     ]
   }
-}
+}
\ No newline at end of file
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 3240f9bee56..407a9d363f4 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -85,27 +85,29 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
   private def checkLimitLikeClause(name: String, limitExpr: Expression): Unit 
= {
     limitExpr match {
       case e if !e.foldable => limitExpr.failAnalysis(
-        errorClass = "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+        errorClass = "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
         messageParameters = Map(
           "name" -> name,
-          "limitExpr" -> toSQLExpr(limitExpr)))
+          "expr" -> toSQLExpr(limitExpr)))
       case e if e.dataType != IntegerType => limitExpr.failAnalysis(
-        errorClass = "_LEGACY_ERROR_TEMP_2401",
+        errorClass = "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
         messageParameters = Map(
           "name" -> name,
-          "dataType" -> e.dataType.catalogString))
+          "expr" -> toSQLExpr(limitExpr),
+          "dataType" -> toSQLType(e.dataType)))
       case e =>
         e.eval() match {
           case null => limitExpr.failAnalysis(
-            errorClass = "_LEGACY_ERROR_TEMP_2402",
+            errorClass = "INVALID_LIMIT_LIKE_EXPRESSION.IS_NULL",
             messageParameters = Map(
               "name" -> name,
-              "limitExpr" -> limitExpr.sql))
+              "expr" -> toSQLExpr(limitExpr)))
           case v: Int if v < 0 => limitExpr.failAnalysis(
-            errorClass = "_LEGACY_ERROR_TEMP_2403",
+            errorClass = "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
             messageParameters = Map(
               "name" -> name,
-              "v" -> v.toString))
+              "expr" -> toSQLExpr(limitExpr),
+              "v" -> toSQLValue(v, IntegerType)))
           case _ => // OK
         }
     }
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index cbd6749807f..ed925019278 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -626,40 +626,90 @@ class AnalysisErrorSuite extends AnalysisTest {
     "The generator is not supported: outside the SELECT clause, found: Sort" 
:: Nil
   )
 
-  errorTest(
+  errorClassTest(
+    "an evaluated limit class must not be string",
+    testRelation.limit(Literal(UTF8String.fromString("abc"), StringType)),
+    "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
+    Map(
+      "name" -> "limit",
+      "expr" -> "\"abc\"",
+      "dataType" -> "\"STRING\""
+    )
+  )
+
+  errorClassTest(
+    "an evaluated limit class must not be long",
+    testRelation.limit(Literal(10L, LongType)),
+    "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
+    Map(
+      "name" -> "limit",
+      "expr" -> "\"10\"",
+      "dataType" -> "\"BIGINT\""
+    )
+  )
+
+  errorClassTest(
     "an evaluated limit class must not be null",
     testRelation.limit(Literal(null, IntegerType)),
-    "The evaluated limit expression must not be null, but got " :: Nil
+    "INVALID_LIMIT_LIKE_EXPRESSION.IS_NULL",
+    Map(
+      "name" -> "limit",
+      "expr" -> "\"NULL\""
+    )
   )
 
-  errorTest(
+  errorClassTest(
     "num_rows in limit clause must be equal to or greater than 0",
     listRelation.limit(-1),
-    "The limit expression must be equal to or greater than 0, but got -1" :: 
Nil
+    "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
+    Map(
+      "name" -> "limit",
+      "expr" -> "\"-1\"",
+      "v" -> "-1"
+    )
   )
 
-  errorTest(
+  errorClassTest(
     "an evaluated offset class must not be string",
     testRelation.offset(Literal(UTF8String.fromString("abc"), StringType)),
-    "The offset expression must be integer type, but got string" :: Nil
+    "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
+    Map(
+      "name" -> "offset",
+      "expr" -> "\"abc\"",
+      "dataType" -> "\"STRING\""
+    )
   )
 
-  errorTest(
+  errorClassTest(
     "an evaluated offset class must not be long",
     testRelation.offset(Literal(10L, LongType)),
-    "The offset expression must be integer type, but got bigint" :: Nil
+    "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
+    Map(
+      "name" -> "offset",
+      "expr" -> "\"10\"",
+      "dataType" -> "\"BIGINT\""
+    )
   )
 
-  errorTest(
+  errorClassTest(
     "an evaluated offset class must not be null",
     testRelation.offset(Literal(null, IntegerType)),
-    "The evaluated offset expression must not be null, but got " :: Nil
+    "INVALID_LIMIT_LIKE_EXPRESSION.IS_NULL",
+    Map(
+      "name" -> "offset",
+      "expr" -> "\"NULL\""
+    )
   )
 
-  errorTest(
+  errorClassTest(
     "num_rows in offset clause must be equal to or greater than 0",
     testRelation.offset(-1),
-    "The offset expression must be equal to or greater than 0, but got -1" :: 
Nil
+    "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
+    Map(
+      "name" -> "offset",
+      "expr" -> "\"-1\"",
+      "v" -> "-1"
+    )
   )
 
   errorClassTest(
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out
index 3b2ddb5dae1..f041026f308 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/limit.sql.out
@@ -54,8 +54,9 @@ SELECT * FROM testdata LIMIT -1
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2403",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
   "messageParameters" : {
+    "expr" : "\"-1\"",
     "name" : "limit",
     "v" : "-1"
   },
@@ -74,8 +75,9 @@ SELECT * FROM testData TABLESAMPLE (-1 ROWS)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2403",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
   "messageParameters" : {
+    "expr" : "\"-1\"",
     "name" : "limit",
     "v" : "-1"
   },
@@ -104,9 +106,9 @@ SELECT * FROM testdata LIMIT CAST(NULL AS INT)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2402",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_NULL",
   "messageParameters" : {
-    "limitExpr" : "CAST(NULL AS INT)",
+    "expr" : "\"CAST(NULL AS INT)\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -124,9 +126,9 @@ SELECT * FROM testdata LIMIT key > 3
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "\"(key > 3)\"",
+    "expr" : "\"(key > 3)\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -144,9 +146,10 @@ SELECT * FROM testdata LIMIT true
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2401",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
   "messageParameters" : {
-    "dataType" : "boolean",
+    "dataType" : "\"BOOLEAN\"",
+    "expr" : "\"true\"",
     "name" : "limit"
   }
 }
@@ -157,9 +160,10 @@ SELECT * FROM testdata LIMIT 'a'
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2401",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
   "messageParameters" : {
-    "dataType" : "string",
+    "dataType" : "\"STRING\"",
+    "expr" : "\"a\"",
     "name" : "limit"
   },
   "queryContext" : [ {
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
index 7ef2912cef2..495f425dd5e 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/postgreSQL/limit.sql.out
@@ -141,9 +141,9 @@ select * from int8_tbl limit (case when random() < 0.5 then 
bigint(null) end)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
+    "expr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -161,9 +161,9 @@ select * from int8_tbl offset (case when random() < 0.5 
then bigint(null) end)
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
+    "expr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "offset"
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
index 9a288681526..e07ee71aa88 100644
--- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
@@ -51,8 +51,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2403",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
   "messageParameters" : {
+    "expr" : "\"-1\"",
     "name" : "limit",
     "v" : "-1"
   },
@@ -73,8 +74,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2403",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_NEGATIVE",
   "messageParameters" : {
+    "expr" : "\"-1\"",
     "name" : "limit",
     "v" : "-1"
   },
@@ -103,9 +105,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2402",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_NULL",
   "messageParameters" : {
-    "limitExpr" : "CAST(NULL AS INT)",
+    "expr" : "\"CAST(NULL AS INT)\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -125,9 +127,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "\"(key > 3)\"",
+    "expr" : "\"(key > 3)\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -147,9 +149,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2401",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
   "messageParameters" : {
-    "dataType" : "boolean",
+    "dataType" : "\"BOOLEAN\"",
+    "expr" : "\"true\"",
     "name" : "limit"
   }
 }
@@ -162,9 +165,10 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2401",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.DATA_TYPE",
   "messageParameters" : {
-    "dataType" : "string",
+    "dataType" : "\"STRING\"",
+    "expr" : "\"a\"",
     "name" : "limit"
   },
   "queryContext" : [ {
diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
index 1489399c782..e76ba10299e 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
@@ -132,9 +132,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
+    "expr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "limit"
   },
   "queryContext" : [ {
@@ -154,9 +154,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "LIMIT_LIKE_EXPRESSION_IS_UNFOLDABLE",
+  "errorClass" : "INVALID_LIMIT_LIKE_EXPRESSION.IS_UNFOLDABLE",
   "messageParameters" : {
-    "limitExpr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
+    "expr" : "\"CASE WHEN (_nondeterministic < 0.5) THEN NULL END\"",
     "name" : "offset"
   },
   "queryContext" : [ {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to