This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0205478b9d3 [SPARK-41038][SQL] Rename `MULTI_VALUE_SUBQUERY_ERROR` to `SCALAR_SUBQUERY_TOO_MANY_ROWS`
0205478b9d3 is described below

commit 0205478b9d35d62450fd7c9ade520087fd2979a7
Author: itholic <haejoon....@databricks.com>
AuthorDate: Wed Nov 9 19:14:32 2022 +0300

    [SPARK-41038][SQL] Rename `MULTI_VALUE_SUBQUERY_ERROR` to `SCALAR_SUBQUERY_TOO_MANY_ROWS`
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to rename `MULTI_VALUE_SUBQUERY_ERROR` to `SCALAR_SUBQUERY_TOO_MANY_ROWS`.
    
    ### Why are the changes needed?
    
    The current error class name `MULTI_VALUE_SUBQUERY_ERROR` does not clearly describe the error situation.
    
    `SCALAR_SUBQUERY_TOO_MANY_ROWS` is more readable, since "scalar subquery" is the established industry term.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    ```
    ./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"
    ```
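    
    The renamed error class is also exercised by the updated suites; assuming the same sbt invocation style as above, the error-specific suite can be run on its own:
    ```
    ./build/sbt "sql/testOnly org.apache.spark.sql.errors.QueryExecutionErrorsSuite"
    ```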
    
    Closes #38551 from itholic/SPARK-41038.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json               | 10 +++++-----
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala     |  2 +-
 .../subquery/scalar-subquery/scalar-subquery-select.sql.out    |  2 +-
 .../org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala  |  4 ++--
 .../apache/spark/sql/errors/QueryExecutionErrorsSuite.scala    |  4 ++--
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 9c914b86bb1..7c33c1059ae 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -690,11 +690,6 @@
       "Not allowed to implement multiple UDF interfaces, UDF class <className>"
     ]
   },
-  "MULTI_VALUE_SUBQUERY_ERROR" : {
-    "message" : [
-      "More than one row returned by a subquery used as an expression."
-    ]
-  },
   "NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION" : {
     "message" : [
       "When there are more than one MATCHED clauses in a MERGE statement, only 
the last MATCHED clause can omit the condition."
@@ -878,6 +873,11 @@
     ],
     "sqlState" : "42000"
   },
+  "SCALAR_SUBQUERY_TOO_MANY_ROWS" : {
+    "message" : [
+      "More than one row returned by a subquery used as an expression."
+    ]
+  },
   "SCHEMA_ALREADY_EXISTS" : {
     "message" : [
       "Cannot create schema <schemaName> because it already exists.",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 73664e64c22..828f52fe71d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2766,7 +2766,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
 
   def multipleRowSubqueryError(context: SQLQueryContext): Throwable = {
     new SparkException(
-      errorClass = "MULTI_VALUE_SUBQUERY_ERROR",
+      errorClass = "SCALAR_SUBQUERY_TOO_MANY_ROWS",
       messageParameters = Map.empty,
       cause = null,
       context = getQueryContext(context),
diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-select.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-select.sql.out
index 38ab365ef69..0012251d7eb 100644
--- a/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-select.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/subquery/scalar-subquery/scalar-subquery-select.sql.out
@@ -424,7 +424,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkException
 {
-  "errorClass" : "MULTI_VALUE_SUBQUERY_ERROR",
+  "errorClass" : "SCALAR_SUBQUERY_TOO_MANY_ROWS",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index c9c66395a3b..25faa34b697 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2676,7 +2676,7 @@ class DataSourceV2SQLSuiteV1Filter extends DataSourceV2SQLSuite with AlterTableT
         exception = intercept[SparkException] {
           sql(s"SELECT * FROM t TIMESTAMP AS OF ($subquery4)").collect()
         },
-        errorClass = "MULTI_VALUE_SUBQUERY_ERROR",
+        errorClass = "SCALAR_SUBQUERY_TOO_MANY_ROWS",
         parameters = Map.empty,
         ExpectedContext(
           fragment = "(SELECT * FROM VALUES (1), (2))",
@@ -2686,7 +2686,7 @@ class DataSourceV2SQLSuiteV1Filter extends DataSourceV2SQLSuite with AlterTableT
         exception = intercept[SparkException] {
           sql(s"SELECT * FROM t TIMESTAMP AS OF (SELECT 
($subquery4))").collect()
         },
-        errorClass = "MULTI_VALUE_SUBQUERY_ERROR",
+        errorClass = "SCALAR_SUBQUERY_TOO_MANY_ROWS",
         parameters = Map.empty,
         ExpectedContext(
           fragment = "(SELECT * FROM VALUES (1), (2))",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 496c454509b..b5166c5dfb8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -574,13 +574,13 @@ class QueryExecutionErrorsSuite
   }
 
   test(
-    "MULTI_VALUE_SUBQUERY_ERROR: " +
+    "SCALAR_SUBQUERY_TOO_MANY_ROWS: " +
     "More than one row returned by a subquery used as an expression") {
     checkError(
       exception = intercept[SparkException] {
         sql("select (select a from (select 1 as a union all select 2 as a) t) 
as b").collect()
       },
-      errorClass = "MULTI_VALUE_SUBQUERY_ERROR",
+      errorClass = "SCALAR_SUBQUERY_TOO_MANY_ROWS",
       queryContext = Array(
         ExpectedContext(
           fragment = "(select a from (select 1 as a union all select 2 as a) 
t)",

