This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 15c9ec7cbbbb [SPARK-42332][SQL] Changing the require to a SparkException in ComplexTypeMergingExpression
15c9ec7cbbbb is described below

commit 15c9ec7cbbbba3b66ec413b7964a374cb9508a80
Author: hannahkamundson <hannahkamund...@gmail.com>
AuthorDate: Sun Jan 21 23:28:17 2024 +0300

    [SPARK-42332][SQL] Changing the require to a SparkException in ComplexTypeMergingExpression
    
    ### What changes were proposed in this pull request?
    
    - I created `SparkException.require`, which is the same as the Scala `require` precondition except it uses `SparkIllegalArgumentException` (see the sketch after this list).
    - I changed 2 `require`s in `ComplexTypeMergingExpression` to use this `SparkException.require`.
    - The exception messages were updated for these `require`s.
    - An SQLSTATE was added for the `require`s in `ComplexTypeMergingExpression`.
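
    An excerpt-style sketch of the new helper and one of the rewritten call sites, mirroring the diff below (the surrounding `SparkException` object and expression class are elided):

    ```scala
    // New helper added to object SparkException in common/utils:
    def require(
        requirement: Boolean,
        errorClass: String,
        messageParameters: Map[String, String]): Unit = {
      if (!requirement) {
        // Unlike Scala's built-in require, this carries an error class
        // and structured message parameters.
        throw new SparkIllegalArgumentException(errorClass, messageParameters)
      }
    }

    // One of the rewritten call sites in ComplexTypeMergingExpression.dataTypeCheck:
    SparkException.require(
      requirement = inputTypesForMerging.nonEmpty,
      errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.NO_INPUTS",
      messageParameters = Map("expression" -> toSQLExpr(this)))
    ```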
    
    ### Why are the changes needed?
    All user-facing exceptions should be `SparkException`s. The `require`s need to be changed to match this convention.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes.
    
    - `ComplexTypeMergingExpression` throws a `SparkIllegalArgumentException` instead of an `IllegalArgumentException`.
    - It also updates the SQLSTATE for these errors.
    - It also updates the messages associated with the exceptions thrown.
    - It also updates the docs with the new error classes (see the example after this list).
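
    For example, an empty `Coalesce` (which mixes in `ComplexTypeMergingExpression`) now fails with the structured error class. A minimal test-style sketch based on the new suite below, assuming the `getErrorClass` accessor from `SparkThrowable`:

    ```scala
    import org.apache.spark.SparkIllegalArgumentException
    import org.apache.spark.sql.catalyst.expressions.Coalesce

    try {
      Coalesce(Seq.empty).dataType  // dataTypeCheck sees an empty input list
    } catch {
      case e: SparkIllegalArgumentException =>
        // The error class replaces the old free-form IllegalArgumentException message.
        assert(e.getErrorClass == "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.NO_INPUTS")
    }
    ```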
    
    ### How was this patch tested?
    Unit tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #44336 from hannahkamundson/SPARK-42332.
    
    Lead-authored-by: hannahkamundson <hannahkamund...@gmail.com>
    Co-authored-by: Hannah <48397717+hannahkamund...@users.noreply.github.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json    | 18 +++++++++++
 .../scala/org/apache/spark/SparkException.scala    | 15 +++++++++
 ...lex-expression-unsupported-input-error-class.md | 36 ++++++++++++++++++++++
 docs/sql-error-conditions.md                       |  8 +++++
 .../sql/catalyst/expressions/Expression.scala      | 19 +++++++-----
 .../sql/errors/QueryCompilationErrorsSuite.scala   | 35 ++++++++++++++++++---
 6 files changed, 118 insertions(+), 13 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 8794a8632c52..072081f48448 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -506,6 +506,24 @@
     ],
     "sqlState" : "22004"
   },
+  "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT" : {
+    "message" : [
+      "Cannot process input data types for the expression: <expression>."
+    ],
+    "subClass" : {
+      "MISMATCHED_TYPES" : {
+        "message" : [
+          "All input types must be the same except nullable, containsNull, 
valueContainsNull flags, but found the input types <inputTypes>."
+        ]
+      },
+      "NO_INPUTS" : {
+        "message" : [
+          "The collection of input data types must not be empty."
+        ]
+      }
+    },
+    "sqlState" : "42K09"
+  },
   "CONCURRENT_QUERY" : {
     "message" : [
       "Another instance of this query was just started by a concurrent 
session."
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkException.scala b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
index 67bdc23b5f08..ebb6e772249b 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -106,6 +106,21 @@ object SparkException {
       messageParameters = Map("message" -> msg),
       cause = cause)
   }
+
+  /**
+   * This is like the Scala require precondition, except it uses SparkIllegalArgumentException.
+   * @param requirement The requirement you want to check
+   * @param errorClass The error class to use if the requirement isn't met
+   * @param messageParameters Message parameters to append to the message
+   */
+  def require(
+      requirement: Boolean,
+      errorClass: String,
+      messageParameters: Map[String, String]): Unit = {
+    if (!requirement) {
+      throw new SparkIllegalArgumentException(errorClass, messageParameters)
+    }
+  }
 }
 
 /**
diff --git a/docs/sql-error-conditions-complex-expression-unsupported-input-error-class.md b/docs/sql-error-conditions-complex-expression-unsupported-input-error-class.md
new file mode 100644
index 000000000000..e8b8630c19aa
--- /dev/null
+++ b/docs/sql-error-conditions-complex-expression-unsupported-input-error-class.md
@@ -0,0 +1,36 @@
+---
+layout: global
+title: COMPLEX_EXPRESSION_UNSUPPORTED_INPUT error class
+displayTitle: COMPLEX_EXPRESSION_UNSUPPORTED_INPUT error class
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+
+[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+Cannot process input data types for the expression: `<expression>`.
+
+This error class has the following derived error classes:
+
+## MISMATCHED_TYPES
+
+All input types must be the same except nullable, containsNull, valueContainsNull flags, but found the input types `<inputTypes>`.
+
+## NO_INPUTS
+
+The collection of input data types must not be empty.
+
+
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index 74eaa4228612..133cfbacb5fa 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -419,6 +419,14 @@ The comparator has returned a NULL for a comparison between `<firstValue>` and `
 It should return a positive integer for "greater than", 0 for "equal" and a negative integer for "less than".
 To revert to deprecated behavior where NULL is treated as 0 (equal), you must set "spark.sql.legacy.allowNullComparisonResultInArraySort" to "true".
 
+### [COMPLEX_EXPRESSION_UNSUPPORTED_INPUT](sql-error-conditions-complex-expression-unsupported-input-error-class.html)
+
+[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+Cannot process input data types for the expression: `<expression>`.
+
+For more details see [COMPLEX_EXPRESSION_UNSUPPORTED_INPUT](sql-error-conditions-complex-expression-unsupported-input-error-class.html)
+
 ### CONCURRENT_QUERY
 
 [SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 484418f5e5a7..a3432716002a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -23,6 +23,7 @@ import org.apache.spark.{QueryContext, SparkException}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult, TypeCoercion}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
+import org.apache.spark.sql.catalyst.expressions.Cast.{toSQLExpr, toSQLType}
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateFunction
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
@@ -1305,14 +1306,16 @@ trait ComplexTypeMergingExpression extends Expression {
   lazy val inputTypesForMerging: Seq[DataType] = children.map(_.dataType)
 
   def dataTypeCheck: Unit = {
-    require(
-      inputTypesForMerging.nonEmpty,
-      "The collection of input data types must not be empty.")
-    require(
-      TypeCoercion.haveSameType(inputTypesForMerging),
-      "All input types must be the same except nullable, containsNull, 
valueContainsNull flags. " +
-        s"The expression is: $this. " +
-        s"The input types found 
are\n\t${inputTypesForMerging.mkString("\n\t")}.")
+    SparkException.require(
+      requirement = inputTypesForMerging.nonEmpty,
+      errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.NO_INPUTS",
+      messageParameters = Map("expression" -> toSQLExpr(this)))
+    SparkException.require(
+      requirement = TypeCoercion.haveSameType(inputTypesForMerging),
+      errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.MISMATCHED_TYPES",
+      messageParameters = Map(
+        "expression" -> toSQLExpr(this),
+        "inputTypes" -> inputTypesForMerging.map(toSQLType).mkString("[", ", 
", "]")))
   }
 
   private lazy val internalDataType: DataType = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 3c397bd6a4e1..7ea0c3843444 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -17,17 +17,17 @@
 
 package org.apache.spark.sql.errors
 
-import org.apache.spark.{SPARK_DOC_ROOT, SparkUnsupportedOperationException}
-import org.apache.spark.sql.{AnalysisException, ClassData, IntegratedUDFTestUtils, QueryTest, Row}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkIllegalArgumentException, SparkUnsupportedOperationException}
+import org.apache.spark.sql._
 import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
-import org.apache.spark.sql.catalyst.expressions.UnsafeRow
+import org.apache.spark.sql.catalyst.expressions.{Coalesce, Literal, UnsafeRow}
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.expressions.SparkUserDefinedFunction
-import org.apache.spark.sql.functions.{array, from_json, grouping, grouping_id, lit, struct, sum, udf}
+import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.sql.types.{BooleanType, IntegerType, MapType, StringType, StructField, StructType}
+import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
 
 case class StringLongClass(a: String, b: Long)
@@ -920,6 +920,31 @@ class QueryCompilationErrorsSuite
     }
   }
 
+  test("ComplexTypeMergingExpression should throw exception if no children") {
+    val coalesce = Coalesce(Seq.empty)
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        coalesce.dataType
+      },
+      errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.NO_INPUTS",
+      parameters = Map("expression" -> "\"coalesce()\""))
+  }
+
+  test("ComplexTypeMergingExpression should throw " +
+    "exception if children have different data types") {
+    val coalesce = Coalesce(Seq(Literal(1), Literal("a"), Literal("a")))
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        coalesce.dataType
+      },
+      errorClass = "COMPLEX_EXPRESSION_UNSUPPORTED_INPUT.MISMATCHED_TYPES",
+      parameters = Map(
+        "expression" -> "\"coalesce(1, a, a)\"",
+        "inputTypes" -> "[\"INT\", \"STRING\", \"STRING\"]"))
+  }
+
   test("UNSUPPORTED_CALL: call the unsupported method update()") {
     checkError(
       exception = intercept[SparkUnsupportedOperationException] {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
