This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 82cd9932446 [SPARK-39905][SQL][TESTS] Remove `checkErrorClass()` and use `checkError()` instead
82cd9932446 is described below

commit 82cd993244656153551c9bb122247789d2a5b6b4
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Fri Jul 29 10:00:14 2022 +0500

    [SPARK-39905][SQL][TESTS] Remove `checkErrorClass()` and use `checkError()` instead
    
    ### What changes were proposed in this pull request?
    Replace all invocations of `checkErrorClass()` with `checkError()` and remove `checkErrorClass()`.
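    
    For illustration, a typical call-site migration, adapted from the `QueryCompilationErrorsSuite` changes in the diff below:
    
    ```scala
    // Before: checkErrorClass() asserted on the fully rendered message text.
    checkErrorClass(
      exception = e,
      errorClass = "INVALID_FIELD_NAME",
      msg = "Field name `m`.`n` is invalid: `m` is not a struct.; line 1 pos 27")

    // After: checkError() asserts on the error class and message parameters.
    checkError(
      exception = e,
      errorClass = "INVALID_FIELD_NAME",
      parameters = Map("fieldName" -> "`m`.`n`", "path" -> "`m`"))
    ```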
    
    ### Why are the changes needed?
    1. To prepare the test infra for testing query contexts.
    2. To check message parameters instead of the entire message text. This PR is a follow-up of https://github.com/apache/spark/pull/36693.
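    
    Where expected parameter values contain non-deterministic parts such as expression IDs, `checkError()` can match them as regular expressions via `matchPVals = true`. A shortened sketch, adapted from the `DatasetUnpivotSuite` changes below:
    
    ```scala
    checkError(
      exception = e,
      errorClass = "UNPIVOT_VALUE_DATA_TYPE_MISMATCH",
      // With matchPVals = true each expected value is a regex,
      // so \d+ absorbs the changing expression IDs.
      parameters = Map("types" -> """"STRING" \(`str1#\d+`\), "BIGINT" \(`long1#\d+L`\)"""),
      matchPVals = true)
    ```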
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "sql/testOnly *DatasetUnpivotSuite"
    $ build/sbt "sql/testOnly *QueryCompilationErrorsSuite"
    $ build/sbt "test:testOnly *QueryExecutionErrorsSuite"
    ```
    
    Closes #37322 from MaxGekk/test-query-context.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../scala/org/apache/spark/SparkFunSuite.scala     | 12 +---
 .../catalyst/encoders/EncoderResolutionSuite.scala | 36 +++++-----
 .../apache/spark/sql/types/StructTypeSuite.scala   |  6 +-
 .../org/apache/spark/sql/DatasetUnpivotSuite.scala | 74 ++++++++++----------
 .../sql/errors/QueryCompilationErrorsSuite.scala   | 81 +++++++---------------
 .../spark/sql/errors/QueryErrorsSuiteBase.scala    | 21 ------
 .../sql/errors/QueryExecutionAnsiErrorsSuite.scala | 72 +++++++------------
 .../sql/errors/QueryExecutionErrorsSuite.scala     | 10 +--
 8 files changed, 112 insertions(+), 200 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index c4d3735f320..bdf360fab6d 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -292,9 +292,9 @@ abstract class SparkFunSuite
   protected def checkError(
       exception: SparkThrowable,
       errorClass: String,
-      errorSubClass: Option[String],
-      sqlState: Option[String],
-      parameters: Map[String, String],
+      errorSubClass: Option[String] = None,
+      sqlState: Option[String] = None,
+      parameters: Map[String, String] = Map.empty,
       matchPVals: Boolean = false): Unit = {
     assert(exception.getErrorClass === errorClass)
     if (exception.getErrorSubClass != null) {
@@ -335,12 +335,6 @@ abstract class SparkFunSuite
       parameters: Map[String, String]): Unit =
     checkError(exception, errorClass, None, Some(sqlState), parameters)
 
-  protected def checkError(
-      exception: SparkThrowable,
-      errorClass: String,
-      parameters: Map[String, String]): Unit =
-    checkError(exception, errorClass, None, None, parameters)
-
   class LogAppender(msg: String = "", maxEvents: Int = 1000)
      extends AbstractAppender("logAppender", null, null, true, Property.EMPTY_ARRAY) {
     private val _loggingEvents = new ArrayBuffer[LogEvent]()
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
index c0877bea148..c8d2a002f2a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
@@ -121,11 +121,11 @@ class EncoderResolutionSuite extends PlanTest {
   test("the real type is not compatible with encoder schema: non-array field") 
{
     val encoder = ExpressionEncoder[ArrayClass]
     val attrs = Seq($"arr".int)
-    checkError(exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+    checkError(
+      exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
       errorClass = "UNSUPPORTED_DESERIALIZER",
       errorSubClass = Some("DATA_TYPE_MISMATCH"),
-      parameters = Map("desiredType" -> "\"ARRAY\"", "dataType" -> "\"INT\""),
-      sqlState = None)
+      parameters = Map("desiredType" -> "\"ARRAY\"", "dataType" -> "\"INT\""))
   }
 
   test("the real type is not compatible with encoder schema: array element 
type") {
@@ -140,11 +140,11 @@ class EncoderResolutionSuite extends PlanTest {
 
     withClue("inner element is not array") {
       val attrs = Seq($"nestedArr".array(new StructType().add("arr", "int")))
-      checkError(exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+      checkError(
+        exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
         errorClass = "UNSUPPORTED_DESERIALIZER",
         errorSubClass = Some("DATA_TYPE_MISMATCH"),
-        parameters = Map("desiredType" -> "\"ARRAY\"", "dataType" -> 
"\"INT\""),
-        sqlState = None)
+        parameters = Map("desiredType" -> "\"ARRAY\"", "dataType" -> 
"\"INT\""))
     }
 
     withClue("nested array element type is not compatible") {
@@ -177,22 +177,22 @@ class EncoderResolutionSuite extends PlanTest {
 
     {
       val attrs = Seq($"a".string, $"b".long, $"c".int)
-      checkError(exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+      checkError(
+        exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
         errorClass = "UNSUPPORTED_DESERIALIZER",
         errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
         parameters = Map("schema" -> "\"STRUCT<a: STRING, b: BIGINT, c: 
INT>\"",
-          "ordinal" -> "2"),
-        sqlState = None)
+          "ordinal" -> "2"))
     }
 
     {
       val attrs = Seq($"a".string)
-      checkError(exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+      checkError(
+        exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
         errorClass = "UNSUPPORTED_DESERIALIZER",
         errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
         parameters = Map("schema" -> "\"STRUCT<a: STRING>\"",
-          "ordinal" -> "2"),
-        sqlState = None)
+          "ordinal" -> "2"))
     }
   }
 
@@ -201,22 +201,22 @@ class EncoderResolutionSuite extends PlanTest {
 
     {
       val attrs = Seq($"a".string, $"b".struct($"x".long, $"y".string, 
$"z".int))
-      checkError(exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+      checkError(
+        exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
         errorClass = "UNSUPPORTED_DESERIALIZER",
         errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
         parameters = Map("schema" -> "\"STRUCT<x: BIGINT, y: STRING, z: 
INT>\"",
-          "ordinal" -> "2"),
-        sqlState = None)
+          "ordinal" -> "2"))
     }
 
     {
       val attrs = Seq($"a".string, $"b".struct($"x".long))
-      checkError(exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
+      checkError(
+        exception = intercept[AnalysisException](encoder.resolveAndBind(attrs)),
         errorClass = "UNSUPPORTED_DESERIALIZER",
         errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
         parameters = Map("schema" -> "\"STRUCT<x: BIGINT>\"",
-          "ordinal" -> "2"),
-        sqlState = None)
+          "ordinal" -> "2"))
     }
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
index 96813369172..dd5bed3b30c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
@@ -328,8 +328,10 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {
     e = intercept[AnalysisException] {
       check(Seq("S2", "x"), None)
     }
-    checkError(e, "AMBIGUOUS_COLUMN_OR_FIELD",
-      Map("name" -> "`S2`.`x`", "n" -> "2"))
+    checkError(
+      exception = e,
+      errorClass = "AMBIGUOUS_COLUMN_OR_FIELD",
+      parameters = Map("name" -> "`S2`.`x`", "n" -> "2"))
     caseSensitiveCheck(Seq("s2", "x"), Some(Seq("s2") -> StructField("x", 
IntegerType)))
 
     // simple map type
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala
index 8ccad457e8d..b860f950325 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala
@@ -305,14 +305,15 @@ class DatasetUnpivotSuite extends QueryTest
           valueColumnName = "val"
         )
     }
-    checkErrorClass(
+    checkError(
       exception = e,
       errorClass = "UNPIVOT_VALUE_DATA_TYPE_MISMATCH",
-      msg = "Unpivot value columns must share a least common type, some types 
do not: \\[" +
-        "\"STRING\" \\(`str1#\\d+`\\), " +
-        "\"INT\" \\(`int1#\\d+`, `int2#\\d+`, `int3#\\d+`, ...\\), " +
-        "\"BIGINT\" \\(`long1#\\d+L`, `long2#\\d+L`\\)\\];(\n.*)*",
-      matchMsg = true)
+      parameters = Map(
+        "types" ->
+          (""""STRING" \(`str1#\d+`\), """ +
+           """"INT" \(`int1#\d+`, `int2#\d+`, `int3#\d+`, ...\), """ +
+           """"BIGINT" \(`long1#\d+L`, `long2#\d+L`\)""")),
+      matchPVals = true)
   }
 
   test("unpivot with compatible value types") {
@@ -358,12 +359,12 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e1,
       errorClass = "UNRESOLVED_COLUMN",
-      msg = "A column or function parameter with name `1` cannot be 
resolved\\. " +
-        "Did you mean one of the following\\? \\[`id`, `int1`, `str1`, `str2`, 
`long1`\\];(\n.*)*",
-      matchMsg = true)
+      parameters = Map(
+        "objectName" -> "`1`",
+        "objectList" -> "`id`, `int1`, `str1`, `str2`, `long1`"))
 
     // unpivoting where value column does not exist
     val e2 = intercept[AnalysisException] {
@@ -374,12 +375,12 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e2,
       errorClass = "UNRESOLVED_COLUMN",
-      msg = "A column or function parameter with name `does` cannot be 
resolved\\. " +
-        "Did you mean one of the following\\? \\[`id`, `int1`, `long1`, 
`str1`, `str2`\\];(\n.*)*",
-      matchMsg = true)
+      parameters = Map(
+        "objectName" -> "`does`",
+        "objectList" -> "`id`, `int1`, `long1`, `str1`, `str2`"))
 
     // unpivoting with empty list of value columns
     // where potential value columns are of incompatible types
@@ -391,14 +392,14 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e3,
       errorClass = "UNPIVOT_VALUE_DATA_TYPE_MISMATCH",
-      msg = "Unpivot value columns must share a least common type, some types 
do not: \\[" +
-        "\"INT\" \\(`id#\\d+`, `int1#\\d+`\\), " +
-        "\"STRING\" \\(`str1#\\d+`, `str2#\\d+`\\), " +
-        "\"BIGINT\" \\(`long1#\\d+L`\\)\\];(\n.*)*",
-      matchMsg = true)
+      parameters = Map("types" ->
+        (""""INT" \(`id#\d+`, `int1#\d+`\), """ +
+         """"STRING" \(`str1#\d+`, `str2#\d+`\), """ +
+         """"BIGINT" \(`long1#\d+L`\)""")),
+      matchPVals = true)
 
     // unpivoting with star id columns so that no value columns are left
     val e4 = intercept[AnalysisException] {
@@ -409,12 +410,10 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e4,
       errorClass = "UNPIVOT_REQUIRES_VALUE_COLUMNS",
-      msg = "At least one value column needs to be specified for UNPIVOT, " +
-        "all columns specified as ids;(\\n.*)*",
-      matchMsg = true)
+      parameters = Map())
 
     // unpivoting with star value columns
     // where potential value columns are of incompatible types
@@ -426,14 +425,14 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e5,
       errorClass = "UNPIVOT_VALUE_DATA_TYPE_MISMATCH",
-      msg = "Unpivot value columns must share a least common type, some types 
do not: \\[" +
-        "\"INT\" \\(`id#\\d+`, `int1#\\d+`\\), " +
-        "\"STRING\" \\(`str1#\\d+`, `str2#\\d+`\\), " +
-        "\"BIGINT\" \\(`long1#\\d+L`\\)\\];(\n.*)*",
-      matchMsg = true)
+      parameters = Map("types" ->
+        (""""INT" \(`id#\d+`, `int1#\d+`\), """ +
+         """"STRING" \(`str1#\d+`, `str2#\d+`\), """ +
+         """"BIGINT" \(`long1#\d+L`\)""")),
+      matchPVals = true)
 
     // unpivoting without giving values and no non-id columns
     val e6 = intercept[AnalysisException] {
@@ -444,12 +443,10 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e6,
       errorClass = "UNPIVOT_REQUIRES_VALUE_COLUMNS",
-      msg = "At least one value column needs to be specified for UNPIVOT, " +
-        "all columns specified as ids;(\\n.*)*",
-      matchMsg = true)
+      parameters = Map.empty)
   }
 
   test("unpivot after pivot") {
@@ -499,14 +496,13 @@ class DatasetUnpivotSuite extends QueryTest
         valueColumnName = "val"
       )
     }
-    checkErrorClass(
+    checkError(
       exception = e,
       errorClass = "UNRESOLVED_COLUMN",
      // expected message is wrong: https://issues.apache.org/jira/browse/SPARK-39783
-      msg = "A column or function parameter with name `an`\\.`id` cannot be 
resolved\\. " +
-        "Did you mean one of the following\\? " +
-        "\\[`an`.`id`, `int1`, `long1`, `str`.`one`, `str`.`two`\\];(\n.*)*",
-      matchMsg = true)
+      parameters = Map(
+        "objectName" -> "`an`.`id`",
+        "objectList" -> "`an`.`id`, `int1`, `long1`, `str`.`one`, 
`str`.`two`"))
   }
 
   test("unpivot with struct fields") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 667fc1df42c..85aa7221b0e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -111,14 +111,15 @@ class QueryCompilationErrorsSuite
 
   test("INVALID_PARAMETER_VALUE: the argument_index of string format is 
invalid") {
     withSQLConf(SQLConf.ALLOW_ZERO_INDEX_IN_FORMAT_STRING.key -> "false") {
-      val e = intercept[AnalysisException] {
-        sql("select format_string('%0$s', 'Hello')")
-      }
-      checkErrorClass(
-        exception = e,
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("select format_string('%0$s', 'Hello')")
+        },
         errorClass = "INVALID_PARAMETER_VALUE",
-        msg = "The value of parameter(s) 'strfmt' in `format_string` is 
invalid: " +
-          "expects %1$, %2$ and so on, but got %0$.; line 1 pos 7")
+        parameters = Map(
+          "parameter" -> "strfmt",
+          "functionName" -> "`format_string`",
+          "expected" -> "expects %1$, %2$ and so on, but got %0$."))
     }
   }
 
@@ -273,7 +274,6 @@ class QueryCompilationErrorsSuite
     checkError(
       exception = groupingColMismatchEx,
       errorClass = "GROUPING_COLUMN_MISMATCH",
-      errorSubClass = None,
       parameters = Map("grouping" -> "earnings.*", "groupingColumns" -> 
"course.*,year.*"),
       sqlState = Some("42000"),
       matchPVals = true)
@@ -286,7 +286,6 @@ class QueryCompilationErrorsSuite
     checkError(
       exception = groupingIdColMismatchEx,
       errorClass = "GROUPING_ID_COLUMN_MISMATCH",
-      errorSubClass = None,
       parameters = Map("groupingIdColumn" -> "earnings.*",
       "groupByColumns" -> "course.*,year.*"),
       sqlState = Some("42000"),
@@ -424,7 +423,7 @@ class QueryCompilationErrorsSuite
           |order by a, b
           |""".stripMargin), Row(1, 2) :: Nil)
 
-    checkErrorClass(
+    checkError(
       exception = intercept[AnalysisException] {
         sql(
           """select distinct struct.a, struct.b
@@ -436,19 +435,7 @@ class QueryCompilationErrorsSuite
             |""".stripMargin)
       },
       errorClass = "UNRESOLVED_COLUMN",
-      msg = """A column or function parameter with name `struct`.`a` cannot be 
resolved. """ +
-        """Did you mean one of the following\? \[`a`, `b`\]; line 6 pos 9;
-           |'Sort \['struct.a ASC NULLS FIRST, 'struct.b ASC NULLS FIRST\], true
-           |\+\- Distinct
-           |   \+\- Project \[struct#\w+\.a AS a#\w+, struct#\w+\.b AS b#\w+\]
-           |      \+\- SubqueryAlias tmp
-           |         \+\- Union false, false
-            |            :\- Project \[named_struct\(a, 1, b, 2, c, 3\) AS struct#\w+\]
-           |            :  \+\- OneRowRelation
-            |            \+\- Project \[named_struct\(a, 1, b, 2, c, 4\) AS struct#\w+\]
-           |               \+\- OneRowRelation
-           |""".stripMargin,
-      matchMsg = true)
+      parameters = Map("objectName" -> "`struct`.`a`", "objectList" -> "`a`, 
`b`"))
   }
 
   test("UNRESOLVED_COLUMN - SPARK-21335: support un-aliased subquery") {
@@ -456,21 +443,12 @@ class QueryCompilationErrorsSuite
       Seq(1 -> "a").toDF("i", "j").createOrReplaceTempView("v")
       checkAnswer(sql("SELECT i from (SELECT i FROM v)"), Row(1))
 
-      checkErrorClass(
+      checkError(
         exception = intercept[AnalysisException](sql("SELECT v.i from (SELECT 
i FROM v)")),
         errorClass = "UNRESOLVED_COLUMN",
-        msg = "A column or function parameter with name `v`.`i` cannot be 
resolved. " +
-          """Did you mean one of the following\? """ +
-          """\[`__auto_generated_subquery_name`.`i`\]; line 1 pos 7;
-            |'Project \['v.i\]
-            |\+\- SubqueryAlias __auto_generated_subquery_name
-            |   \+\- Project \[i#\w+\]
-            |      \+\- SubqueryAlias v
-            |         \+\- View \(`v`, \[i#\w+,j#\w+\]\)
-            |            \+\- Project \[_\w+#\w+ AS i#\w+, _\w+#\w+ AS j#\w+\]
-            |               \+\- LocalRelation \[_\w+#\w+, _\w+#\w+\]
-            |""".stripMargin,
-        matchMsg = true)
+        parameters = Map(
+          "objectName" -> "`v`.`i`",
+          "objectList" -> "`__auto_generated_subquery_name`.`i`"))
 
       checkAnswer(sql("SELECT __auto_generated_subquery_name.i from (SELECT i 
FROM v)"), Row(1))
     }
@@ -521,10 +499,10 @@ class QueryCompilationErrorsSuite
       val e = intercept[AnalysisException] {
         sql("ALTER TABLE t ADD COLUMNS (m.n int)")
       }
-      checkErrorClass(
+      checkError(
         exception = e,
         errorClass = "INVALID_FIELD_NAME",
-        msg = "Field name `m`.`n` is invalid: `m` is not a struct.; line 1 pos 
27")
+        parameters = Map("fieldName" -> "`m`.`n`", "path" -> "`m`"))
     }
   }
 
@@ -555,8 +533,7 @@ class QueryCompilationErrorsSuite
       exception = e,
       errorClass = "UNSUPPORTED_DESERIALIZER",
       errorSubClass = Some("DATA_TYPE_MISMATCH"),
-      parameters = Map("desiredType" -> "\"ARRAY\"", "dataType" -> "\"INT\""),
-      sqlState = None)
+      parameters = Map("desiredType" -> "\"ARRAY\"", "dataType" -> "\"INT\""))
   }
 
   test("UNSUPPORTED_DESERIALIZER: " +
@@ -571,8 +548,7 @@ class QueryCompilationErrorsSuite
       errorClass = "UNSUPPORTED_DESERIALIZER",
       errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
       parameters = Map("schema" -> "\"STRUCT<a: STRING, b: INT>\"",
-        "ordinal" -> "3"),
-      sqlState = None)
+        "ordinal" -> "3"))
 
     val e2 = intercept[AnalysisException] {
       ds.as[Tuple1[String]]
@@ -582,8 +558,7 @@ class QueryCompilationErrorsSuite
       errorClass = "UNSUPPORTED_DESERIALIZER",
       errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
       parameters = Map("schema" -> "\"STRUCT<a: STRING, b: INT>\"",
-        "ordinal" -> "1"),
-      sqlState = None)
+        "ordinal" -> "1"))
   }
 
   test("UNSUPPORTED_GENERATOR: " +
@@ -596,8 +571,7 @@ class QueryCompilationErrorsSuite
       exception = e,
       errorClass = "UNSUPPORTED_GENERATOR",
       errorSubClass = Some("NESTED_IN_EXPRESSIONS"),
-      parameters = Map("expression" -> "\"(explode(array(1, 2, 3)) + 1)\""),
-      sqlState = None)
+      parameters = Map("expression" -> "\"(explode(array(1, 2, 3)) + 1)\""))
   }
 
   test("UNSUPPORTED_GENERATOR: only one generator allowed") {
@@ -610,8 +584,7 @@ class QueryCompilationErrorsSuite
       errorClass = "UNSUPPORTED_GENERATOR",
       errorSubClass = Some("MULTI_GENERATOR"),
       parameters = Map("clause" -> "SELECT", "num" -> "2",
-        "generators" -> "\"explode(array(1, 2, 3))\", \"explode(array(1, 2, 
3))\""),
-      sqlState = None)
+        "generators" -> "\"explode(array(1, 2, 3))\", \"explode(array(1, 2, 
3))\""))
   }
 
   test("UNSUPPORTED_GENERATOR: generators are not supported outside the SELECT 
clause") {
@@ -623,8 +596,7 @@ class QueryCompilationErrorsSuite
       exception = e,
       errorClass = "UNSUPPORTED_GENERATOR",
       errorSubClass = Some("OUTSIDE_SELECT"),
-      parameters = Map("plan" -> "'Sort [explode(array(1, 2, 3)) ASC NULLS 
FIRST], true"),
-      sqlState = None)
+      parameters = Map("plan" -> "'Sort [explode(array(1, 2, 3)) ASC NULLS 
FIRST], true"))
   }
 
   test("UNSUPPORTED_GENERATOR: not a generator") {
@@ -636,14 +608,13 @@ class QueryCompilationErrorsSuite
          |LATERAL VIEW array_contains(value, 1) AS explodedvalue""".stripMargin).collect()
     )
 
-    checkErrorClass(
+    checkError(
       exception = e,
       errorClass = "UNSUPPORTED_GENERATOR",
       errorSubClass = Some("NOT_GENERATOR"),
-      msg = """The generator is not supported: `array_contains` is expected to 
be a generator. """ +
-        "However, its class is 
org.apache.spark.sql.catalyst.expressions.ArrayContains, " +
-        "which is not a generator.; line 4 pos 0"
-    )
+      parameters = Map(
+        "functionName" -> "`array_contains`",
+        "classCanonicalName" -> 
"org.apache.spark.sql.catalyst.expressions.ArrayContains"))
   }
 }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
index d78a6a91959..ee13f15ad71 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
@@ -22,27 +22,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.test.SharedSparkSession
 
 trait QueryErrorsSuiteBase extends SharedSparkSession {
-  def checkErrorClass(
-      exception: Exception with SparkThrowable,
-      errorClass: String,
-      errorSubClass: Option[String] = None,
-      msg: String,
-      sqlState: Option[String] = None,
-      matchMsg: Boolean = false): Unit = {
-    assert(exception.getErrorClass === errorClass)
-    sqlState.foreach(state => exception.getSqlState === state)
-    val fullErrorClass = if (errorSubClass.isDefined) {
-      errorClass + "." + errorSubClass.get
-    } else {
-     errorClass
-    }
-    if (matchMsg) {
-      assert(exception.getMessage.matches(s"""\\[$fullErrorClass\\] """ + msg),
-        "exception is: " + exception.getMessage)
-    } else {
-      assert(exception.getMessage === s"""[$fullErrorClass] """ + msg)
-    }
-  }
 
   def validateParsingError(
       sqlText: String,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 36349c5e1f2..d9d76f6567e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -40,21 +40,13 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
   }
 
   test("DIVIDE_BY_ZERO: can't divide an integer by zero") {
-    checkErrorClass(
+    checkError(
       exception = intercept[SparkArithmeticException] {
         sql("select 6/0").collect()
       },
       errorClass = "DIVIDE_BY_ZERO",
-      msg =
-        "Division by zero. Use `try_divide` to tolerate divisor being 0 and 
return NULL instead. " +
-          "If necessary set " +
-        s"""$ansiConf to "false" to bypass this error.""" +
-        """
-          |== SQL(line 1, position 8) ==
-          |select 6/0
-          |       ^^^
-          |""".stripMargin,
-      sqlState = Some("22012"))
+      sqlState = "22012",
+      parameters = Map("config" -> ansiConf))
   }
 
   test("INTERVAL_DIVIDED_BY_ZERO: interval divided by zero") {
@@ -73,25 +65,22 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
         sql("select make_timestamp(2012, 11, 30, 9, 19, 
60.66666666)").collect()
       },
       errorClass = "INVALID_FRACTION_OF_SECOND",
-      parameters = Map("ansiConfig" -> ansiConf),
-      sqlState = "22023")
+      sqlState = "22023",
+      parameters = Map("ansiConfig" -> ansiConf))
   }
 
   test("CANNOT_CHANGE_DECIMAL_PRECISION: cast string to decimal") {
-    checkErrorClass(
+    checkError(
       exception = intercept[SparkArithmeticException] {
         sql("select CAST('66666666666666.666' AS DECIMAL(8, 1))").collect()
       },
       errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
-      msg =
-        "Decimal(expanded, 66666666666666.666, 17, 3) cannot be represented as 
Decimal(8, 1). " +
-        s"""If necessary set $ansiConf to "false" to bypass this error.""" +
-        """
-          |== SQL(line 1, position 8) ==
-          |select CAST('66666666666666.666' AS DECIMAL(8, 1))
-          |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-          |""".stripMargin,
-      sqlState = Some("22005"))
+      sqlState = "22005",
+      parameters = Map(
+        "value" -> "Decimal(expanded, 66666666666666.666, 17, 3)",
+        "precision" -> "8",
+        "scale" -> "1",
+        "config" -> ansiConf))
   }
 
   test("INVALID_ARRAY_INDEX: get element from array") {
@@ -115,38 +104,27 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
   }
 
   test("MAP_KEY_DOES_NOT_EXIST: key does not exist in element_at") {
-    val e = intercept[SparkNoSuchElementException] {
-      sql("select element_at(map(1, 'a', 2, 'b'), 3)").collect()
-    }
-    checkErrorClass(
-      exception = e,
+    checkError(
+      exception = intercept[SparkNoSuchElementException] {
+        sql("select element_at(map(1, 'a', 2, 'b'), 3)").collect()
+      },
       errorClass = "MAP_KEY_DOES_NOT_EXIST",
-      msg = "Key 3 does not exist. Use `try_element_at` to tolerate 
non-existent key and return " +
-        "NULL instead. " +
-        s"""If necessary set $ansiConf to "false" to bypass this error.""" +
-        """
-          |== SQL(line 1, position 8) ==
-          |select element_at(map(1, 'a', 2, 'b'), 3)
-          |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-          |""".stripMargin
-    )
+      parameters = Map(
+        "keyValue" -> "3",
+        "config" -> ansiConf))
   }
 
   test("CAST_INVALID_INPUT: cast string to double") {
-    checkErrorClass(
+    checkError(
       exception = intercept[SparkNumberFormatException] {
         sql("select CAST('111111111111xe23' AS DOUBLE)").collect()
       },
       errorClass = "CAST_INVALID_INPUT",
-      msg = """The value '111111111111xe23' of the type "STRING" cannot be 
cast to "DOUBLE" """ +
-        "because it is malformed. Correct the value as per the syntax, " +
-        "or change its target type. Use `try_cast` to tolerate malformed input 
and return " +
-        "NULL instead. If necessary set " +
-        s"""$ansiConf to \"false\" to bypass this error.
-          |== SQL(line 1, position 8) ==
-          |select CAST('111111111111xe23' AS DOUBLE)
-          |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-          |""".stripMargin)
+      parameters = Map(
+        "expression" -> "'111111111111xe23'",
+        "sourceType" -> "\"STRING\"",
+        "targetType" -> "\"DOUBLE\"",
+        "ansiConfig" -> ansiConf))
   }
 
   test("CANNOT_PARSE_TIMESTAMP: parse string to timestamp") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index c540f90a9f5..6cfe59c71f4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -392,11 +392,9 @@ class QueryExecutionErrorsSuite
     checkError(
       exception = e1.getCause.asInstanceOf[SparkException],
       errorClass = "FAILED_EXECUTE_UDF",
-      errorSubClass = None,
       parameters = Map("functionName" -> 
"QueryExecutionErrorsSuite\\$\\$Lambda\\$\\d+/\\w+",
         "signature" -> "string, int",
         "result" -> "string"),
-      sqlState = None,
       matchPVals = true)
   }
 
@@ -469,11 +467,9 @@ class QueryExecutionErrorsSuite
           checkError(
             exception = e.getCause.asInstanceOf[SparkSecurityException],
             errorClass = "RESET_PERMISSION_TO_ORIGINAL",
-            errorSubClass = None,
             parameters = Map("permission" -> ".+",
               "path" -> ".+",
               "message" -> ".+"),
-            sqlState = None,
             matchPVals = true)
       }
     }
@@ -605,9 +601,7 @@ class QueryExecutionErrorsSuite
           aggregated.count()
         },
         errorClass = "INVALID_BUCKET_FILE",
-        errorSubClass = None,
         parameters = Map("path" -> ".+"),
-        sqlState = None,
         matchPVals = true)
     }
   }
@@ -619,7 +613,6 @@ class QueryExecutionErrorsSuite
         sql("select (select a from (select 1 as a union all select 2 as a) t) 
as b").collect()
       },
       errorClass = "MULTI_VALUE_SUBQUERY_ERROR",
-      errorSubClass = None,
       parameters = Map("plan" ->
           """Subquery subquery#\w+, \[id=#\w+\]
             |\+\- AdaptiveSparkPlan isFinalPlan=true
@@ -636,7 +629,6 @@ class QueryExecutionErrorsSuite
             |      \+\- Project \[\w+ AS a#\w+\]
             |         \+\- Scan OneRowRelation\[\]
             |""".stripMargin),
-      sqlState = None,
       matchPVals = true)
   }
 
@@ -646,7 +638,7 @@ class QueryExecutionErrorsSuite
         sql("select element_at(array(1, 2, 3, 4, 5), 0)").collect()
       ),
       errorClass = "ELEMENT_AT_BY_INDEX_ZERO",
-      Map.empty
+      parameters = Map.empty
     )
   }
 

