This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3da52fb4490e [SPARK-47798][SQL] Enrich the error message for the 
reading failures of decimal values
3da52fb4490e is described below

commit 3da52fb4490e2520ab22291742781826d0976e08
Author: Kent Yao <y...@apache.org>
AuthorDate: Thu Apr 11 12:03:37 2024 +0800

    [SPARK-47798][SQL] Enrich the error message for the reading failures of 
decimal values
    
    ### What changes were proposed in this pull request?
    
    When parsing/reading a decimal column/field from json, jdbc, etc., if the 
column or field contains some values exceeding the expected precision, we only 
get a simple error like `Decimal precision <precision> exceeds max precision 
<maxPrecision>.`. This kind of message is sufficient for finding issues when 
dealing with literals or DDL schemas, but insufficient for reading or parsing 
values for storage systems.
    
    This PR changes the message to `The <roundedValue> rounded half up from 
<originalValue> cannot be represented as Decimal(<precision>, <scale>).`
    
    The `roundedValue` represents the value after applying the `scale`; because we 
round HALF_UP, its integral length might differ from that of the original value.
    
    ### Why are the changes needed?
    
    - Currently, we use DataTypeErrors instead of QueryExecutionErrors for 
Decimal.apply which is used extensively on the execution side.
    - Enrich the error message for better debugging experience
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, error class and message changes
    
    ### How was this patch tested?
    
    new tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    no
    
    Closes #45981 from yaooqinn/SPARK-47798.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../src/main/resources/error/error-classes.json    | 14 +++++++-
 .../spark/sql/avro/AvroLogicalTypeSuite.scala      |  2 +-
 .../spark/sql/jdbc/OracleIntegrationSuite.scala    |  4 ++-
 ...tions-numeric-value-out-of-range-error-class.md | 41 ++++++++++++++++++++++
 docs/sql-error-conditions.md                       |  6 ++--
 .../apache/spark/sql/errors/DataTypeErrors.scala   |  2 +-
 .../scala/org/apache/spark/sql/types/Decimal.scala | 11 ++++--
 .../spark/sql/errors/QueryExecutionErrors.scala    |  2 +-
 .../org/apache/spark/sql/types/DecimalSuite.scala  | 22 ++++++++----
 .../resources/sql-tests/results/ansi/cast.sql.out  |  4 +--
 .../ansi/decimalArithmeticOperations.sql.out       | 20 +++++------
 .../sql-tests/results/ansi/interval.sql.out        |  2 +-
 .../test/resources/sql-tests/results/cast.sql.out  |  2 +-
 .../sql/errors/QueryExecutionAnsiErrorsSuite.scala |  4 +--
 14 files changed, 103 insertions(+), 33 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json 
b/common/utils/src/main/resources/error/error-classes.json
index 45a1ec5e1e84..62581116000b 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -3271,8 +3271,20 @@
   },
   "NUMERIC_VALUE_OUT_OF_RANGE" : {
     "message" : [
-      "<value> cannot be represented as Decimal(<precision>, <scale>). If 
necessary set <config> to \"false\" to bypass this error, and return NULL 
instead."
+      ""
     ],
+    "subClass" : {
+      "WITHOUT_SUGGESTION" : {
+        "message" : [
+          "The <roundedValue> rounded half up from <originalValue> cannot be 
represented as Decimal(<precision>, <scale>)."
+        ]
+      },
+      "WITH_SUGGESTION" : {
+        "message" : [
+          "<value> cannot be represented as Decimal(<precision>, <scale>). If 
necessary set <config> to \"false\" to bypass this error, and return NULL 
instead."
+        ]
+      }
+    },
     "sqlState" : "22003"
   },
   "NUM_COLUMNS_MISMATCH" : {
diff --git 
a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroLogicalTypeSuite.scala
 
b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroLogicalTypeSuite.scala
index 0a6ebb9e0a7f..429f3c0deca6 100644
--- 
a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroLogicalTypeSuite.scala
+++ 
b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroLogicalTypeSuite.scala
@@ -439,7 +439,7 @@ abstract class AvroLogicalTypeSuite extends QueryTest with 
SharedSparkSession {
       assert(ex.getErrorClass.startsWith("FAILED_READ_FILE"))
       checkError(
         exception = ex.getCause.asInstanceOf[SparkArithmeticException],
-        errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+        errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
         parameters = Map(
           "value" -> "0",
           "precision" -> "4",
diff --git 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index d2b86d7cfea6..c000b0e123be 100644
--- 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -355,7 +355,9 @@ class OracleIntegrationSuite extends 
DockerJDBCIntegrationSuite with SharedSpark
     val e = intercept[org.apache.spark.SparkArithmeticException] {
       spark.read.jdbc(jdbcUrl, "tableWithCustomSchema", new 
Properties()).collect()
     }
-    assert(e.getMessage.contains("Decimal precision 39 exceeds max precision 
38"))
+    assert(e.getMessage.contains(
+      "The 12312321321321312312312312123.0000000000 rounded half up from" +
+        " 12312321321321312312312312123 cannot be represented as Decimal(38, 
10)"))
 
     // custom schema can read data
     val props = new Properties()
diff --git 
a/docs/sql-error-conditions-numeric-value-out-of-range-error-class.md 
b/docs/sql-error-conditions-numeric-value-out-of-range-error-class.md
new file mode 100644
index 000000000000..690bbeec0747
--- /dev/null
+++ b/docs/sql-error-conditions-numeric-value-out-of-range-error-class.md
@@ -0,0 +1,41 @@
+---
+layout: global
+title: NUMERIC_VALUE_OUT_OF_RANGE error class
+displayTitle: NUMERIC_VALUE_OUT_OF_RANGE error class
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+
+<!--
+  DO NOT EDIT THIS FILE.
+  It was generated automatically by `org.apache.spark.SparkThrowableSuite`.
+-->
+
+[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+
+
+This error class has the following derived error classes:
+
+## WITHOUT_SUGGESTION
+
+The `<roundedValue>` rounded half up from `<originalValue>` cannot be 
represented as Decimal(`<precision>`, `<scale>`).
+
+## WITH_SUGGESTION
+
+`<value>` cannot be represented as Decimal(`<precision>`, `<scale>`). If 
necessary set `<config>` to "false" to bypass this error, and return NULL 
instead.
+
+
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index bb25a4c7f9f0..827ee04b7606 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -1873,11 +1873,13 @@ Execute immediate requires a non-null variable as the 
query string, but the prov
 
 The value `<value>` cannot be interpreted as a numeric since it has more than 
38 digits.
 
-### NUMERIC_VALUE_OUT_OF_RANGE
+### 
[NUMERIC_VALUE_OUT_OF_RANGE](sql-error-conditions-numeric-value-out-of-range-error-class.html)
 
 [SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
 
-`<value>` cannot be represented as Decimal(`<precision>`, `<scale>`). If 
necessary set `<config>` to "false" to bypass this error, and return NULL 
instead.
+
+
+For more details see 
[NUMERIC_VALUE_OUT_OF_RANGE](sql-error-conditions-numeric-value-out-of-range-error-class.html)
 
 ### NUM_COLUMNS_MISMATCH
 
diff --git 
a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala 
b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
index b74d98ff1899..63413b69f210 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -207,7 +207,7 @@ private[sql] object DataTypeErrors extends 
DataTypeErrorsBase {
       decimalScale: Int,
       context: QueryContext): ArithmeticException = {
     new SparkArithmeticException(
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       messageParameters = Map(
         "value" -> value.toPlainString,
         "precision" -> decimalPrecision.toString,
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/Decimal.scala 
b/sql/api/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 0bcbefaa5482..6de8570b1422 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -21,7 +21,7 @@ import java.math.{BigDecimal => JavaBigDecimal, BigInteger, 
MathContext, Roundin
 
 import scala.util.Try
 
-import org.apache.spark.QueryContext
+import org.apache.spark.{QueryContext, SparkArithmeticException}
 import org.apache.spark.annotation.Unstable
 import org.apache.spark.sql.errors.DataTypeErrors
 import org.apache.spark.sql.internal.SqlApiConf
@@ -120,8 +120,13 @@ final class Decimal extends Ordered[Decimal] with 
Serializable {
     DecimalType.checkNegativeScale(scale)
     this.decimalVal = decimal.setScale(scale, ROUND_HALF_UP)
     if (decimalVal.precision > precision) {
-      throw DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(
-        decimalVal.precision, precision)
+      throw new SparkArithmeticException(
+        errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITHOUT_SUGGESTION",
+        messageParameters = Map(
+          "roundedValue" -> decimalVal.toString,
+          "originalValue" -> decimal.toString,
+          "precision" -> precision.toString,
+          "scale" -> scale.toString), Array.empty)
     }
     this.longVal = 0L
     this._precision = precision
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 9ad51b9552c4..ceb90fe6bea5 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -106,7 +106,7 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
       decimalScale: Int,
       context: QueryContext = null): ArithmeticException = {
     new SparkArithmeticException(
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       messageParameters = Map(
         "value" -> value.toPlainString,
         "precision" -> decimalPrecision.toString,
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index ab3f831fbcbd..e6d915903f9b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -65,7 +65,7 @@ class DecimalSuite extends SparkFunSuite with 
PrivateMethodTester with SQLHelper
 
     checkError(
       exception = intercept[SparkArithmeticException](Decimal(170L, 2, 1)),
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       parameters = Map(
         "value" -> "0",
         "precision" -> "2",
@@ -73,7 +73,7 @@ class DecimalSuite extends SparkFunSuite with 
PrivateMethodTester with SQLHelper
         "config" -> "\"spark.sql.ansi.enabled\""))
     checkError(
       exception = intercept[SparkArithmeticException](Decimal(170L, 2, 0)),
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       parameters = Map(
         "value" -> "0",
         "precision" -> "2",
@@ -81,15 +81,23 @@ class DecimalSuite extends SparkFunSuite with 
PrivateMethodTester with SQLHelper
         "config" -> "\"spark.sql.ansi.enabled\""))
     checkError(
       exception = 
intercept[SparkArithmeticException](Decimal(BigDecimal("10.030"), 2, 1)),
-      errorClass = "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
-      parameters = Map("precision" -> "3", "maxPrecision" -> "2"))
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITHOUT_SUGGESTION",
+      parameters = Map(
+        "roundedValue" -> "10.0",
+        "originalValue" -> "10.030",
+        "precision" -> "2",
+        "scale" -> "1"))
     checkError(
       exception = 
intercept[SparkArithmeticException](Decimal(BigDecimal("-9.95"), 2, 1)),
-      errorClass = "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
-      parameters = Map("precision" -> "3", "maxPrecision" -> "2"))
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITHOUT_SUGGESTION",
+      parameters = Map(
+        "roundedValue" -> "-10.0",
+        "originalValue" -> "-9.95",
+        "precision" -> "2",
+        "scale" -> "1"))
     checkError(
       exception = intercept[SparkArithmeticException](Decimal(1e17.toLong, 17, 
0)),
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       parameters = Map(
         "value" -> "0",
         "precision" -> "17",
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 4a60af0411aa..7dd7180165f2 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -1146,7 +1146,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -1611,7 +1611,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
index 9593291fae21..fd30ecf20302 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
@@ -74,7 +74,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -99,7 +99,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -124,7 +124,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -149,7 +149,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -198,7 +198,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -223,7 +223,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -248,7 +248,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -273,7 +273,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -298,7 +298,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
@@ -331,7 +331,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 22fa0213995f..4e220ba9885c 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -879,7 +879,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out 
b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 8536a564d80e..6f74c63da354 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -873,7 +873,7 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 {
-  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE",
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
   "sqlState" : "22003",
   "messageParameters" : {
     "config" : "\"spark.sql.ansi.enabled\"",
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index a0b4d345628e..83495e067024 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -110,7 +110,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
       exception = intercept[SparkArithmeticException] {
         sql("select CAST('66666666666666.666' AS DECIMAL(8, 1))").collect()
       },
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       sqlState = "22003",
       parameters = Map(
         "value" -> "66666666666666.666",
@@ -126,7 +126,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
       exception = intercept[SparkArithmeticException] {
         OneRowRelation().select(lit("66666666666666.666").cast("DECIMAL(8, 
1)")).collect()
       },
-      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE",
+      errorClass = "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
       sqlState = "22003",
       parameters = Map(
         "value" -> "66666666666666.666",


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to