This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 6c009180a75 [SPARK-37945][SQL][CORE] Use error classes in the
execution errors of arithmetic ops
6c009180a75 is described below
commit 6c009180a75ae8e548ef4395211b13ee25ab60a9
Author: Khalid Mammadov
AuthorDate: Sun Oct 23 11:44:49 2022 +0500
[SPARK-37945][SQL][CORE] Use error classes in the execution errors of
arithmetic ops
### What changes were proposed in this pull request?
Migrate the following errors in QueryExecutionErrors onto use error classes:
unscaledValueTooLargeForPrecisionError ->
UNSCALED_VALUE_TOO_LARGE_FOR_PRECISION
decimalPrecisionExceedsMaxPrecisionError ->
DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION
integerOverflowError -> INTEGER_OVERFLOW
outOfDecimalTypeRangeError -> OUT_OF_DECIMAL_TYPE_RANGE
### Why are the changes needed?
Porting ArithmeticExceptions to the new error framework
### Does this PR introduce _any_ user-facing change?
Yes, errors will indicate that it's controlled Spark exception
### How was this patch tested?
./build/sbt "catalyst/testOnly org.apache.spark.sql.types.DecimalSuite"
./build/sbt "sql/testOnly
org.apache.spark.sql.execution.streaming.sources.RateStreamProviderSuite"
./build/sbt "core/testOnly org.apache.spark.SparkThrowableSuite"
Closes #38273 from khalidmammadov/error_class2.
Lead-authored-by: Khalid Mammadov
Co-authored-by: khalidmammadov
Signed-off-by: Max Gekk
---
core/src/main/resources/error/error-classes.json | 40 ++--
.../spark/sql/errors/QueryExecutionErrors.scala| 41 +
.../catalyst/expressions/CastWithAnsiOnSuite.scala | 4 +-
.../org/apache/spark/sql/types/DecimalSuite.scala | 53 +-
.../sources/RateStreamMicroBatchStream.scala | 8 ++--
.../sources/RateStreamProviderSuite.scala | 44 +-
6 files changed, 145 insertions(+), 45 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json
b/core/src/main/resources/error/error-classes.json
index 240cf5f4eea..5f4db145479 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -276,6 +276,11 @@
],
"sqlState" : "22008"
},
+ "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : {
+"message" : [
+  "Decimal precision <precision> exceeds max precision <maxPrecision>."
+]
+ },
"DEFAULT_DATABASE_NOT_EXISTS" : {
"message" : [
"Default database <defaultDatabase> does not exist, please create it
first or change default database to '<defaultDatabase>'."
@@ -416,6 +421,16 @@
}
}
},
+ "INCORRECT_END_OFFSET" : {
+"message" : [
+  "Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but it's
<endSeconds> now."
+]
+ },
+ "INCORRECT_RAMP_UP_RATE" : {
+"message" : [
+  "Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but
'rampUpTimeSeconds' is <rampUpTimeSeconds>."
+]
+ },
"INDEX_ALREADY_EXISTS" : {
"message" : [
"Cannot create the index because it already exists. ."
@@ -605,6 +620,11 @@
],
"sqlState" : "22005"
},
+ "OUT_OF_DECIMAL_TYPE_RANGE" : {
+"message" : [
+  "Out of decimal type range: <value>."
+]
+ },
"PARSE_CHAR_MISSING_LENGTH" : {
"message" : [
"DataType <type> requires a length parameter, for example <type>(10).
Please specify the length."
@@ -814,6 +834,11 @@
},
"sqlState" : "42000"
},
+ "UNSCALED_VALUE_TOO_LARGE_FOR_PRECISION" : {
+"message" : [
+  "Unscaled value too large for precision <precision>. If necessary set
<config> to false to bypass this error."
+]
+ },
"UNSUPPORTED_DATATYPE" : {
"message" : [
"Unsupported data type <typeName>"
@@ -3707,21 +3732,6 @@
"Unexpected: "
]
},
- "_LEGACY_ERROR_TEMP_2117" : {
-"message" : [
- "Unscaled value too large for precision. If necessary set
to false to bypass this error."
-]
- },
- "_LEGACY_ERROR_TEMP_2118" : {
-"message" : [
- "Decimal precision exceeds max precision "
-]
- },
- "_LEGACY_ERROR_TEMP_2119" : {
-"message" : [
- "out of decimal type range: "
-]
- },
"_LEGACY_ERROR_TEMP_2120" : {
"message" : [
"Do not support array of type ."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 5edffc87b84..4aedfb3b03d 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1260,8 +1260,9 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase {
def unscaledValueTooLargeForPrecisionError(): SparkArithmeticException = {
new SparkArithmeticExce