This is an automated email from the ASF dual-hosted git repository.
hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 8138db757764 [SPARK-54887][CONNECT] Add optional sqlstate to
sparkthrowables
8138db757764 is described below
commit 8138db757764427ac59199802d92ee302b90129c
Author: Garland Zhang <[email protected]>
AuthorDate: Fri Jan 9 09:59:14 2026 -0400
[SPARK-54887][CONNECT] Add optional sqlstate to sparkthrowables
### What changes were proposed in this pull request?
Spark connect client cannot properly read the error details when the server
has a new error. This is because it relies on an internal ErrorClassReader
which pulls from a local json file that contains the error details at that
time. However, this JSON file can become out of date when the server throws a
new error.
This PR ensures all exceptions will contain an optional SQL state property
and overrides the getSqlState method to first check if the SQL state is
defined, otherwise use the original functionality (i.e. read from the JSON file).
To ensure backward compatibility, all sql states are initially None.
We will also make a client-side change in
https://github.com/apache/spark/pull/52589 so that when the client receives an
exception, it will read the SQL state first when reconstructing the exception
and populate that exception's field. Now older clients can carry SQL state
information from newer servers!
### Why are the changes needed?
### Does this PR introduce _any_ user-facing change?
### How was this patch tested?
### Was this patch authored or co-authored using generative AI tooling?
Absolutely. Generated-by: Claude
Closes #52696 from garlandz-db/add-optional-sqlstate-to-sparkthrowables.
Authored-by: Garland Zhang <[email protected]>
Signed-off-by: Herman van Hövell <[email protected]>
---
.../main/java/org/apache/spark/SparkThrowable.java | 5 +
.../scala/org/apache/spark/SparkException.scala | 112 ++++++++++++++++----
.../sql/streaming/StreamingQueryException.scala | 5 +-
.../spark/sql/kafka010/KafkaExceptions.scala | 10 +-
.../apache/spark/memory/SparkOutOfMemoryError.java | 12 +++
.../spark/SparkFileAlreadyExistsException.scala | 5 +-
.../org/apache/spark/SparkThrowableSuite.scala | 113 +++++++++++++++++++++
.../sql/exceptions/SqlScriptingException.scala | 6 +-
.../sql/connect/common/InvalidPlanInput.scala | 5 +-
9 files changed, 245 insertions(+), 28 deletions(-)
diff --git a/common/utils/src/main/java/org/apache/spark/SparkThrowable.java
b/common/utils/src/main/java/org/apache/spark/SparkThrowable.java
index 26d66ae3433a..463197ef46fb 100644
--- a/common/utils/src/main/java/org/apache/spark/SparkThrowable.java
+++ b/common/utils/src/main/java/org/apache/spark/SparkThrowable.java
@@ -51,6 +51,11 @@ public interface SparkThrowable {
// Portable error identifier across SQL engines
// If null, error class or SQLSTATE is not set
+ /**
+ * @deprecated Override this method to provide a custom SQL state.
+ * The default implementation uses the error class reader.
+ */
+ @Deprecated
default String getSqlState() {
return SparkThrowableHelper.getSqlState(this.getCondition());
}
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
index f438c6225347..fc4b3e50be4b 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -29,7 +29,8 @@ class SparkException(
cause: Throwable,
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext] = Array.empty)
+ context: Array[QueryContext] = Array.empty,
+ sqlState: Option[String] = None)
extends Exception(message, cause) with SparkThrowable {
def this(message: String, cause: Throwable) =
@@ -38,6 +39,15 @@ class SparkException(
def this(message: String) =
this(message = message, cause = null)
+ def this(
+ message: String,
+ cause: Throwable,
+ errorClass: Option[String],
+ messageParameters: Map[String, String],
+ context: Array[QueryContext]
+ ) =
+ this(message, cause, errorClass, messageParameters, context, None)
+
def this(
errorClass: String,
messageParameters: Map[String, String],
@@ -71,6 +81,8 @@ class SparkException(
override def getCondition: String = errorClass.orNull
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -170,7 +182,8 @@ private[spark] class SparkUpgradeException private(
message: String,
cause: Option[Throwable],
errorClass: Option[String],
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends RuntimeException(message, cause.orNull) with SparkThrowable {
def this(
@@ -188,6 +201,8 @@ private[spark] class SparkUpgradeException private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
/**
@@ -197,7 +212,8 @@ private[spark] class SparkArithmeticException private(
message: String,
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext])
+ context: Array[QueryContext],
+ sqlState: Option[String] = None)
extends ArithmeticException(message) with SparkThrowable {
def this(
@@ -221,6 +237,9 @@ private[spark] class SparkArithmeticException private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -230,7 +249,8 @@ private[spark] class SparkArithmeticException private(
private[spark] class SparkUnsupportedOperationException private(
message: String,
errorClass: Option[String],
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends UnsupportedOperationException(message) with SparkThrowable {
def this(
@@ -259,6 +279,8 @@ private[spark] class SparkUnsupportedOperationException
private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
private[spark] object SparkUnsupportedOperationException {
@@ -281,7 +303,8 @@ private[spark] object SparkUnsupportedOperationException {
private[spark] class SparkClassNotFoundException(
errorClass: String,
messageParameters: Map[String, String],
- cause: Throwable = null)
+ cause: Throwable = null,
+ sqlState: Option[String] = None)
extends ClassNotFoundException(
SparkThrowableHelper.getMessage(errorClass, messageParameters), cause)
with SparkThrowable {
@@ -289,6 +312,8 @@ private[spark] class SparkClassNotFoundException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
/**
@@ -297,7 +322,8 @@ private[spark] class SparkClassNotFoundException(
private[spark] class SparkConcurrentModificationException(
errorClass: String,
messageParameters: Map[String, String],
- cause: Throwable = null)
+ cause: Throwable = null,
+ sqlState: Option[String] = None)
extends ConcurrentModificationException(
SparkThrowableHelper.getMessage(errorClass, messageParameters), cause)
with SparkThrowable {
@@ -305,6 +331,8 @@ private[spark] class SparkConcurrentModificationException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
/**
@@ -315,7 +343,8 @@ private[spark] class SparkDateTimeException private(
errorClass: Option[String],
messageParameters: Map[String, String],
context: Array[QueryContext],
- cause: Option[Throwable])
+ cause: Option[Throwable],
+ sqlState: Option[String] = None)
extends DateTimeException(message, cause.orNull) with SparkThrowable {
def this(
@@ -355,6 +384,9 @@ private[spark] class SparkDateTimeException private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -363,7 +395,8 @@ private[spark] class SparkDateTimeException private(
*/
private[spark] class SparkFileNotFoundException(
errorClass: String,
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends FileNotFoundException(
SparkThrowableHelper.getMessage(errorClass, messageParameters))
with SparkThrowable {
@@ -371,6 +404,8 @@ private[spark] class SparkFileNotFoundException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
/**
@@ -380,7 +415,8 @@ private[spark] class SparkNumberFormatException private(
message: String,
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext])
+ context: Array[QueryContext],
+ sqlState: Option[String] = None)
extends NumberFormatException(message)
with SparkThrowable {
@@ -405,6 +441,9 @@ private[spark] class SparkNumberFormatException private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -416,7 +455,8 @@ private[spark] class SparkIllegalArgumentException private(
cause: Option[Throwable],
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext])
+ context: Array[QueryContext],
+ sqlState: Option[String] = None)
extends IllegalArgumentException(message, cause.orNull)
with SparkThrowable {
@@ -457,6 +497,9 @@ private[spark] class SparkIllegalArgumentException private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -467,7 +510,8 @@ private[spark] class SparkIllegalStateException(
errorClass: String,
messageParameters: Map[String, String],
context: Array[QueryContext] = Array.empty,
- cause: Throwable = null)
+ cause: Throwable = null,
+ sqlState: Option[String] = None)
extends IllegalStateException(
SparkThrowableHelper.getMessage(errorClass, messageParameters), cause)
with SparkThrowable {
@@ -476,6 +520,8 @@ private[spark] class SparkIllegalStateException(
override def getCondition: String = errorClass
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -484,7 +530,8 @@ private[spark] class SparkRuntimeException private(
cause: Option[Throwable],
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext])
+ context: Array[QueryContext],
+ sqlState: Option[String])
extends RuntimeException(message, cause.orNull) with SparkThrowable {
def this(
@@ -498,13 +545,17 @@ private[spark] class SparkRuntimeException private(
Option(cause),
Option(errorClass),
messageParameters,
- context
+ context,
+ None
)
}
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -513,7 +564,8 @@ private[spark] class SparkPythonException private(
cause: Option[Throwable],
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext])
+ context: Array[QueryContext],
+ sqlState: Option[String])
extends RuntimeException(message, cause.orNull) with SparkThrowable {
def this(
@@ -527,13 +579,17 @@ private[spark] class SparkPythonException private(
Option(cause),
Option(errorClass),
messageParameters,
- context
+ context,
+ None
)
}
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -544,7 +600,8 @@ private[spark] class SparkNoSuchElementException(
errorClass: String,
messageParameters: Map[String, String],
context: Array[QueryContext] = Array.empty,
- summary: String = "")
+ summary: String = "",
+ sqlState: Option[String] = None)
extends NoSuchElementException(
SparkThrowableHelper.getMessage(errorClass, messageParameters, summary))
with SparkThrowable {
@@ -553,6 +610,8 @@ private[spark] class SparkNoSuchElementException(
override def getCondition: String = errorClass
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -561,7 +620,8 @@ private[spark] class SparkNoSuchElementException(
*/
private[spark] class SparkSecurityException(
errorClass: String,
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends SecurityException(
SparkThrowableHelper.getMessage(errorClass, messageParameters))
with SparkThrowable {
@@ -569,6 +629,8 @@ private[spark] class SparkSecurityException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
/**
@@ -578,7 +640,8 @@ private[spark] class SparkArrayIndexOutOfBoundsException
private(
message: String,
errorClass: Option[String],
messageParameters: Map[String, String],
- context: Array[QueryContext])
+ context: Array[QueryContext],
+ sqlState: Option[String] = None)
extends ArrayIndexOutOfBoundsException(message)
with SparkThrowable {
@@ -603,6 +666,9 @@ private[spark] class SparkArrayIndexOutOfBoundsException
private(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass.orNull
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getQueryContext: Array[QueryContext] = context
}
@@ -611,7 +677,8 @@ private[spark] class SparkArrayIndexOutOfBoundsException
private(
*/
private[spark] class SparkSQLException(
errorClass: String,
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends SQLException(
SparkThrowableHelper.getMessage(errorClass, messageParameters))
with SparkThrowable {
@@ -619,6 +686,8 @@ private[spark] class SparkSQLException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
/**
@@ -626,7 +695,8 @@ private[spark] class SparkSQLException(
*/
private[spark] class SparkSQLFeatureNotSupportedException(
errorClass: String,
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends SQLFeatureNotSupportedException(
SparkThrowableHelper.getMessage(errorClass, messageParameters))
with SparkThrowable {
@@ -634,4 +704,6 @@ private[spark] class SparkSQLFeatureNotSupportedException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
diff --git
a/common/utils/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala
b/common/utils/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala
index 1972ef05d875..b30260b8d6cc 100644
---
a/common/utils/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala
+++
b/common/utils/src/main/scala/org/apache/spark/sql/streaming/StreamingQueryException.scala
@@ -39,7 +39,8 @@ class StreamingQueryException private[sql](
val startOffset: String,
val endOffset: String,
errorClass: String,
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends Exception(message, cause) with SparkThrowable {
private[spark] def this(
@@ -86,5 +87,7 @@ class StreamingQueryException private[sql](
override def getCondition: String = errorClass
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
}
diff --git
a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaExceptions.scala
b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaExceptions.scala
index 156bb71d777d..9db9f69c54df 100644
---
a/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaExceptions.scala
+++
b/connector/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaExceptions.scala
@@ -242,14 +242,15 @@ object KafkaExceptions {
private[kafka010] class KafkaIllegalStateException(
errorClass: String,
messageParameters: Map[String, String],
- cause: Throwable = null)
+ cause: Throwable = null,
+ sqlState: Option[String] = None)
extends IllegalStateException(
KafkaExceptionsHelper.errorClassesJsonReader.getErrorMessage(
errorClass, messageParameters), cause)
with SparkThrowable {
override def getSqlState: String =
- KafkaExceptionsHelper.errorClassesJsonReader.getSqlState(errorClass)
+
sqlState.getOrElse(KafkaExceptionsHelper.errorClassesJsonReader.getSqlState(errorClass))
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
@@ -262,14 +263,15 @@ private[kafka010] class KafkaIllegalStateException(
private[kafka010] class KafkaIllegalArgumentException(
errorClass: String,
messageParameters: Map[String, String],
- cause: Throwable = null)
+ cause: Throwable = null,
+ sqlState: Option[String] = None)
extends IllegalArgumentException(
KafkaExceptionsHelper.errorClassesJsonReader.getErrorMessage(
errorClass, messageParameters), cause)
with SparkThrowable {
override def getSqlState: String =
- KafkaExceptionsHelper.errorClassesJsonReader.getSqlState(errorClass)
+
sqlState.getOrElse(KafkaExceptionsHelper.errorClassesJsonReader.getSqlState(errorClass))
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
diff --git
a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
index 0e35ebecfd27..d118603bb69d 100644
--- a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
+++ b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
@@ -31,11 +31,18 @@ import java.util.Map;
public final class SparkOutOfMemoryError extends OutOfMemoryError implements
SparkThrowable {
String errorClass;
Map<String, String> messageParameters;
+ String sqlState;
public SparkOutOfMemoryError(String errorClass, Map<String, String>
messageParameters) {
+ this(errorClass, messageParameters, null);
+ }
+
+ public SparkOutOfMemoryError(String errorClass, Map<String, String>
messageParameters,
+ String sqlState) {
super(SparkThrowableHelper.getMessage(errorClass, messageParameters));
this.errorClass = errorClass;
this.messageParameters = messageParameters;
+ this.sqlState = sqlState;
}
@Override
@@ -47,4 +54,9 @@ public final class SparkOutOfMemoryError extends
OutOfMemoryError implements Spa
public String getCondition() {
return errorClass;
}
+
+ @Override
+ public String getSqlState() {
+ return sqlState != null ? sqlState :
SparkThrowable.super.getSqlState();
+ }
}
diff --git
a/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala
b/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala
index 82a0261f32ae..30e82ecffca0 100644
--- a/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkFileAlreadyExistsException.scala
@@ -26,7 +26,8 @@ import org.apache.hadoop.fs.FileAlreadyExistsException
*/
private[spark] class SparkFileAlreadyExistsException(
errorClass: String,
- messageParameters: Map[String, String])
+ messageParameters: Map[String, String],
+ sqlState: Option[String] = None)
extends FileAlreadyExistsException(
SparkThrowableHelper.getMessage(errorClass, messageParameters))
with SparkThrowable {
@@ -34,4 +35,6 @@ private[spark] class SparkFileAlreadyExistsException(
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
}
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 09e810fbf9ed..9612c7b3eb17 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -687,4 +687,117 @@ class SparkThrowableSuite extends SparkFunSuite {
assert(result == "[TEST_CUSTOM_TEMPLATE] Custom error: " +
"something occurred with somewhere SQLSTATE: 42S01")
}
+
+ test("Custom SQL state takes precedence over error class reader -
SparkException") {
+ // Test with custom SQL state - should return the custom one
+ val exceptionWithCustomSqlState = new SparkException(
+ message = getMessage("CANNOT_PARSE_DECIMAL", Map.empty[String, String]),
+ cause = null,
+ errorClass = Some("CANNOT_PARSE_DECIMAL"),
+ messageParameters = Map.empty[String, String],
+ context = Array.empty,
+ sqlState = Some("CUSTOM"))
+
+ assert(exceptionWithCustomSqlState.getSqlState == "CUSTOM",
+ "Custom SQL state should take precedence")
+
+ // Test without custom SQL state - should fall back to error class reader
+ val exceptionWithoutCustomSqlState = new SparkException(
+ message = getMessage("CANNOT_PARSE_DECIMAL", Map.empty[String, String]),
+ cause = null,
+ errorClass = Some("CANNOT_PARSE_DECIMAL"),
+ messageParameters = Map.empty[String, String],
+ context = Array.empty,
+ sqlState = None)
+
+ assert(exceptionWithoutCustomSqlState.getSqlState == "22018",
+ "Should fall back to error class reader SQL state")
+ }
+
+ test("SparkArithmeticException uses error class reader for SQL state") {
+ // Test that SparkArithmeticException falls back to error class reader
+ val exception = new SparkArithmeticException(
+ errorClass = "DIVIDE_BY_ZERO",
+ messageParameters = Map("config" -> "CONFIG"),
+ context = Array.empty,
+ summary = "")
+
+ assert(exception.getSqlState == "22012",
+ "Should use error class reader SQL state")
+ }
+
+ test("SparkRuntimeException uses error class reader for SQL state") {
+ // Test that SparkRuntimeException falls back to error class reader
+ val exception = new SparkRuntimeException(
+ errorClass = "INTERNAL_ERROR",
+ messageParameters = Map("message" -> "test"))
+
+ assert(exception.getSqlState.startsWith("XX"),
+ "Should use error class reader SQL state")
+ }
+
+ test("SparkIllegalArgumentException uses error class reader for SQL state") {
+ // Test that SparkIllegalArgumentException falls back to error class reader
+ val exception = new SparkIllegalArgumentException(
+ errorClass = "UNSUPPORTED_SAVE_MODE.EXISTENT_PATH",
+ messageParameters = Map("saveMode" -> "TEST"))
+
+ assert(exception.getSqlState == "0A000",
+ "Should use error class reader SQL state")
+ }
+
+ test("Custom SQL state takes precedence - Multiple exception types") {
+ // SparkSQLException
+ val sqlException = new SparkSQLException(
+ errorClass = "CANNOT_PARSE_DECIMAL",
+ messageParameters = Map.empty[String, String],
+ sqlState = Some("CUST1"))
+ assert(sqlException.getSqlState == "CUST1")
+
+ // SparkSecurityException
+ val securityException = new SparkSecurityException(
+ errorClass = "CANNOT_PARSE_DECIMAL",
+ messageParameters = Map.empty[String, String],
+ sqlState = Some("CUST2"))
+ assert(securityException.getSqlState == "CUST2")
+
+ // SparkNumberFormatException
+ val numberFormatException = new SparkNumberFormatException(
+ errorClass = "CANNOT_PARSE_DECIMAL",
+ messageParameters = Map.empty[String, String],
+ context = Array.empty,
+ summary = "")
+ assert(numberFormatException.getSqlState == "22018",
+ "Should use error class reader SQL state when custom not provided")
+ }
+
+ test("Custom SQL state takes precedence - Java exception
(SparkOutOfMemoryError)") {
+ import org.apache.spark.memory.SparkOutOfMemoryError
+
+ // Test without custom SQL state - should fall back to error class reader
+ val errorWithoutCustom = new SparkOutOfMemoryError(
+ "CANNOT_PARSE_DECIMAL",
+ Map.empty[String, String].asJava)
+
+ assert(errorWithoutCustom.getSqlState == "22018",
+ "Should use error class reader SQL state when custom not provided")
+
+ // Test with custom SQL state - should return the custom one
+ val errorWithCustom = new SparkOutOfMemoryError(
+ "CANNOT_PARSE_DECIMAL",
+ Map.empty[String, String].asJava,
+ "CUSTOM")
+
+ assert(errorWithCustom.getSqlState == "CUSTOM",
+ "Custom SQL state should take precedence over error class reader")
+
+ // Test with null custom SQL state - should fall back to error class reader
+ val errorWithNull = new SparkOutOfMemoryError(
+ "CANNOT_PARSE_DECIMAL",
+ Map.empty[String, String].asJava,
+ null)
+
+ assert(errorWithNull.getSqlState == "22018",
+ "Should fall back to error class reader SQL state when custom is null")
+ }
}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/exceptions/SqlScriptingException.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/exceptions/SqlScriptingException.scala
index 28d8177dbb23..105e1fbe2250 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/exceptions/SqlScriptingException.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/exceptions/SqlScriptingException.scala
@@ -27,13 +27,17 @@ private[sql] class SqlScriptingException (
errorClass: String,
cause: Throwable,
val origin: Origin,
- messageParameters: Map[String, String] = Map.empty)
+ messageParameters: Map[String, String] = Map.empty,
+ sqlState: Option[String] = None)
extends Exception(
errorMessageWithLineNumber(Option(origin), errorClass, messageParameters),
cause)
with SparkThrowable {
override def getCondition: String = errorClass
+
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
}
diff --git
a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidPlanInput.scala
b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidPlanInput.scala
index 7f16d09b9c42..84ef625fcf45 100644
---
a/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidPlanInput.scala
+++
b/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/common/InvalidPlanInput.scala
@@ -26,7 +26,8 @@ import org.apache.spark.{SparkThrowable, SparkThrowableHelper}
final case class InvalidPlanInput(
private val errorCondition: String,
private val messageParameters: Map[String, String],
- private val causeOpt: Option[Throwable])
+ private val causeOpt: Option[Throwable],
+ private val sqlState: Option[String] = None)
extends Exception(
SparkThrowableHelper.getMessage(errorCondition, messageParameters),
causeOpt.orNull)
@@ -34,6 +35,8 @@ final case class InvalidPlanInput(
override def getCondition: String = errorCondition
+ override def getSqlState: String = sqlState.getOrElse(super.getSqlState)
+
override def getMessageParameters: java.util.Map[String, String] =
messageParameters.asJava
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]