This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b05c61266d83 [SPARK-46490][SQL] Require error classes in `SparkThrowable` sub-classes
b05c61266d83 is described below

commit b05c61266d83590dcec642ecae929d6529b0ad1d
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Sat Dec 30 12:28:23 2023 +0300

    [SPARK-46490][SQL] Require error classes in `SparkThrowable` sub-classes
    
    ### What changes were proposed in this pull request?
    In this PR, I propose to allow creating `SparkThrowable` sub-classes only with an error class, by making the constructors that accept a plain `message` private.
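
    For illustration, here is a minimal sketch of the resulting pattern. The class
    name `SparkExampleException` is hypothetical and the fields are simplified
    relative to the actual sub-classes changed in this diff:
    ```scala
    import scala.jdk.CollectionConverters._

    import org.apache.spark.{SparkThrowable, SparkThrowableHelper}

    class SparkExampleException private(
        message: String,
        errorClass: Option[String],
        messageParameters: Map[String, String])
      extends RuntimeException(message) with SparkThrowable {

      // The only public constructor: an error class is now mandatory,
      // and the message is always derived from it.
      def this(errorClass: String, messageParameters: Map[String, String]) =
        this(
          SparkThrowableHelper.getMessage(errorClass, messageParameters),
          errorClass = Some(errorClass),
          messageParameters = messageParameters)

      override def getMessageParameters: java.util.Map[String, String] =
        messageParameters.asJava

      override def getErrorClass: String = errorClass.orNull
    }
    ```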
    
    ### Why are the changes needed?
    To improve the user experience with Spark SQL by unifying error exceptions: the final goal is that every Spark exception should contain an error class.
    
    ### Does this PR introduce _any_ user-facing change?
    No, since user code shouldn't throw `SparkThrowable` sub-classes itself, but it can be affected if it depends on error message formats.
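
    For example (a hypothetical caller sketch, not code from this PR), client code
    that parses message text is tied to the format, while matching on the error
    class keeps working; `handleDivideByZero` is a made-up handler, and ANSI mode
    is assumed so the query actually fails:
    ```scala
    try {
      spark.sql("SELECT 1/0").collect()   // fails under spark.sql.ansi.enabled=true
    } catch {
      // Robust: match on the stable error class instead of message text.
      case e: SparkThrowable if e.getErrorClass == "DIVIDE_BY_ZERO" =>
        handleDivideByZero()
    }
    ```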
    
    ### How was this patch tested?
    By existing test suites such as:
    ```
    $ PYSPARK_PYTHON=python3 build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #44464 from MaxGekk/ban-messages-SparkThrowable-subclass.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json    |  30 ++++++
 .../scala/org/apache/spark/SparkException.scala    | 112 ++++++---------------
 .../connect/client/SparkConnectClientSuite.scala   |  47 +++++----
 .../connect/client/GrpcExceptionConverter.scala    |  88 ++++++++--------
 4 files changed, 135 insertions(+), 142 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 9f68d4c5a53e..4f34ca29ea65 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -7073,6 +7073,36 @@
       "Namespace '<namespace>' is non empty. <details>"
     ]
   },
+  "_LEGACY_ERROR_TEMP_3104" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3105" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3106" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3107" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3108" : {
+    "message" : [
+      "<message>"
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_3109" : {
+    "message" : [
+      "<message>"
+    ]
+  },
   "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
     "message" : [
       "<errorMessage>"
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkException.scala b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
index 3bcdd0a7c29b..d2a1c6727730 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -133,11 +133,11 @@ private[spark] case class ExecutorDeadException(message: String)
 /**
  * Exception thrown when Spark returns different result after upgrading to a new version.
  */
-private[spark] class SparkUpgradeException(
-  message: String,
-  cause: Option[Throwable],
-  errorClass: Option[String],
-  messageParameters: Map[String, String])
+private[spark] class SparkUpgradeException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
   extends RuntimeException(message, cause.orNull) with SparkThrowable {
 
   def this(
@@ -152,15 +152,6 @@ private[spark] class SparkUpgradeException(
     )
   }
 
-  def this(message: String, cause: Option[Throwable]) = {
-    this(
-      message,
-      cause = cause,
-      errorClass = None,
-      messageParameters = Map.empty
-    )
-  }
-
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
   override def getErrorClass: String = errorClass.orNull
@@ -169,7 +160,7 @@ private[spark] class SparkUpgradeException(
 /**
  * Arithmetic exception thrown from Spark with an error class.
  */
-private[spark] class SparkArithmeticException(
+private[spark] class SparkArithmeticException private(
     message: String,
     errorClass: Option[String],
     messageParameters: Map[String, String],
@@ -189,14 +180,10 @@ private[spark] class SparkArithmeticException(
     )
   }
 
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+    errorClass: String,
+    messageParameters: Map[String, String],
+    context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")
 
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
@@ -207,7 +194,7 @@ private[spark] class SparkArithmeticException(
 /**
  * Unsupported operation exception thrown from Spark with an error class.
  */
-private[spark] class SparkUnsupportedOperationException(
+private[spark] class SparkUnsupportedOperationException private(
   message: String,
   errorClass: Option[String],
   messageParameters: Map[String, String])
@@ -223,14 +210,6 @@ private[spark] class SparkUnsupportedOperationException(
     )
   }
 
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty
-    )
-  }
-
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
   override def getErrorClass: String = errorClass.orNull
@@ -271,7 +250,7 @@ private[spark] class SparkConcurrentModificationException(
 /**
  * Datetime exception thrown from Spark with an error class.
  */
-private[spark] class SparkDateTimeException(
+private[spark] class SparkDateTimeException private(
     message: String,
     errorClass: Option[String],
     messageParameters: Map[String, String],
@@ -291,14 +270,10 @@ private[spark] class SparkDateTimeException(
     )
   }
 
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+    errorClass: String,
+    messageParameters: Map[String, String],
+    context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")
 
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
@@ -324,7 +299,7 @@ private[spark] class SparkFileNotFoundException(
 /**
  * Number format exception thrown from Spark with an error class.
  */
-private[spark] class SparkNumberFormatException private[spark](
+private[spark] class SparkNumberFormatException private(
     message: String,
     errorClass: Option[String],
     messageParameters: Map[String, String],
@@ -345,14 +320,10 @@ private[spark] class SparkNumberFormatException private[spark](
     )
   }
 
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+    errorClass: String,
+    messageParameters: Map[String, String],
+    context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")
 
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
@@ -363,7 +334,7 @@ private[spark] class SparkNumberFormatException private[spark](
 /**
  * Illegal argument exception thrown from Spark with an error class.
  */
-private[spark] class SparkIllegalArgumentException(
+private[spark] class SparkIllegalArgumentException private(
     message: String,
     cause: Option[Throwable],
     errorClass: Option[String],
@@ -387,30 +358,19 @@ private[spark] class SparkIllegalArgumentException(
     )
   }
 
-  def this(message: String, cause: Option[Throwable]) = {
-    this(
-      message,
-      cause = cause,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
-
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
   override def getErrorClass: String = errorClass.orNull
   override def getQueryContext: Array[QueryContext] = context
 }
 
-private[spark] class SparkRuntimeException(
+private[spark] class SparkRuntimeException private(
     message: String,
     cause: Option[Throwable],
     errorClass: Option[String],
     messageParameters: Map[String, String],
     context: Array[QueryContext])
-  extends RuntimeException(message, cause.orNull)
-    with SparkThrowable {
+  extends RuntimeException(message, cause.orNull) with SparkThrowable {
 
   def this(
     errorClass: String,
@@ -427,16 +387,6 @@ private[spark] class SparkRuntimeException(
     )
   }
 
-  def this(message: String, cause: Option[Throwable]) = {
-    this(
-      message,
-      cause = cause,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
-
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
   override def getErrorClass: String = errorClass.orNull
@@ -480,7 +430,7 @@ private[spark] class SparkSecurityException(
 /**
  * Array index out of bounds exception thrown from Spark with an error class.
  */
-private[spark] class SparkArrayIndexOutOfBoundsException(
+private[spark] class SparkArrayIndexOutOfBoundsException private(
   message: String,
   errorClass: Option[String],
   messageParameters: Map[String, String],
@@ -501,14 +451,10 @@ private[spark] class SparkArrayIndexOutOfBoundsException(
     )
   }
 
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = None,
-      messageParameters = Map.empty,
-      context = Array.empty
-    )
-  }
+  def this(
+    errorClass: String,
+    messageParameters: Map[String, String],
+    context: Array[QueryContext]) = this(errorClass, messageParameters, context, "")
 
   override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
 
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
index 698457ddb91d..d14caebe5b81 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
@@ -211,23 +211,36 @@ class SparkConnectClientSuite extends ConnectFunSuite with BeforeAndAfterEach {
     }
   }
 
-  for ((name, constructor) <- GrpcExceptionConverter.errorFactory) {
-    test(s"error framework parameters - $name") {
-      val testParams = GrpcExceptionConverter.ErrorParams(
-        message = "Found duplicate keys `abc`",
-        cause = None,
-        errorClass = Some("DUPLICATE_KEY"),
-        messageParameters = Map("keyColumn" -> "`abc`"),
-        queryContext = Array.empty)
-      val error = constructor(testParams)
-      assert(error.getMessage.contains(testParams.message))
-      assert(error.getCause == null)
-      error match {
-        case sparkThrowable: SparkThrowable =>
-          assert(sparkThrowable.getErrorClass == testParams.errorClass.get)
-          assert(sparkThrowable.getMessageParameters.asScala == testParams.messageParameters)
-          assert(sparkThrowable.getQueryContext.isEmpty)
-        case _ =>
+  test("error framework parameters") {
+    val errors = GrpcExceptionConverter.errorFactory
+    for ((name, constructor) <- errors if name.startsWith("org.apache.spark")) {
+      withClue(name) {
+        val testParams = GrpcExceptionConverter.ErrorParams(
+          message = "",
+          cause = None,
+          errorClass = Some("DUPLICATE_KEY"),
+          messageParameters = Map("keyColumn" -> "`abc`"),
+          queryContext = Array.empty)
+        val error = constructor(testParams).asInstanceOf[Throwable with SparkThrowable]
+        assert(error.getMessage.contains(testParams.message))
+        assert(error.getCause == null)
+        assert(error.getErrorClass == testParams.errorClass.get)
+        assert(error.getMessageParameters.asScala == testParams.messageParameters)
+        assert(error.getQueryContext.isEmpty)
+      }
+    }
+
+    for ((name, constructor) <- errors if !name.startsWith("org.apache.spark")) {
+      withClue(name) {
+        val testParams = GrpcExceptionConverter.ErrorParams(
+          message = "Found duplicate keys `abc`",
+          cause = None,
+          errorClass = None,
+          messageParameters = Map.empty,
+          queryContext = Array.empty)
+        val error = constructor(testParams)
+        assert(error.getMessage.contains(testParams.message))
+        assert(error.getCause == null)
       }
     }
   }
diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
index cc47924de3b0..6641e8c73fc7 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/client/GrpcExceptionConverter.scala
@@ -196,21 +196,15 @@ private[client] object GrpcExceptionConverter {
         errorClass = params.errorClass.orNull,
         messageParameters = params.messageParameters,
         queryContext = params.queryContext)),
-    errorConstructor(params => {
-      if (params.errorClass.isEmpty) {
-        new AnalysisException(
-          errorClass = "_LEGACY_ERROR_TEMP_3100",
-          messageParameters = Map("message" -> params.message),
-          cause = params.cause,
-          context = params.queryContext)
-      } else {
-        new AnalysisException(
-          errorClass = params.errorClass.get,
-          messageParameters = params.messageParameters,
-          cause = params.cause,
-          context = params.queryContext)
-      }
-    }),
+    errorConstructor(params =>
+      new AnalysisException(
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3100"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        },
+        cause = params.cause,
+        context = params.queryContext)),
     errorConstructor(params =>
      new NamespaceAlreadyExistsException(params.errorClass.orNull, params.messageParameters)),
     errorConstructor(params =>
@@ -232,53 +226,63 @@ private[client] object GrpcExceptionConverter {
      new NoSuchTableException(params.errorClass.orNull, params.messageParameters, params.cause)),
     errorConstructor[NumberFormatException](params =>
       new SparkNumberFormatException(
-        params.message,
-        params.errorClass,
-        params.messageParameters,
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3104"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        },
         params.queryContext)),
     errorConstructor[IllegalArgumentException](params =>
       new SparkIllegalArgumentException(
-        params.message,
-        params.cause,
-        params.errorClass,
-        params.messageParameters,
-        params.queryContext)),
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3105"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        },
+        params.queryContext,
+        cause = params.cause.orNull)),
     errorConstructor[ArithmeticException](params =>
       new SparkArithmeticException(
-        params.message,
-        params.errorClass,
-        params.messageParameters,
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3106"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        },
         params.queryContext)),
     errorConstructor[UnsupportedOperationException](params =>
       new SparkUnsupportedOperationException(
-        params.message,
-        params.errorClass,
-        params.messageParameters)),
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3107"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        })),
     errorConstructor[ArrayIndexOutOfBoundsException](params =>
       new SparkArrayIndexOutOfBoundsException(
-        params.message,
-        params.errorClass,
-        params.messageParameters,
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3108"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        },
         params.queryContext)),
     errorConstructor[DateTimeException](params =>
       new SparkDateTimeException(
-        params.message,
-        params.errorClass,
-        params.messageParameters,
+        errorClass = params.errorClass.getOrElse("_LEGACY_ERROR_TEMP_3109"),
+        messageParameters = params.errorClass match {
+          case Some(_) => params.messageParameters
+          case None => Map("message" -> params.message)
+        },
         params.queryContext)),
     errorConstructor(params =>
       new SparkRuntimeException(
-        params.message,
-        params.cause,
-        params.errorClass,
+        params.errorClass.orNull,
         params.messageParameters,
+        params.cause.orNull,
         params.queryContext)),
     errorConstructor(params =>
       new SparkUpgradeException(
-        params.message,
-        params.cause,
-        params.errorClass,
-        params.messageParameters)),
+        params.errorClass.orNull,
+        params.messageParameters,
+        params.cause.orNull)),
     errorConstructor(params =>
       new SparkException(
         message = params.message,

