This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6546988ead0 [SPARK-43041][SQL] Restore constructors of exceptions for compatibility in connector API
6546988ead0 is described below

commit 6546988ead06af8de33108ad0eb3f25af839eadc
Author: aokolnychyi <aokolnyc...@apple.com>
AuthorDate: Wed Apr 5 22:10:50 2023 -0700

    [SPARK-43041][SQL] Restore constructors of exceptions for compatibility in connector API
    
    ### What changes were proposed in this pull request?
    
    This PR adds back old constructors for exceptions used in the public connector API based on Spark 3.3.
    
    ### Why are the changes needed?
    
    These changes are needed to avoid breaking connectors when consuming Spark 3.4.
    
    Here is a list of exceptions used in the connector API (`org.apache.spark.sql.connector`):
    
    ```
    NoSuchNamespaceException
    NoSuchTableException
    NoSuchViewException
    NoSuchPartitionException
    NoSuchPartitionsException (not referenced by public Catalog API but I assume it may be related to the exception above, which is referenced)
    NoSuchFunctionException
    NoSuchIndexException
    
    NamespaceAlreadyExistsException
    TableAlreadyExistsException
    ViewAlreadyExistsException
    PartitionAlreadyExistsException (not referenced by public Catalog API but I assume it may be related to the exception below, which is referenced)
    PartitionsAlreadyExistException
    IndexAlreadyExistsException
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    
    Adds back previously released constructors.
    
    ### How was this patch tested?
    
    Existing tests.
    
    Closes #40679 from aokolnychyi/spark-43041.
    
    Authored-by: aokolnychyi <aokolnyc...@apple.com>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../catalyst/analysis/AlreadyExistException.scala  | 154 +++++++++++++++++---
 .../catalyst/analysis/NoSuchItemException.scala    | 161 +++++++++++++++++++--
 2 files changed, 282 insertions(+), 33 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
index 762b6155d5d..07a86124a07 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
+import org.apache.spark.SparkThrowableHelper
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
@@ -32,34 +33,81 @@ import org.apache.spark.sql.types.StructType
 class DatabaseAlreadyExistsException(db: String)
   extends NamespaceAlreadyExistsException(Array(db))
 
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NamespaceAlreadyExistsException private(
+    message: String,
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      Some(errorClass),
+      messageParameters)
+  }
 
-class NamespaceAlreadyExistsException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
   def this(namespace: Array[String]) = {
     this(errorClass = "SCHEMA_ALREADY_EXISTS",
       Map("schemaName" -> quoteNameParts(namespace)))
   }
+
+  def this(message: String) = {
+    this(message, errorClass = None, messageParameters = Map.empty[String, 
String])
+  }
 }
 
+// any changes to this class should be backward compatible as it may be used by external connectors
+class TableAlreadyExistsException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    cause = cause,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String], cause: 
Option[Throwable]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      cause,
+      Some(errorClass),
+      messageParameters)
+  }
 
-class TableAlreadyExistsException(errorClass: String, messageParameters: 
Map[String, String],
-  cause: Option[Throwable] = None)
-  extends AnalysisException(errorClass, messageParameters, cause = cause) {
   def this(db: String, table: String) = {
     this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
       messageParameters = Map("relationName" ->
-        (quoteIdentifier(db) + "." + quoteIdentifier(table))))
+        (quoteIdentifier(db) + "." + quoteIdentifier(table))),
+      cause = None)
   }
 
   def this(table: String) = {
     this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
       messageParameters = Map("relationName" ->
-        quoteNameParts(UnresolvedAttribute.parseAttributeName(table))))
+        quoteNameParts(UnresolvedAttribute.parseAttributeName(table))),
+      cause = None)
   }
 
   def this(table: Seq[String]) = {
     this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" -> quoteNameParts(table)))
+      messageParameters = Map("relationName" -> quoteNameParts(table)),
+      cause = None)
+  }
+
+  def this(tableIdent: Identifier) = {
+    this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
+      messageParameters = Map("relationName" -> tableIdent.quoted),
+      cause = None)
+  }
+
+  def this(message: String, cause: Option[Throwable] = None) = {
+    this(message, cause, errorClass = None, messageParameters = 
Map.empty[String, String])
   }
 }
 
@@ -73,6 +121,7 @@ class TempTableAlreadyExistsException(errorClass: String, 
messageParameters: Map
   }
 }
 
+// any changes to this class should be backward compatible as it may be used by external connectors
 class ViewAlreadyExistsException(errorClass: String, messageParameters: 
Map[String, String])
   extends AnalysisException(errorClass, messageParameters) {
 
@@ -81,8 +130,23 @@ class ViewAlreadyExistsException(errorClass: String, 
messageParameters: Map[Stri
       messageParameters = Map("relationName" -> ident.quoted))
 }
 
-class PartitionAlreadyExistsException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class PartitionAlreadyExistsException private(
+    message: String,
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      Some(errorClass),
+      messageParameters)
+  }
+
   def this(db: String, table: String, spec: TablePartitionSpec) = {
     this(errorClass = "PARTITIONS_ALREADY_EXIST",
       Map("partitionList" -> ("PARTITION (" +
@@ -97,10 +161,29 @@ class PartitionAlreadyExistsException(errorClass: String, 
messageParameters: Map
         .map( kv => quoteIdentifier(s"${kv._2}") + s" = ${kv._1}").mkString(", 
") + ")"),
         "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
   }
+
+  def this(message: String) = {
+    this(message, errorClass = None, messageParameters = Map.empty[String, 
String])
+  }
 }
 
-class PartitionsAlreadyExistException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class PartitionsAlreadyExistException private(
+    message: String,
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      Some(errorClass),
+      messageParameters)
+  }
+
   def this(db: String, table: String, specs: Seq[TablePartitionSpec]) = {
     this(errorClass = "PARTITIONS_ALREADY_EXIST",
       Map("partitionList" ->
@@ -125,8 +208,13 @@ class PartitionsAlreadyExistException(errorClass: String, 
messageParameters: Map
 
   def this(tableName: String, partitionIdent: InternalRow, partitionSchema: 
StructType) =
     this(tableName, Seq(partitionIdent), partitionSchema)
+
+  def this(message: String) = {
+    this(message, errorClass = None, messageParameters = Map.empty[String, 
String])
+  }
 }
 
+// any changes to this class should be backward compatible as it may be used by external connectors
 class FunctionAlreadyExistsException(errorClass: String, messageParameters: 
Map[String, String])
   extends AnalysisException(errorClass, messageParameters) {
 
@@ -134,14 +222,40 @@ class FunctionAlreadyExistsException(errorClass: String, 
messageParameters: Map[
     this (errorClass = "ROUTINE_ALREADY_EXISTS",
       Map("routineName" -> quoteNameParts(function)))
   }
+
+  def this(db: String, func: String) = {
+    this(Seq(db, func))
+  }
 }
 
-class IndexAlreadyExistsException(
-    indexName: String,
-    tableName: String,
-    cause: Option[Throwable] = None)
+// any changes to this class should be backward compatible as it may be used by external connectors
+class IndexAlreadyExistsException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
   extends AnalysisException(
-    errorClass = "INDEX_ALREADY_EXISTS",
-    Map("indexName" -> indexName, "tableName" -> tableName),
-    cause
-  )
+    message,
+    cause = cause,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(
+      errorClass: String,
+      messageParameters: Map[String, String],
+      cause: Option[Throwable]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      cause,
+      Some(errorClass),
+      messageParameters)
+  }
+
+  def this(indexName: String, tableName: String, cause: Option[Throwable]) = {
+    this("INDEX_ALREADY_EXISTS", Map("indexName" -> indexName, "tableName" -> 
tableName), cause)
+  }
+
+  def this(message: String, cause: Option[Throwable] = None) = {
+    this(message, cause, errorClass = None, messageParameters = 
Map.empty[String, String])
+  }
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index 8dd46c06e76..7d45e29d0f8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
+import org.apache.spark.SparkThrowableHelper
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
@@ -34,21 +35,60 @@ case class NoSuchDatabaseException(db: String)
   extends AnalysisException(errorClass = "SCHEMA_NOT_FOUND",
     messageParameters = Map("schemaName" -> quoteIdentifier(db)))
 
-class NoSuchNamespaceException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NoSuchNamespaceException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    cause = cause,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      cause = None,
+      Some(errorClass),
+      messageParameters)
+  }
 
   def this(namespace: Seq[String]) = {
     this(errorClass = "SCHEMA_NOT_FOUND",
       Map("schemaName" -> quoteNameParts(namespace)))
   }
+
   def this(namespace: Array[String]) = {
     this(errorClass = "SCHEMA_NOT_FOUND",
       Map("schemaName" -> quoteNameParts(namespace)))
   }
+
+  def this(message: String, cause: Option[Throwable] = None) = {
+    this(message, cause, errorClass = None, messageParameters = 
Map.empty[String, String])
+  }
 }
 
-class NoSuchTableException(errorClass: String, messageParameters: Map[String, 
String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NoSuchTableException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    cause = cause,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      cause = None,
+      Some(errorClass),
+      messageParameters)
+  }
 
   def this(db: String, table: String) = {
     this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
@@ -60,8 +100,18 @@ class NoSuchTableException(errorClass: String, 
messageParameters: Map[String, St
     this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
       messageParameters = Map("relationName" -> quoteNameParts(name)))
   }
+
+  def this(tableIdent: Identifier) = {
+    this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
+      messageParameters = Map("relationName" -> tableIdent.quoted))
+  }
+
+  def this(message: String, cause: Option[Throwable] = None) = {
+    this(message, cause, errorClass = None, messageParameters = 
Map.empty[String, String])
+  }
 }
 
+// any changes to this class should be backward compatible as it may be used by external connectors
 class NoSuchViewException(errorClass: String, messageParameters: Map[String, 
String])
   extends AnalysisException(errorClass, messageParameters) {
 
@@ -70,8 +120,22 @@ class NoSuchViewException(errorClass: String, 
messageParameters: Map[String, Str
       messageParameters = Map("relationName" -> ident.quoted))
 }
 
-class NoSuchPartitionException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NoSuchPartitionException private(
+    message: String,
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      Some(errorClass),
+      messageParameters)
+  }
 
   def this(db: String, table: String, spec: TablePartitionSpec) = {
     this(errorClass = "PARTITIONS_NOT_FOUND",
@@ -88,14 +152,35 @@ class NoSuchPartitionException(errorClass: String, 
messageParameters: Map[String
         .map( kv => quoteIdentifier(s"${kv._2}") + s" = ${kv._1}").mkString(", 
") + ")"),
         "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
   }
+
+  def this(message: String) = {
+    this(message, errorClass = None, messageParameters = Map.empty[String, 
String])
+  }
 }
 
 class NoSuchPermanentFunctionException(db: String, func: String)
   extends AnalysisException(errorClass = "ROUTINE_NOT_FOUND",
     Map("routineName" -> (quoteIdentifier(db) + "." + quoteIdentifier(func))))
 
-class NoSuchFunctionException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NoSuchFunctionException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    cause = cause,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      cause = None,
+      Some(errorClass),
+      messageParameters)
+  }
 
   def this(db: String, func: String) = {
     this(errorClass = "ROUTINE_NOT_FOUND",
@@ -105,10 +190,28 @@ class NoSuchFunctionException(errorClass: String, 
messageParameters: Map[String,
   def this(identifier: Identifier) = {
     this(errorClass = "ROUTINE_NOT_FOUND", Map("routineName" -> 
identifier.quoted))
   }
+
+  def this(message: String, cause: Option[Throwable] = None) = {
+    this(message, cause, errorClass = None, messageParameters = 
Map.empty[String, String])
+  }
 }
 
-class NoSuchPartitionsException(errorClass: String, messageParameters: 
Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NoSuchPartitionsException private(
+    message: String,
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(errorClass: String, messageParameters: Map[String, String]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      Some(errorClass),
+      messageParameters)
+  }
 
   def this(db: String, table: String, specs: Seq[TablePartitionSpec]) = {
     this(errorClass = "PARTITIONS_NOT_FOUND",
@@ -126,11 +229,43 @@ class NoSuchPartitionsException(errorClass: String, 
messageParameters: Map[Strin
           .mkString(", ")).mkString("), PARTITION (") + ")"),
         "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
   }
+
+  def this(message: String) = {
+    this(message, errorClass = None, messageParameters = Map.empty[String, 
String])
+  }
 }
 
 class NoSuchTempFunctionException(func: String)
   extends AnalysisException(errorClass = "ROUTINE_NOT_FOUND", 
Map("routineName" -> s"`$func`"))
 
-class NoSuchIndexException(indexName: String, tableName: String, cause: 
Option[Throwable] = None)
-  extends AnalysisException(errorClass = "INDEX_NOT_FOUND",
-    Map("indexName" -> indexName, "tableName" -> tableName), cause)
+// any changes to this class should be backward compatible as it may be used by external connectors
+class NoSuchIndexException private(
+    message: String,
+    cause: Option[Throwable],
+    errorClass: Option[String],
+    messageParameters: Map[String, String])
+  extends AnalysisException(
+    message,
+    cause = cause,
+    errorClass = errorClass,
+    messageParameters = messageParameters) {
+
+  def this(
+      errorClass: String,
+      messageParameters: Map[String, String],
+      cause: Option[Throwable]) = {
+    this(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters),
+      cause,
+      Some(errorClass),
+      messageParameters)
+  }
+
+  def this(indexName: String, tableName: String, cause: Option[Throwable]) = {
+    this("INDEX_NOT_FOUND", Map("indexName" -> indexName, "tableName" -> 
tableName), cause)
+  }
+
+  def this(message: String, cause: Option[Throwable] = None) = {
+    this(message, cause, errorClass = None, messageParameters = 
Map.empty[String, String])
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to