This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 1db23d263a1 [SPARK-44691][SQL][CONNECT] Move Subclasses of AnalysisException to sql/api
1db23d263a1 is described below

commit 1db23d263a16a3a42c8e9280e29f99b25461d8a7
Author: Yihong He <yihong...@databricks.com>
AuthorDate: Thu Aug 10 14:56:50 2023 +0800

    [SPARK-44691][SQL][CONNECT] Move Subclasses of AnalysisException to sql/api
    
    ### What changes were proposed in this pull request?
    
    - Move subclasses of AnalysisException to sql/api
    - Refactor some dependent utilities
    
    ### Why are the changes needed?
    
    - Support throwing these exceptions for better compatibility with the existing control flow behaviors
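
    For illustration only, a minimal hedged sketch (not part of this commit) of the control flow this preserves: with the subclasses in sql/api, client code such as the Spark Connect Scala client can still catch them directly. The helper name below is hypothetical.

        import org.apache.spark.sql.catalyst.analysis.NoSuchTableException

        // Hypothetical helper (illustration only): treat TABLE_OR_VIEW_NOT_FOUND
        // as a boolean signal instead of a failure, the usual control-flow use
        // of these exceptions.
        def existsQuietly(lookup: => Unit): Boolean =
          try { lookup; true }
          catch { case _: NoSuchTableException => false }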
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    - Existing tests
    
    Closes #42363 from heyihong/SPARK-44691.
    
    Authored-by: Yihong He <yihong...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit d5f1f17278a772689a394e2d10751e5f6655fdbc)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../spark/sql/connector/catalog/Identifier.java    |   0
 .../sql/connector/catalog/IdentifierImpl.java      |  10 +-
 .../sql/catalyst/analysis/NonEmptyException.scala  |   4 +-
 .../catalyst/analysis/alreadyExistException.scala} | 105 +-----------
 .../catalyst/analysis/noSuchItemsExceptions.scala} |  96 +----------
 .../spark/sql/catalyst/util/QuotingUtils.scala     |  24 +++
 .../spark/sql/catalyst/util/StringUtils.scala      |   6 -
 .../spark/sql/errors/DataTypeErrorsBase.scala      |   4 +-
 .../catalyst/analysis/AlreadyExistException.scala  | 163 ------------------
 .../catalyst/analysis/NoSuchItemException.scala    | 185 ---------------------
 .../apache/spark/sql/catalyst/util/package.scala   |   4 +-
 .../sql/connector/catalog/CatalogV2Implicits.scala |  10 +-
 12 files changed, 48 insertions(+), 563 deletions(-)

diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java b/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java
similarity index 100%
rename from sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java
rename to sql/api/src/main/java/org/apache/spark/sql/connector/catalog/Identifier.java
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java b/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
similarity index 91%
rename from sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
rename to sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
index c7aceecabac..61894b18fe6 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
+++ b/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
@@ -17,14 +17,14 @@
 
 package org.apache.spark.sql.connector.catalog;
 
+import org.apache.arrow.util.Preconditions;
+
 import java.util.Arrays;
 import java.util.Objects;
 import java.util.StringJoiner;
 
-import com.google.common.base.Preconditions;
-
 import org.apache.spark.annotation.Evolving;
-import org.apache.spark.sql.catalyst.util.package$;
+import org.apache.spark.sql.catalyst.util.QuotingUtils;
 
 /**
  *  An {@link Identifier} implementation.
@@ -56,9 +56,9 @@ class IdentifierImpl implements Identifier {
   public String toString() {
     StringJoiner joiner = new StringJoiner(".");
     for (String p : namespace) {
-      joiner.add(package$.MODULE$.quoteIfNeeded(p));
+      joiner.add(QuotingUtils.quoteIfNeeded(p));
     }
-    joiner.add(package$.MODULE$.quoteIfNeeded(name));
+    joiner.add(QuotingUtils.quoteIfNeeded(name));
     return joiner.toString();
   }
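
As a quick illustration of the quoting behavior above, a hedged sketch (Identifier.of is the public factory for Identifier; the expected string follows quoteIfNeeded's rules):

    import org.apache.spark.sql.connector.catalog.Identifier

    // quoteIfNeeded backtick-quotes only parts that are not simple
    // identifiers: "my-db" is quoted, "main" and "tbl" are not.
    val ident = Identifier.of(Array("main", "my-db"), "tbl")
    assert(ident.toString == "main.`my-db`.tbl")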
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NonEmptyException.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/NonEmptyException.scala
similarity index 91%
rename from sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NonEmptyException.scala
rename to sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/NonEmptyException.scala
index f3ff28f74fc..ecd57672b61 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NonEmptyException.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/NonEmptyException.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
+import org.apache.spark.sql.catalyst.util.QuotingUtils.quoted
 
 
 /**
@@ -31,6 +31,6 @@ case class NonEmptyNamespaceException(
   extends AnalysisException(message, cause = cause) {
 
   def this(namespace: Array[String]) = {
-    this(s"Namespace '${namespace.quoted}' is non empty.")
+    this(s"Namespace '${quoted(namespace)}' is non empty.")
   }
 }
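
A small hedged check of the substitution above: the explicit QuotingUtils.quoted(namespace) produces the same string the removed CatalogV2Implicits extension did.

    import org.apache.spark.sql.catalyst.util.QuotingUtils.quoted

    // Parts containing characters outside [a-zA-Z0-9_] are backtick-quoted.
    assert(quoted(Array("ns1", "my-ns")) == "ns1.`my-ns`")
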
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/alreadyExistException.scala
similarity index 59%
copy from sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
copy to sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/alreadyExistException.scala
index 670a4384da2..625b6b7c4c5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/alreadyExistException.scala
@@ -19,12 +19,9 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.SparkThrowableHelper
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
-import org.apache.spark.sql.catalyst.util.{quoteIdentifier, quoteNameParts }
-import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
+import org.apache.spark.sql.catalyst.util.AttributeNameParser
+import org.apache.spark.sql.catalyst.util.QuotingUtils.{quoted, quoteIdentifier, quoteNameParts}
 import org.apache.spark.sql.connector.catalog.Identifier
-import org.apache.spark.sql.types.StructType
 
 /**
  * Thrown by a catalog when an item already exists. The analyzer will rethrow the exception
@@ -93,7 +90,7 @@ class TableAlreadyExistsException private(
   def this(table: String) = {
     this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
       messageParameters = Map("relationName" ->
-        quoteNameParts(UnresolvedAttribute.parseAttributeName(table))),
+        quoteNameParts(AttributeNameParser.parseAttributeName(table))),
       cause = None)
   }
 
@@ -105,7 +102,7 @@ class TableAlreadyExistsException private(
 
   def this(tableIdent: Identifier) = {
     this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" -> tableIdent.quoted),
+      messageParameters = Map("relationName" -> quoted(tableIdent)),
       cause = None)
   }
 
@@ -124,7 +121,7 @@ class TempTableAlreadyExistsException(errorClass: String, messageParameters: Map
   def this(table: String) = {
     this(errorClass = "TEMP_TABLE_OR_VIEW_ALREADY_EXISTS",
       messageParameters = Map("relationName"
-        -> quoteNameParts(UnresolvedAttribute.parseAttributeName(table))))
+        -> quoteNameParts(AttributeNameParser.parseAttributeName(table))))
   }
 }
 
@@ -134,97 +131,7 @@ class ViewAlreadyExistsException(errorClass: String, messageParameters: Map[Stri
 
   def this(ident: Identifier) =
     this(errorClass = "VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" -> ident.quoted))
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class PartitionAlreadyExistsException private(
-    message: String,
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, table: String, spec: TablePartitionSpec) = {
-    this(errorClass = "PARTITIONS_ALREADY_EXIST",
-      Map("partitionList" -> ("PARTITION (" +
-        spec.map( kv => quoteIdentifier(kv._1) + s" = ${kv._2}").mkString(", ") + ")"),
-        "tableName" -> (quoteIdentifier(db) + "." + quoteIdentifier(table))))
-  }
-
-  def this(tableName: String, partitionIdent: InternalRow, partitionSchema: StructType) = {
-    this(errorClass = "PARTITIONS_ALREADY_EXIST",
-      Map("partitionList" ->
-        ("PARTITION (" + 
partitionIdent.toSeq(partitionSchema).zip(partitionSchema.map(_.name))
-        .map( kv => quoteIdentifier(s"${kv._2}") + s" = ${kv._1}").mkString(", 
") + ")"),
-        "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
-  }
-
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = Some("PARTITIONS_ALREADY_EXIST"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class PartitionsAlreadyExistException private(
-    message: String,
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, table: String, specs: Seq[TablePartitionSpec]) = {
-    this(errorClass = "PARTITIONS_ALREADY_EXIST",
-      Map("partitionList" ->
-        ("PARTITION ("
-        + specs.map(spec => spec.map(kv => quoteIdentifier(kv._1) + s" = ${kv._2}").mkString(", "))
-        .mkString("), PARTITION (") + ")"),
-        "tableName" -> (quoteIdentifier(db) + "." + quoteIdentifier(table))))
-  }
-
-  def this(db: String, table: String, spec: TablePartitionSpec) =
-    this(db, table, Seq(spec))
-
-  def this(tableName: String, partitionIdents: Seq[InternalRow], partitionSchema: StructType) = {
-    this(errorClass = "PARTITIONS_ALREADY_EXIST",
-      Map("partitionList" ->
-        ("PARTITION (" +
-          partitionIdents.map(_.toSeq(partitionSchema).zip(partitionSchema.map(_.name))
-            .map( kv => quoteIdentifier(s"${kv._2}") + s" = ${kv._1}")
-            .mkString(", ")).mkString("), PARTITION (") + ")"),
-        "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
-  }
-
-  def this(tableName: String, partitionIdent: InternalRow, partitionSchema: StructType) =
-    this(tableName, Seq(partitionIdent), partitionSchema)
-
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = Some("PARTITIONS_ALREADY_EXIST"),
-      messageParameters = Map.empty[String, String])
-  }
+      messageParameters = Map("relationName" -> quoted(ident)))
 }
 
 // any changes to this class should be backward compatible as it may be used by external connectors
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
similarity index 64%
copy from sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
copy to sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
index f107f924dcb..7da9777be15 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
@@ -19,13 +19,8 @@ package org.apache.spark.sql.catalyst.analysis
 
 import org.apache.spark.SparkThrowableHelper
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
-import org.apache.spark.sql.catalyst.util.{quoteIdentifier, quoteNameParts}
-import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
+import org.apache.spark.sql.catalyst.util.QuotingUtils.{quoted, quoteIdentifier, quoteNameParts}
 import org.apache.spark.sql.connector.catalog.Identifier
-import org.apache.spark.sql.types.StructType
-
 
 /**
  * Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception
@@ -107,7 +102,7 @@ class NoSuchTableException private(
 
   def this(tableIdent: Identifier) = {
     this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
-      messageParameters = Map("relationName" -> tableIdent.quoted))
+      messageParameters = Map("relationName" -> quoted(tableIdent)))
   }
 
   def this(message: String, cause: Option[Throwable] = None) = {
@@ -125,48 +120,7 @@ class NoSuchViewException(errorClass: String, messageParameters: Map[String, Str
 
   def this(ident: Identifier) =
     this(errorClass = "VIEW_NOT_FOUND",
-      messageParameters = Map("relationName" -> ident.quoted))
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchPartitionException private(
-    message: String,
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, table: String, spec: TablePartitionSpec) = {
-    this(errorClass = "PARTITIONS_NOT_FOUND",
-      Map("partitionList" ->
-        ("PARTITION (" +
-          spec.map( kv => quoteIdentifier(kv._1) + s" = ${kv._2}").mkString(", ") + ")"),
-        "tableName" -> (quoteIdentifier(db) + "." + quoteIdentifier(table))))
-  }
-
-  def this(tableName: String, partitionIdent: InternalRow, partitionSchema: StructType) = {
-    this(errorClass = "PARTITIONS_NOT_FOUND",
-      Map("partitionList" ->
-        ("PARTITION (" + 
partitionIdent.toSeq(partitionSchema).zip(partitionSchema.map(_.name))
-        .map( kv => quoteIdentifier(s"${kv._2}") + s" = ${kv._1}").mkString(", 
") + ")"),
-        "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
-  }
-
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = Some("PARTITIONS_NOT_FOUND"),
-      messageParameters = Map.empty[String, String])
-  }
+      messageParameters = Map("relationName" -> quoted(ident)))
 }
 
 class NoSuchPermanentFunctionException(db: String, func: String)
@@ -199,7 +153,7 @@ class NoSuchFunctionException private(
   }
 
   def this(identifier: Identifier) = {
-    this(errorClass = "ROUTINE_NOT_FOUND", Map("routineName" -> identifier.quoted))
+    this(errorClass = "ROUTINE_NOT_FOUND", Map("routineName" -> quoted(identifier)))
   }
 
   def this(message: String, cause: Option[Throwable] = None) = {
@@ -211,48 +165,6 @@ class NoSuchFunctionException private(
   }
 }
 
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchPartitionsException private(
-    message: String,
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, table: String, specs: Seq[TablePartitionSpec]) = {
-    this(errorClass = "PARTITIONS_NOT_FOUND",
-      Map("partitionList" -> ("PARTITION (" +
-        specs.map(spec => spec.map(kv => quoteIdentifier(kv._1) + s" = ${kv._2}").mkString(", "))
-        .mkString("), PARTITION (") + ")"),
-        "tableName" -> (quoteIdentifier(db) + "." + quoteIdentifier(table))))
-  }
-
-  def this(tableName: String, partitionIdents: Seq[InternalRow], partitionSchema: StructType) = {
-    this(errorClass = "PARTITIONS_NOT_FOUND",
-      Map("partitionList" -> ("PARTITION (" +
-        partitionIdents.map(_.toSeq(partitionSchema).zip(partitionSchema.map(_.name))
-          .map( kv => quoteIdentifier(s"${kv._2}") + s" = ${kv._1}")
-          .mkString(", ")).mkString("), PARTITION (") + ")"),
-        "tableName" -> 
quoteNameParts(UnresolvedAttribute.parseAttributeName(tableName))))
-  }
-
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = Some("PARTITIONS_NOT_FOUND"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
 class NoSuchTempFunctionException(func: String)
   extends AnalysisException(errorClass = "ROUTINE_NOT_FOUND", Map("routineName" -> s"`$func`"))
 
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/QuotingUtils.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/QuotingUtils.scala
index 62015fe206a..43af533b85a 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/QuotingUtils.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/QuotingUtils.scala
@@ -16,6 +16,8 @@
  */
 package org.apache.spark.sql.catalyst.util
 
+import org.apache.spark.sql.connector.catalog.Identifier
+
 object QuotingUtils {
   private def quoteByDefault(elem: String): String = {
     "\"" + elem + "\""
@@ -29,6 +31,16 @@ object QuotingUtils {
     quoteByDefault(schema)
   }
 
+  def quoteIdentifier(name: String): String = {
+    // Escapes back-ticks within the identifier name with double-back-ticks, and then quote the
+    // identifier with back-ticks.
+    "`" + name.replace("`", "``") + "`"
+  }
+
+  def quoteNameParts(name: Seq[String]): String = {
+    name.map(part => quoteIdentifier(part)).mkString(".")
+  }
+
   def quoteIfNeeded(part: String): String = {
     if (part.matches("[a-zA-Z0-9_]+") && !part.matches("\\d+")) {
       part
@@ -37,6 +49,18 @@ object QuotingUtils {
     }
   }
 
+  def quoted(namespace: Array[String]): String = {
+    namespace.map(quoteIfNeeded).mkString(".")
+  }
+
+  def quoted(ident: Identifier): String = {
+    if (ident.namespace.nonEmpty) {
+      ident.namespace.map(quoteIfNeeded).mkString(".") + "." + quoteIfNeeded(ident.name)
+    } else {
+      quoteIfNeeded(ident.name)
+    }
+  }
+
   def escapeSingleQuotedString(str: String): String = {
     val builder = new StringBuilder
 
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala
index 3ac81b94358..c629302214c 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/StringUtils.scala
@@ -100,12 +100,6 @@ object SparkStringUtils extends Logging {
     truncatedString(seq, "", sep, "", maxFields)
   }
 
-  def quoteIdentifier(name: String): String = {
-    // Escapes back-ticks within the identifier name with double-back-ticks, and then quote the
-    // identifier with back-ticks.
-    "`" + name.replace("`", "``") + "`"
-  }
-
   /**
   * Returns a pretty string of the byte array which prints each byte as a hex digit and add spaces
    * between them. For example, [1A C0].
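
For reference, a hedged sketch of where the removed helper now lives: QuotingUtils.quoteIdentifier always backtick-quotes and doubles embedded back-ticks, and quoteNameParts joins the quoted parts with dots.

    import org.apache.spark.sql.catalyst.util.QuotingUtils

    // The back-tick inside the name is doubled, then the whole name is quoted.
    assert(QuotingUtils.quoteIdentifier("a`b") == "`a``b`")
    assert(QuotingUtils.quoteNameParts(Seq("db", "a`b")) == "`db`.`a``b`")
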
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrorsBase.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrorsBase.scala
index 95962f192f2..aed3c681365 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrorsBase.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrorsBase.scala
@@ -20,7 +20,7 @@ import java.util.Locale
 
 import org.apache.spark.QueryContext
 import org.apache.spark.sql.catalyst.trees.SQLQueryContext
-import org.apache.spark.sql.catalyst.util.{AttributeNameParser, QuotingUtils, SparkStringUtils}
+import org.apache.spark.sql.catalyst.util.{AttributeNameParser, QuotingUtils}
 import org.apache.spark.sql.types.{AbstractDataType, DataType, TypeCollection}
 import org.apache.spark.unsafe.types.UTF8String
 
@@ -34,7 +34,7 @@ private[sql] trait DataTypeErrorsBase {
       case Seq("__auto_generated_subquery_name", rest @ _*) if rest != Nil => rest
       case other => other
     }
-    cleaned.map(SparkStringUtils.quoteIdentifier).mkString(".")
+    cleaned.map(QuotingUtils.quoteIdentifier).mkString(".")
   }
 
   def toSQLStmt(text: String): String = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
index 670a4384da2..bbac5ab7db3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala
@@ -22,121 +22,8 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.util.{quoteIdentifier, quoteNameParts }
-import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
-import org.apache.spark.sql.connector.catalog.Identifier
 import org.apache.spark.sql.types.StructType
 
-/**
- * Thrown by a catalog when an item already exists. The analyzer will rethrow the exception
- * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information.
- */
-class DatabaseAlreadyExistsException(db: String)
-  extends NamespaceAlreadyExistsException(Array(db))
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NamespaceAlreadyExistsException private(
-    message: String,
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(namespace: Array[String]) = {
-    this(errorClass = "SCHEMA_ALREADY_EXISTS",
-      Map("schemaName" -> quoteNameParts(namespace)))
-  }
-
-  def this(message: String) = {
-    this(
-      message,
-      errorClass = Some("SCHEMA_ALREADY_EXISTS"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class TableAlreadyExistsException private(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    cause = cause,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String], cause: Option[Throwable]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      cause,
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, table: String) = {
-    this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" ->
-        (quoteIdentifier(db) + "." + quoteIdentifier(table))),
-      cause = None)
-  }
-
-  def this(table: String) = {
-    this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" ->
-        quoteNameParts(UnresolvedAttribute.parseAttributeName(table))),
-      cause = None)
-  }
-
-  def this(table: Seq[String]) = {
-    this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" -> quoteNameParts(table)),
-      cause = None)
-  }
-
-  def this(tableIdent: Identifier) = {
-    this(errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" -> tableIdent.quoted),
-      cause = None)
-  }
-
-  def this(message: String, cause: Option[Throwable] = None) = {
-    this(
-      message,
-      cause,
-      errorClass = Some("TABLE_OR_VIEW_ALREADY_EXISTS"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
-class TempTableAlreadyExistsException(errorClass: String, messageParameters: Map[String, String],
-                                      cause: Option[Throwable] = None)
-  extends AnalysisException(errorClass, messageParameters, cause = cause) {
-  def this(table: String) = {
-    this(errorClass = "TEMP_TABLE_OR_VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName"
-        -> quoteNameParts(UnresolvedAttribute.parseAttributeName(table))))
-  }
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class ViewAlreadyExistsException(errorClass: String, messageParameters: Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
-
-  def this(ident: Identifier) =
-    this(errorClass = "VIEW_ALREADY_EXISTS",
-      messageParameters = Map("relationName" -> ident.quoted))
-}
-
 // any changes to this class should be backward compatible as it may be used by external connectors
 class PartitionAlreadyExistsException private(
     message: String,
@@ -226,53 +113,3 @@ class PartitionsAlreadyExistException private(
       messageParameters = Map.empty[String, String])
   }
 }
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class FunctionAlreadyExistsException(errorClass: String, messageParameters: Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
-
-  def this(function: Seq[String]) = {
-    this (errorClass = "ROUTINE_ALREADY_EXISTS",
-      Map("routineName" -> quoteNameParts(function)))
-  }
-
-  def this(db: String, func: String) = {
-    this(Seq(db, func))
-  }
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class IndexAlreadyExistsException private(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    cause = cause,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(
-      errorClass: String,
-      messageParameters: Map[String, String],
-      cause: Option[Throwable]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      cause,
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(indexName: String, tableName: String, cause: Option[Throwable]) = {
-    this("INDEX_ALREADY_EXISTS", Map("indexName" -> indexName, "tableName" -> 
tableName), cause)
-  }
-
-  def this(message: String, cause: Option[Throwable] = None) = {
-    this(
-      message,
-      cause,
-      errorClass = Some("INDEX_ALREADY_EXISTS"),
-      messageParameters = Map.empty[String, String])
-  }
-}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index f107f924dcb..217c293900e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -22,112 +22,8 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.util.{quoteIdentifier, quoteNameParts}
-import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
-import org.apache.spark.sql.connector.catalog.Identifier
 import org.apache.spark.sql.types.StructType
 
-
-/**
- * Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception
- * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information.
- */
-case class NoSuchDatabaseException(db: String)
-  extends AnalysisException(errorClass = "SCHEMA_NOT_FOUND",
-    messageParameters = Map("schemaName" -> quoteIdentifier(db)))
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchNamespaceException private(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    cause = cause,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      cause = None,
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(namespace: Seq[String]) = {
-    this(errorClass = "SCHEMA_NOT_FOUND",
-      Map("schemaName" -> quoteNameParts(namespace)))
-  }
-
-  def this(namespace: Array[String]) = {
-    this(errorClass = "SCHEMA_NOT_FOUND",
-      Map("schemaName" -> quoteNameParts(namespace)))
-  }
-
-  def this(message: String, cause: Option[Throwable] = None) = {
-    this(
-      message,
-      cause,
-      errorClass = Some("SCHEMA_NOT_FOUND"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchTableException private(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    cause = cause,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      cause = None,
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, table: String) = {
-    this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
-      messageParameters = Map("relationName" ->
-        (quoteIdentifier(db) + "." + quoteIdentifier(table))))
-  }
-
-  def this(name : Seq[String]) = {
-    this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
-      messageParameters = Map("relationName" -> quoteNameParts(name)))
-  }
-
-  def this(tableIdent: Identifier) = {
-    this(errorClass = "TABLE_OR_VIEW_NOT_FOUND",
-      messageParameters = Map("relationName" -> tableIdent.quoted))
-  }
-
-  def this(message: String, cause: Option[Throwable] = None) = {
-    this(
-      message,
-      cause,
-      errorClass = Some("TABLE_OR_VIEW_NOT_FOUND"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchViewException(errorClass: String, messageParameters: Map[String, String])
-  extends AnalysisException(errorClass, messageParameters) {
-
-  def this(ident: Identifier) =
-    this(errorClass = "VIEW_NOT_FOUND",
-      messageParameters = Map("relationName" -> ident.quoted))
-}
-
 // any changes to this class should be backward compatible as it may be used by external connectors
 class NoSuchPartitionException private(
     message: String,
@@ -169,48 +65,6 @@ class NoSuchPartitionException private(
   }
 }
 
-class NoSuchPermanentFunctionException(db: String, func: String)
-  extends AnalysisException(errorClass = "ROUTINE_NOT_FOUND",
-    Map("routineName" -> (quoteIdentifier(db) + "." + quoteIdentifier(func))))
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchFunctionException private(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    cause = cause,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(errorClass: String, messageParameters: Map[String, String]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      cause = None,
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(db: String, func: String) = {
-    this(errorClass = "ROUTINE_NOT_FOUND",
-      Map("routineName" -> (quoteIdentifier(db) + "." + 
quoteIdentifier(func))))
-  }
-
-  def this(identifier: Identifier) = {
-    this(errorClass = "ROUTINE_NOT_FOUND", Map("routineName" -> identifier.quoted))
-  }
-
-  def this(message: String, cause: Option[Throwable] = None) = {
-    this(
-      message,
-      cause,
-      errorClass = Some("ROUTINE_NOT_FOUND"),
-      messageParameters = Map.empty[String, String])
-  }
-}
-
 // any changes to this class should be backward compatible as it may be used by external connectors
 class NoSuchPartitionsException private(
     message: String,
@@ -252,42 +106,3 @@ class NoSuchPartitionsException private(
       messageParameters = Map.empty[String, String])
   }
 }
-
-class NoSuchTempFunctionException(func: String)
-  extends AnalysisException(errorClass = "ROUTINE_NOT_FOUND", Map("routineName" -> s"`$func`"))
-
-// any changes to this class should be backward compatible as it may be used by external connectors
-class NoSuchIndexException private(
-    message: String,
-    cause: Option[Throwable],
-    errorClass: Option[String],
-    messageParameters: Map[String, String])
-  extends AnalysisException(
-    message,
-    cause = cause,
-    errorClass = errorClass,
-    messageParameters = messageParameters) {
-
-  def this(
-      errorClass: String,
-      messageParameters: Map[String, String],
-      cause: Option[Throwable]) = {
-    this(
-      SparkThrowableHelper.getMessage(errorClass, messageParameters),
-      cause,
-      Some(errorClass),
-      messageParameters)
-  }
-
-  def this(indexName: String, tableName: String, cause: Option[Throwable]) = {
-    this("INDEX_NOT_FOUND", Map("indexName" -> indexName, "tableName" -> 
tableName), cause)
-  }
-
-  def this(message: String, cause: Option[Throwable] = None) = {
-    this(
-      message,
-      cause,
-      errorClass = Some("INDEX_NOT_FOUND"),
-      messageParameters = Map.empty[String, String])
-  }
-}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
index 78d4beab414..98eb2ade985 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/package.scala
@@ -110,11 +110,11 @@ package object util extends Logging {
   }
 
   def quoteIdentifier(name: String): String = {
-    SparkStringUtils.quoteIdentifier(name)
+    QuotingUtils.quoteIdentifier(name)
   }
 
   def quoteNameParts(name: Seq[String]): String = {
-    name.map(part => quoteIdentifier(part)).mkString(".")
+    QuotingUtils.quoteNameParts(name)
   }
 
   def quoteIfNeeded(part: String): String = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
index 4ca926abad3..5557957bccd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.expressions.AttributeReference
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
-import org.apache.spark.sql.catalyst.util.quoteIfNeeded
+import org.apache.spark.sql.catalyst.util.{quoteIfNeeded, QuotingUtils}
 import org.apache.spark.sql.connector.expressions.{BucketTransform, FieldReference, IdentityTransform, LogicalExpressions, Transform}
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.types.StructType
@@ -110,7 +110,7 @@ private[sql] object CatalogV2Implicits {
   }
 
   implicit class NamespaceHelper(namespace: Array[String]) {
-    def quoted: String = namespace.map(quoteIfNeeded).mkString(".")
+    def quoted: String = QuotingUtils.quoted(namespace)
   }
 
   implicit class FunctionIdentifierHelper(ident: FunctionIdentifier) {
@@ -126,11 +126,7 @@ private[sql] object CatalogV2Implicits {
 
   implicit class IdentifierHelper(ident: Identifier) {
     def quoted: String = {
-      if (ident.namespace.nonEmpty) {
-        ident.namespace.map(quoteIfNeeded).mkString(".") + "." + quoteIfNeeded(ident.name)
-      } else {
-        quoteIfNeeded(ident.name)
-      }
+      QuotingUtils.quoted(ident)
     }
 
     def original: String = ident.namespace() :+ ident.name() mkString "."
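
A closing hedged sketch of the unchanged surface: the IdentifierHelper implicit still gives Identifier a .quoted extension, now delegating to QuotingUtils.quoted. CatalogV2Implicits is private[sql], so this assumes code compiled inside the org.apache.spark.sql package.

    import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.IdentifierHelper
    import org.apache.spark.sql.connector.catalog.Identifier

    val ident = Identifier.of(Array("cat", "my db"), "t")
    // "my db" needs quoting (it contains a space); the other parts do not.
    assert(ident.quoted == "cat.`my db`.t")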


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

