allisonwang-db commented on a change in pull request #32916:
URL: https://github.com/apache/spark/pull/32916#discussion_r654622367



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,52 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. $comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: ${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to $precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(
+      s"""
+         |Negative scale is not allowed: $scale.
+         |You can use ${LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key}=true
+         |to enable legacy mode to allow it.
+       """.stripMargin.replaceAll("\n", " "))
+  }
+
+  def keyNotValidColumnInTableError(key: String, tblName: String): Throwable = {
+    new AnalysisException(s"$key is not a valid partition column in table $tblName.")
+  }
+
+  def partitionSpecNotValidError(

Review comment:
       invalidPartitionSpecError
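       To make the rename concrete, here is a minimal sketch of the suggested name applied to the method body quoted further down in this review (only the name changes; its placement inside QueryCompilationErrors is assumed):

           def invalidPartitionSpecError(
               specKeys: String,
               partitionColumnNames: Seq[String],
               tableName: String): Throwable = {
             // Message text taken from the PR diff; only the method name follows the suggestion.
             new AnalysisException(
               s"""
                 |Partition spec is invalid. The spec ($specKeys) must match
                 |the partition spec (${partitionColumnNames.mkString(", ")}) defined in
                 |table '$tableName'
               """.stripMargin.replaceAll("\n", " "))
           }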

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,52 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. $comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: ${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to $precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(
+      s"""
+         |Negative scale is not allowed: $scale.
+         |You can use ${LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key}=true
+         |to enable legacy mode to allow it.
+       """.stripMargin.replaceAll("\n", " "))
+  }
+
+  def keyNotValidColumnInTableError(key: String, tblName: String): Throwable = {

Review comment:
       invalidPartitionColumnKeyInTableError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(

Review comment:
       cannotInstantiateAbstractCatalogPluginClassError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(

Review comment:
       failedToInstantiateConstructorForCatalogError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {

Review comment:
       catalogPluginClassNotImplementedError
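       For context on how the methods quoted in this hunk are meant to be consumed, a minimal hedged sketch of a call site; the resolvePluginClassName helper and its Map-based config lookup are hypothetical and only illustrate delegating exception construction to QueryExecutionErrors:

           import org.apache.spark.sql.errors.QueryExecutionErrors

           // Hypothetical caller (not the actual Spark code path): look up the plugin
           // class configured for a catalog and throw through the centralized error
           // object instead of constructing the exception inline.
           def resolvePluginClassName(name: String, conf: Map[String, String]): String = {
             conf.getOrElse(s"spark.sql.catalog.$name",
               throw QueryExecutionErrors.catalogPluginClassNotFoundError(name))
           }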

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,52 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: String): Throwable = {

Review comment:
       configRemovedInVersionError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {

Review comment:
       catalogPluginClassNotFoundForCatalogError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,52 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. $comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: ${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to $precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(
+      s"""
+         |Negative scale is not allowed: $scale.
+         |You can use ${LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key}=true
+         |to enable legacy mode to allow it.
+       """.stripMargin.replaceAll("\n", " "))
+  }
+
+  def keyNotValidColumnInTableError(key: String, tblName: String): Throwable = {
+    new AnalysisException(s"$key is not a valid partition column in table $tblName.")
+  }
+
+  def partitionSpecNotValidError(
+      specKeys: String,
+      partitionColumnNames: Seq[String],
+      tableName: String): Throwable = {
+    new AnalysisException(
+      s"""
+        |Partition spec is invalid. The spec ($specKeys) must match
+        |the partition spec (${partitionColumnNames.mkString(", ")}) defined in
+        |table '$tableName'
+      """.stripMargin.replaceAll("\n", " "))
+  }
+
+  def foundDuplicateColError(colType: String, duplicateCol: Seq[String]): Throwable = {

Review comment:
       foundDuplicateColumnError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Failed during instantiating constructor for catalog " +
+      s"'$name': $pluginClassName", e.getCause)
+  }
+
+  def noSuchElementExceptionError(): Throwable = {
+    new NoSuchElementException
+  }
+
+  def noSuchElementExceptionError(key: String): Throwable = {
+    new NoSuchElementException(key)
+  }
+
+  def cannotMutateReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot mutate ReadOnlySQLConf.")
+  }
+
+  def cannotCloneCopyReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot clone/copy ReadOnlySQLConf.")
+  }
+
+  def cannotGetSQLCOnfInSchedulerEventLoopThread(): Throwable = {
+    new RuntimeException("Cannot get SQLConf inside scheduler event loop thread.")
+  }
+
+  def unsupportedOperationExceptionError(): Throwable = {
+    new UnsupportedOperationException
+  }
+
+  def nullLiteralsCannotCastToError(name: String): Throwable = {
+    new UnsupportedOperationException(s"null literals can't be casted to $name")
+  }
+
+  def notUDFError(name: String, userClass: String): Throwable = {
+    new SparkException(s"$name is not an UserDefinedType. Please make sure registering " +
+        s"an UserDefinedType for ${userClass}")
+  }
+
+  def cannotLoadUDFError(name: String, userClass: String): Throwable = {
+    new SparkException(s"Can not load in UserDefinedType ${name} for user class ${userClass}.")
+  }
+
+  def timestampTypeMustSupplyTZidError(): Throwable = {

Review comment:
       timeZoneIdNotSpecifiedForTimestampTypeError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Failed during instantiating constructor for catalog " +
+      s"'$name': $pluginClassName", e.getCause)
+  }
+
+  def noSuchElementExceptionError(): Throwable = {
+    new NoSuchElementException
+  }
+
+  def noSuchElementExceptionError(key: String): Throwable = {
+    new NoSuchElementException(key)
+  }
+
+  def cannotMutateReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot mutate ReadOnlySQLConf.")
+  }
+
+  def cannotCloneCopyReadOnlySQLConfError(): Throwable = {

Review comment:
       cannotCloneOrCopyReadOnlySQLConfError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Failed during instantiating constructor for catalog " +
+      s"'$name': $pluginClassName", e.getCause)
+  }
+
+  def noSuchElementExceptionError(): Throwable = {
+    new NoSuchElementException
+  }
+
+  def noSuchElementExceptionError(key: String): Throwable = {
+    new NoSuchElementException(key)
+  }
+
+  def cannotMutateReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot mutate ReadOnlySQLConf.")
+  }
+
+  def cannotCloneCopyReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot clone/copy ReadOnlySQLConf.")
+  }
+
+  def cannotGetSQLCOnfInSchedulerEventLoopThread(): Throwable = {
+    new RuntimeException("Cannot get SQLConf inside scheduler event loop thread.")
+  }
+
+  def unsupportedOperationExceptionError(): Throwable = {
+    new UnsupportedOperationException
+  }
+
+  def nullLiteralsCannotCastToError(name: String): Throwable = {

Review comment:
       nullLiteralsCannotBeCastedError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Failed during instantiating constructor for catalog " +
+      s"'$name': $pluginClassName", e.getCause)
+  }
+
+  def noSuchElementExceptionError(): Throwable = {
+    new NoSuchElementException
+  }
+
+  def noSuchElementExceptionError(key: String): Throwable = {
+    new NoSuchElementException(key)
+  }
+
+  def cannotMutateReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot mutate ReadOnlySQLConf.")
+  }
+
+  def cannotCloneCopyReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot clone/copy ReadOnlySQLConf.")
+  }
+
+  def cannotGetSQLCOnfInSchedulerEventLoopThread(): Throwable = {
+    new RuntimeException("Cannot get SQLConf inside scheduler event loop thread.")
+  }
+
+  def unsupportedOperationExceptionError(): Throwable = {
+    new UnsupportedOperationException
+  }
+
+  def nullLiteralsCannotCastToError(name: String): Throwable = {
+    new UnsupportedOperationException(s"null literals can't be casted to $name")
+  }
+
+  def notUDFError(name: String, userClass: String): Throwable = {
+    new SparkException(s"$name is not an UserDefinedType. Please make sure registering " +
+        s"an UserDefinedType for ${userClass}")
+  }
+
+  def cannotLoadUDFError(name: String, userClass: String): Throwable = {

Review comment:
       cannotLoadUserDefinedTypeError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Failed during instantiating constructor for catalog " +
+      s"'$name': $pluginClassName", e.getCause)
+  }
+
+  def noSuchElementExceptionError(): Throwable = {
+    new NoSuchElementException
+  }
+
+  def noSuchElementExceptionError(key: String): Throwable = {
+    new NoSuchElementException(key)
+  }
+
+  def cannotMutateReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot mutate ReadOnlySQLConf.")
+  }
+
+  def cannotCloneCopyReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot clone/copy ReadOnlySQLConf.")
+  }
+
+  def cannotGetSQLCOnfInSchedulerEventLoopThread(): Throwable = {

Review comment:
       cannotGetSQLConfInSchedulerEventLoopThreadError

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1422,4 +1423,109 @@ object QueryExecutionErrors {
   def invalidStreamingOutputModeError(outputMode: Option[OutputMode]): Throwable = {
     new UnsupportedOperationException(s"Invalid output mode: $outputMode")
   }
+
+  def catalogPluginClassNotFoundError(name: String): Throwable = {
+    new CatalogNotFoundException(
+      s"Catalog '$name' plugin class not found: spark.sql.catalog.$name is not defined")
+  }
+
+  def catalogPluginClassNotImplementError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(
+      s"Plugin class for catalog '$name' does not implement CatalogPlugin: $pluginClassName")
+  }
+
+  def catalogPluginClassNotFindForCatalogError(name: String, pluginClassName: String): Throwable = {
+    new SparkException(s"Cannot find catalog plugin class for catalog '$name': $pluginClassName")
+  }
+
+  def catalogFailToFindPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to find public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogFailToCallPublicNoArgConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException(
+      s"Failed to call public no-arg constructor for catalog '$name': $pluginClassName)", e)
+  }
+
+  def catalogCannotInstantiateAbstractPluginClassError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Cannot instantiate abstract catalog plugin class for " +
+      s"catalog '$name': $pluginClassName", e.getCause)
+  }
+
+  def failInstantiatingConstructorError(
+      name: String,
+      pluginClassName: String,
+      e: Exception): Throwable = {
+    new SparkException("Failed during instantiating constructor for catalog " +
+      s"'$name': $pluginClassName", e.getCause)
+  }
+
+  def noSuchElementExceptionError(): Throwable = {
+    new NoSuchElementException
+  }
+
+  def noSuchElementExceptionError(key: String): Throwable = {
+    new NoSuchElementException(key)
+  }
+
+  def cannotMutateReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot mutate ReadOnlySQLConf.")
+  }
+
+  def cannotCloneCopyReadOnlySQLConfError(): Throwable = {
+    new UnsupportedOperationException("Cannot clone/copy ReadOnlySQLConf.")
+  }
+
+  def cannotGetSQLCOnfInSchedulerEventLoopThread(): Throwable = {
+    new RuntimeException("Cannot get SQLConf inside scheduler event loop thread.")
+  }
+
+  def unsupportedOperationExceptionError(): Throwable = {
+    new UnsupportedOperationException
+  }
+
+  def nullLiteralsCannotCastToError(name: String): Throwable = {
+    new UnsupportedOperationException(s"null literals can't be casted to $name")
+  }
+
+  def notUDFError(name: String, userClass: String): Throwable = {

Review comment:
       notUserDefinedTypeError



