beliefer commented on a change in pull request #32916:
URL: https://github.com/apache/spark/pull/32916#discussion_r654125640



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,47 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +
+      s"You can use spark.sql.legacy.allowNegativeScaleOfDecimal=true " +
+      s"to enable legacy mode to allow it.")
+  }
+
+  def keyNotValidColumnInTableError(key: String, tblName: String): Throwable = 
{
+    new AnalysisException(s"$key is not a valid partition column in table 
$tblName.")
+  }
+
+  def partitionSpecNotValidError(
+      specKeys: String,
+      partitionColumnNames: Seq[String],
+      tableName: String): Throwable = {
+    new AnalysisException(
+      s"Partition spec is invalid. The spec ($specKeys) must match " +
+        s"the partition spec (${partitionColumnNames.mkString(", ")}) defined 
in " +
+        s"table '$tableName'")

Review comment:
       ```
   """
     | ...
     |...
   """.stripMargin.replaceAll("\n", " ")
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
##########
@@ -45,13 +45,13 @@ case class DecimalType(precision: Int, scale: Int) extends 
FractionalType {
   DecimalType.checkNegativeScale(scale)
 
   if (scale > precision) {
-    throw new AnalysisException(
-      s"Decimal scale ($scale) cannot be greater than precision ($precision).")
+    throw QueryCompilationErrors.decimalCannotGreaterThanPrecisionError(scale, 
precision)
   }
 
   if (precision > DecimalType.MAX_PRECISION) {
-    throw new AnalysisException(
-      s"${DecimalType.simpleString} can only support precision up to 
${DecimalType.MAX_PRECISION}")
+    throw QueryCompilationErrors.decimalOnlySupportPrecisionUptoError(
+      DecimalType.simpleString,
+      DecimalType.MAX_PRECISION)

Review comment:
       ```
   throw QueryCompilationErrors.decimalOnlySupportPrecisionUptoError(
     DecimalType.simpleString, DecimalType.MAX_PRECISION)
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,47 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +
+      s"You can use spark.sql.legacy.allowNegativeScaleOfDecimal=true " +
+      s"to enable legacy mode to allow it.")

Review comment:
       ```
   """
     | ...
     |...
   """.stripMargin.replaceAll("\n", " ")
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,46 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +

Review comment:
       ```
   """
     | ...
     |...
   """.stripMargin.replaceAll("\n", " ")
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,46 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +
+      s"You can use ${LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key}=true 
" +
+      s"to enable legacy mode to allow it.")
+  }
+
+  def keyNotValidColumnInTableError(key: String, tblName: String): Throwable = 
{
+    new AnalysisException(s"$key is not a valid partition column in table 
$tblName.")
+  }
+
+  def partitionSpecNotValidError(
+      specKeys: String,
+      partitionColumnNames: Seq[String],
+      tableName: String): Throwable = {
+    new AnalysisException(s"Partition spec is invalid. The spec ($specKeys) 
must match " +

Review comment:
       ```
   """
     | ...
     |...
   """.stripMargin.replaceAll("\n", " ")
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,46 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +

Review comment:
       You should follow the others.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to