dgd-contributor commented on a change in pull request #32916:
URL: https://github.com/apache/spark/pull/32916#discussion_r654237052



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,46 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +

Review comment:
       Why should we use `stripMargin.replaceAll("\n", " ")`? I don't find that 
in the Scala style guide.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,46 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +

Review comment:
       ok :/ Done
   

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
##########
@@ -1647,4 +1647,46 @@ private[spark] object QueryCompilationErrors {
   def invalidYearMonthIntervalType(startFieldName: String, endFieldName: 
String): Throwable = {
     new AnalysisException(s"'interval $startFieldName to $endFieldName' is 
invalid.")
   }
+
+  def configRemovedInVersion(configName: String, version: String, comment: 
String): Throwable = {
+    new AnalysisException(
+      s"The SQL config '$configName' was removed in the version $version. 
$comment")
+  }
+
+  def failedFallbackParsingError(msg: String, e1: Throwable, e2: Throwable): 
Throwable = {
+    new AnalysisException(s"$msg${e1.getMessage}\nFailed fallback parsing: 
${e2.getMessage}",
+      cause = Some(e1.getCause))
+  }
+
+  def decimalCannotGreaterThanPrecisionError(scale: Int, precision: Int): 
Throwable = {
+    new AnalysisException(s"Decimal scale ($scale) cannot be greater than 
precision ($precision).")
+  }
+
+  def decimalOnlySupportPrecisionUptoError(decimalType: String, precision: 
Int): Throwable = {
+    new AnalysisException(s"$decimalType can only support precision up to 
$precision")
+  }
+
+  def negativeScaleNotAllowedError(scale: Int): Throwable = {
+    new AnalysisException(s"Negative scale is not allowed: $scale. " +
+      s"You can use ${LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key}=true 
" +
+      s"to enable legacy mode to allow it.")
+  }
+
+  def keyNotValidColumnInTableError(key: String, tblName: String): Throwable = 
{
+    new AnalysisException(s"$key is not a valid partition column in table 
$tblName.")
+  }
+
+  def partitionSpecNotValidError(
+      specKeys: String,
+      partitionColumnNames: Seq[String],
+      tableName: String): Throwable = {
+    new AnalysisException(s"Partition spec is invalid. The spec ($specKeys) 
must match " +

Review comment:
       Done, thanks




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to