karenfeng commented on a change in pull request #34093:
URL: https://github.com/apache/spark/pull/34093#discussion_r718729445



##########
File path: core/src/main/scala/org/apache/spark/SparkException.scala
##########
@@ -72,9 +72,47 @@ private[spark] case class ExecutorDeadException(message: 
String)
 /**
  * Exception thrown when Spark returns different result after upgrading to a 
new version.
  */
-private[spark] class SparkUpgradeException(version: String, message: String, 
cause: Throwable)
+private[spark] class SparkUpgradeException(
+    version: String,
+    message: String,
+    cause: Throwable,
+    errorClass: Option[String],
+    messageParameters: Array[String])
   extends RuntimeException("You may get a different result due to the 
upgrading of Spark" +
-    s" $version: $message", cause)
+    s" $version: $message", cause) with SparkThrowable {
+
+  def this(version: String, message: String, cause: Throwable) =
+    this(
+      version = version,
+      message = message,
+      cause = cause,
+      errorClass = None,
+      messageParameters = Array.empty
+    )
+
+  def this(errorClass: String, messageParameters: Array[String], cause: 
Throwable) =

Review comment:
       I don't know if we want to have this constructor - it feels like the 
version should always be provided.

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -39,9 +53,32 @@
     "message" : [ "Found duplicate keys '%s'" ],
     "sqlState" : "23000"
   },
+  "END_OF_STREAM" : {
+    "message" : [ "End of stream" ]
+  },
+  "FAILED_CAST_VALUE_TO_DATATYPE_FOR_PARTITION_COLUMN" : {
+    "message" : [ "Failed to cast value `%s` to `%s` for partition column 
`%s`" ],
+    "sqlState" : "22023"
+  },
   "FAILED_EXECUTE_UDF" : {
     "message" : [ "Failed to execute user defined function (%s: (%s) => %s)" ]
   },
+  "FAILED_FALLBACK_V1_BECAUSE_OF_INCONSISTENT_SCHEMA" : {
+    "message" : [ "The fallback v1 relation reports inconsistent schema:", 
"Schema of v2 scan:     %s", "Schema of v1 relation: %s" ],
+    "sqlState" : "22023"
+  },
+  "FAILED_FIND_DATA_SOURCE" : {
+    "message" : [ "Failed to find data source: %s. Please find packages at 
http://spark.apache.org/third-party-projects.html" ]
+  },
+  "FAILED_FORMAT_DATETIME_IN_NEW_FORMATTER" : {

Review comment:
       I think we can unify these names. How about: 
`FAILED_FORMAT_DATETIME_IN_SPARK3`?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -39,9 +57,31 @@
     "message" : [ "Found duplicate keys '%s'" ],
     "sqlState" : "23000"
   },
+  "END_OF_STREAM" : {
+    "message" : [ "End of stream" ]
+  },
+  "FAILED_CAST_VALUE_TO_DATATYPE_FOR_PARTITION_COLUMN" : {
+    "message" : [ "Failed to cast value `%s` to `%s` for partition column 
`%s`" ],
+    "sqlState" : "22023"
+  },
   "FAILED_EXECUTE_UDF" : {
     "message" : [ "Failed to execute user defined function (%s: (%s) => %s)" ]
   },
+  "FAILED_FALLBACK_V1_BECAUSE_OF_INCONSISTENT_SCHEMA" : {
+    "message" : [ "The fallback v1 relation reports inconsistent schema:\n 
Schema of v2 scan:     %s\nSchema of v1 relation: %s" ]
+  },
+  "FAILED_FIND_DATASOURCE" : {
+    "message" : [ "Failed to find data source: %s. Please find packages at 
http://spark.apache.org/third-party-projects.html" ]

Review comment:
       Bump!

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -39,9 +53,32 @@
     "message" : [ "Found duplicate keys '%s'" ],
     "sqlState" : "23000"
   },
+  "END_OF_STREAM" : {
+    "message" : [ "End of stream" ]
+  },
+  "FAILED_CAST_VALUE_TO_DATATYPE_FOR_PARTITION_COLUMN" : {
+    "message" : [ "Failed to cast value `%s` to `%s` for partition column 
`%s`" ],
+    "sqlState" : "22023"
+  },
   "FAILED_EXECUTE_UDF" : {
     "message" : [ "Failed to execute user defined function (%s: (%s) => %s)" ]
   },
+  "FAILED_FALLBACK_V1_BECAUSE_OF_INCONSISTENT_SCHEMA" : {
+    "message" : [ "The fallback v1 relation reports inconsistent schema:", 
"Schema of v2 scan:     %s", "Schema of v1 relation: %s" ],
+    "sqlState" : "22023"
+  },
+  "FAILED_FIND_DATA_SOURCE" : {
+    "message" : [ "Failed to find data source: %s. Please find packages at 
http://spark.apache.org/third-party-projects.html" ]
+  },
+  "FAILED_FORMAT_DATETIME_IN_NEW_FORMATTER" : {
+    "message" : [ "Fail to format it to '%s' in the new formatter. You can set 
%s to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and 
treat it as an invalid datetime string." ]
+  },
+  "FAILED_PARSE_DATETIME_IN_NEW_PARSER" : {

Review comment:
       How about: `FAILED_PARSE_DATETIME_IN_SPARK3`?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -153,13 +215,24 @@
     "message" : [ "Unsupported literal type %s %s" ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_SAVE_MODE" : {
+    "message" : [ "unsupported save mode %s" ],
+    "sqlState" : "0A000"
+  },
   "UNSUPPORTED_SIMPLE_STRING_WITH_NODE_ID" : {
     "message" : [ "%s does not implement simpleStringWithNodeId" ]
   },
+  "UNSUPPORTED_STREAMED_OPERATOR_BY_DATA_SOURCE" : {
+    "message" : [ "Data source %s does not support streamed %s" ],
+    "sqlState" : "0A000"
+  },
   "UNSUPPORTED_TRANSACTION_BY_JDBC_SERVER" : {
     "message" : [ "The target JDBC server does not support transaction and can 
only support ALTER TABLE with a single action." ],
     "sqlState" : "0A000"
   },
+  "WRITING_AMBIGUOUS_DATES" : {

Review comment:
For these upgrade class names, should we always include the Spark 
version name? Maybe `WRITING_AMBIGUOUS_DATES_IN_SPARK3`

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -39,9 +53,32 @@
     "message" : [ "Found duplicate keys '%s'" ],
     "sqlState" : "23000"
   },
+  "END_OF_STREAM" : {
+    "message" : [ "End of stream" ]
+  },
+  "FAILED_CAST_VALUE_TO_DATATYPE_FOR_PARTITION_COLUMN" : {
+    "message" : [ "Failed to cast value `%s` to `%s` for partition column 
`%s`" ],
+    "sqlState" : "22023"
+  },
   "FAILED_EXECUTE_UDF" : {
     "message" : [ "Failed to execute user defined function (%s: (%s) => %s)" ]
   },
+  "FAILED_FALLBACK_V1_BECAUSE_OF_INCONSISTENT_SCHEMA" : {
+    "message" : [ "The fallback v1 relation reports inconsistent schema:", 
"Schema of v2 scan:     %s", "Schema of v1 relation: %s" ],
+    "sqlState" : "22023"
+  },
+  "FAILED_FIND_DATA_SOURCE" : {
+    "message" : [ "Failed to find data source: %s. Please find packages at 
http://spark.apache.org/third-party-projects.html" ]
+  },
+  "FAILED_FORMAT_DATETIME_IN_NEW_FORMATTER" : {
+    "message" : [ "Fail to format it to '%s' in the new formatter. You can set 
%s to LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and 
treat it as an invalid datetime string." ]
+  },
+  "FAILED_PARSE_DATETIME_IN_NEW_PARSER" : {
+    "message" : [ "Fail to parse '%s' in the new parser. You can set %s to 
LEGACY to restore the behavior before Spark 3.0, or set to CORRECTED and treat 
it as an invalid datetime string." ]
+  },
+  "FAILED_RECOGNIZE_PATTERN_AFTER_UPGRADE" : {

Review comment:
       How about: `FAILED_RECOGNIZE_DATETIME_PATTERN_IN_SPARK3`?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -122,6 +166,9 @@
     "message" : [ "Invalid pivot value '%s': value data type %s does not match 
pivot column data type %s" ],
     "sqlState" : "42000"
   },
+  "READING_AMBIGUOUS_DATES" : {

Review comment:
For these upgrade class names, should we always include the Spark 
version name? Maybe `READING_AMBIGUOUS_DATES_IN_SPARK3`




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to