This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 69c46876b5a7 [SPARK-46576][SQL] Improve error messages for unsupported data source save mode
69c46876b5a7 is described below

commit 69c46876b5a76c2de6a149ea7663fad18027e387
Author: allisonwang-db <allison.w...@databricks.com>
AuthorDate: Thu Jan 4 09:40:40 2024 +0300

    [SPARK-46576][SQL] Improve error messages for unsupported data source save mode
    
    ### What changes were proposed in this pull request?
    
    This PR renames the error class `_LEGACY_ERROR_TEMP_1308` to `UNSUPPORTED_DATA_SOURCE_SAVE_MODE` and improves its error messages.
    
    ### Why are the changes needed?
    
    To make the error more user-friendly.
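
    For illustration, consider a write like the following, where MySource is a
    hypothetical stand-in for any source that only supports the Append and
    Overwrite save modes:

        spark.range(1).write.format("MySource").mode("ignore").save()

    Before this change, the failure read:

        TableProvider implementation MySource cannot be written with Ignore
        mode, please use Append or Overwrite modes instead.

    After this change, it reads:

        The data source 'MySource' cannot be written in the "Ignore" mode.
        Please use either the "Append" or "Overwrite" mode instead.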
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    New unit tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #44576 from allisonwang-db/spark-46576-unsupported-save-mode.
    
    Authored-by: allisonwang-db <allison.w...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json     | 11 ++++++-----
 .../apache/spark/sql/kafka010/KafkaSinkSuite.scala  |  2 +-
 docs/sql-error-conditions.md                        |  6 ++++++
 .../spark/sql/errors/QueryCompilationErrors.scala   |  4 ++--
 .../spark/sql/connector/DataSourceV2Suite.scala     |  8 ++++----
 .../execution/python/PythonDataSourceSuite.scala    | 21 +++++++++++++++------
 6 files changed, 34 insertions(+), 18 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index bcaf8a74c08d..9cade1197dca 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -3588,6 +3588,12 @@
     ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_DATA_SOURCE_SAVE_MODE" : {
+    "message" : [
+      "The data source '<source>' cannot be written in the <createMode> mode. 
Please use either the \"Append\" or \"Overwrite\" mode instead."
+    ],
+    "sqlState" : "0A000"
+  },
   "UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE" : {
     "message" : [
       "The <format> datasource doesn't support the column <columnName> of the 
type <columnType>."
@@ -5403,11 +5409,6 @@
       "There is a 'path' option set and save() is called with a path 
parameter. Either remove the path option, or call save() without the parameter. 
To ignore this check, set '<config>' to 'true'."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1308" : {
-    "message" : [
-      "TableProvider implementation <source> cannot be written with 
<createMode> mode, please use Append or Overwrite modes instead."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1309" : {
     "message" : [
       "insertInto() can't be used together with partitionBy(). Partition 
columns have already been defined for the table. It is not necessary to use 
partitionBy()."
diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
index 6753f8be54bf..5566785c4d56 100644
--- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
+++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
@@ -557,7 +557,7 @@ class KafkaSinkBatchSuiteV2 extends KafkaSinkBatchSuiteBase {
 
   test("batch - unsupported save modes") {
     testUnsupportedSaveModes((mode) =>
-      Seq(s"cannot be written with ${mode.name} mode", "does not support 
truncate"))
+      Seq(s"cannot be written in the \"${mode.name}\" mode", "does not support 
truncate"))
   }
 
   test("generic - write big data with small producer buffer") {
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index c6108e97b4c5..89de607b0f22 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -2332,6 +2332,12 @@ Unsupported data source type for direct query on files: `<dataSourceType>`
 
 Unsupported data type `<typeName>`.
 
+### UNSUPPORTED_DATA_SOURCE_SAVE_MODE
+
+[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
+
+The data source '`<source>`' cannot be written in the `<createMode>` mode. Please use either the "Append" or "Overwrite" mode instead.
+
 ### UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE
 
[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index b844ee2bdc45..90e7ab610f7a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3193,10 +3193,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
 
   def writeWithSaveModeUnsupportedBySourceError(source: String, createMode: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1308",
+      errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
       messageParameters = Map(
         "source" -> source,
-        "createMode" -> createMode))
+        "createMode" -> toDSOption(createMode)))
   }
 
   def partitionByDoesNotAllowedWhenUsingInsertIntoError(): Throwable = {
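
Besides the rename, the one behavioral change above is passing createMode
through toDSOption, which produces the quoted "Ignore" / "ErrorIfExists"
values asserted in the test updates below. A minimal sketch of that quoting,
assuming toDSOption simply wraps its argument in double quotes (inferred from
the test expectations, not from the actual QueryErrorsBase implementation):

    // Assumed behavior: wrap the save-mode name in literal double quotes.
    def toDSOption(mode: String): String = "\"" + mode + "\""

    toDSOption("Ignore")         // => "Ignore"         (quotes included)
    toDSOption("ErrorIfExists")  // => "ErrorIfExists"  (quotes included)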
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
index ea263b36c76c..fbcbf287b455 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
@@ -454,10 +454,10 @@ class DataSourceV2Suite extends QueryTest with SharedSparkSession with AdaptiveS
               .write.format(cls.getName)
               .option("path", path).mode("ignore").save()
           },
-          errorClass = "_LEGACY_ERROR_TEMP_1308",
+          errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
           parameters = Map(
             "source" -> cls.getName,
-            "createMode" -> "Ignore"
+            "createMode" -> "\"Ignore\""
           )
         )
 
@@ -467,10 +467,10 @@ class DataSourceV2Suite extends QueryTest with SharedSparkSession with AdaptiveS
               .write.format(cls.getName)
               .option("path", path).mode("error").save()
           },
-          errorClass = "_LEGACY_ERROR_TEMP_1308",
+          errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
           parameters = Map(
             "source" -> cls.getName,
-            "createMode" -> "ErrorIfExists"
+            "createMode" -> "\"ErrorIfExists\""
           )
         )
       }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala
index 487951912bb0..a4614c2b4bad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala
@@ -622,12 +622,21 @@ class PythonDataSourceSuite extends QueryTest with SharedSparkSession {
     }
 
     withClue("without mode") {
-      val error = intercept[AnalysisException] {
-        spark.range(1).write.format(dataSourceName).save()
-      }
-      // TODO: improve this error message.
-      assert(error.getMessage.contains("TableProvider implementation SimpleDataSource " +
-        "cannot be written with ErrorIfExists mode, please use Append or Overwrite modes instead."))
+      checkError(
+        exception = intercept[AnalysisException] {
+          spark.range(1).write.format(dataSourceName).save()
+        },
+        errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
+        parameters = Map("source" -> "SimpleDataSource", "createMode" -> "\"ErrorIfExists\""))
+    }
+
+    withClue("with unsupported mode") {
+      checkError(
+        exception = intercept[AnalysisException] {
+          spark.range(1).write.format(dataSourceName).mode("ignore").save()
+        },
+        errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
+        parameters = Map("source" -> "SimpleDataSource", "createMode" -> "\"Ignore\""))
     }
 
     withClue("invalid mode") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
