This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 5042263f8668 [SPARK-47479][SQL] Optimize cannot write data to relations with multiple paths error log
5042263f8668 is described below

commit 5042263f86684090f28d9ae051a2c0aa367058b4
Author: huangxiaoping <1754789...@qq.com>
AuthorDate: Fri Mar 22 10:36:25 2024 +0500

    [SPARK-47479][SQL] Optimize cannot write data to relations with multiple paths error log
    
    ### What changes were proposed in this pull request?
    Add paths information to the error log
    
    ### Why are the changes needed?
    Make it easier for users to troubleshoot problems
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #45605 from huangxiaopingRD/SPARK-47479.
    
    Authored-by: huangxiaoping <1754789...@qq.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 common/utils/src/main/resources/error/error-classes.json       | 10 +++++-----
 docs/sql-error-conditions-unsupported-insert-error-class.md    |  4 ++++
 .../org/apache/spark/sql/errors/QueryCompilationErrors.scala   |  7 ++++---
 .../spark/sql/execution/datasources/DataSourceStrategy.scala   |  3 ++-
 4 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 2c3558232361..091f24d44f66 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -4345,6 +4345,11 @@
       "Can't insert into the target."
     ],
     "subClass" : {
+      "MULTI_PATH" : {
+        "message" : [
+          "Can only write data to relations with a single path but given paths are <paths>."
+        ]
+      },
       "NOT_ALLOWED" : {
         "message" : [
           "The target relation <relationId> does not allow insertion."
@@ -5269,11 +5274,6 @@
       "The ordering of partition columns is <partColumns>. All partition 
columns having constant values need to appear before other partition columns 
that do not have an assigned constant value."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1148" : {
-    "message" : [
-      "Can only write data to relations with a single path."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1149" : {
     "message" : [
       "Fail to rebuild expression: missing key <filter> in 
`translatedFilterToExpr`."
diff --git a/docs/sql-error-conditions-unsupported-insert-error-class.md b/docs/sql-error-conditions-unsupported-insert-error-class.md
index a8f25440b00a..3f679589fd3a 100644
--- a/docs/sql-error-conditions-unsupported-insert-error-class.md
+++ b/docs/sql-error-conditions-unsupported-insert-error-class.md
@@ -30,6 +30,10 @@ Can't insert into the target.
 
 This error class has the following derived error classes:
 
+## MULTI_PATH
+
+Can only write data to relations with a single path but given paths are `<paths>`.
+
 ## NOT_ALLOWED
 
 The target relation `<relationId>` does not allow insertion.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index c8a2bec8668c..81cd50d5f2d3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1709,10 +1709,11 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         "partColumns" -> 
targetPartitionSchema.fields.map(_.name).mkString("[", ",", "]")))
   }
 
-  def cannotWriteDataToRelationsWithMultiplePathsError(): Throwable = {
+  def cannotWriteDataToRelationsWithMultiplePathsError(paths: Seq[Path]): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1148",
-      messageParameters = Map.empty)
+      errorClass = "UNSUPPORTED_INSERT.MULTI_PATH",
+      messageParameters = Map(
+        "paths" -> paths.mkString("[", ",", "]")))
   }
 
   def failedToRebuildExpressionError(filter: Filter): Throwable = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
index 252ab1bcb54e..845d969df088 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
@@ -198,7 +198,8 @@ object DataSourceAnalysis extends Rule[LogicalPlan] {
 
       // Sanity check
       if (t.location.rootPaths.size != 1) {
-        throw QueryCompilationErrors.cannotWriteDataToRelationsWithMultiplePathsError()
+        throw QueryCompilationErrors
+          .cannotWriteDataToRelationsWithMultiplePathsError(t.location.rootPaths)
       }
 
       val outputPath = t.location.rootPaths.head
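
For readers skimming the diff, here is a minimal, self-contained Scala sketch of how the
new UNSUPPORTED_INSERT.MULTI_PATH message ends up carrying the relation's root paths. The
renderMultiPathError helper and the example HDFS paths are hypothetical; Spark's actual
rendering goes through QueryCompilationErrors and the error-classes.json framework, but the
<paths> formatting below mirrors the paths.mkString("[", ",", "]") call introduced by this
commit.

    object MultiPathErrorSketch {
      // Message template added to error-classes.json under UNSUPPORTED_INSERT.MULTI_PATH.
      private val template =
        "Can only write data to relations with a single path but given paths are <paths>."

      // Hypothetical helper: fills the <paths> placeholder the same way the commit formats
      // the parameter in QueryCompilationErrors, i.e. paths.mkString("[", ",", "]").
      def renderMultiPathError(rootPaths: Seq[String]): String =
        template.replace("<paths>", rootPaths.mkString("[", ",", "]"))

      def main(args: Array[String]): Unit = {
        // Example root paths; before this change the error message carried no path information.
        val paths = Seq("hdfs://nn/warehouse/t/p1", "hdfs://nn/warehouse/t/p2")
        println(renderMultiPathError(paths))
        // Prints:
        // Can only write data to relations with a single path but given paths are
        // [hdfs://nn/warehouse/t/p1,hdfs://nn/warehouse/t/p2].
      }
    }

The printed message is exactly what the troubleshooting improvement is about: the offending
root paths are now visible in the error instead of the bare single-path complaint.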

