This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e0f44103bc5 [SPARK-40910][SQL] Replace UnsupportedOperationException 
with SparkUnsupportedOperationException
e0f44103bc5 is described below

commit e0f44103bc58588447a692dc0e19ba3aeaef1856
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Thu Oct 27 14:17:40 2022 +0300

    [SPARK-40910][SQL] Replace UnsupportedOperationException with 
SparkUnsupportedOperationException
    
    ### What changes were proposed in this pull request?
    This PR aims to replace UnsupportedOperationException with SparkUnsupportedOperationException.
    
    ### Why are the changes needed?
    1. While working on https://issues.apache.org/jira/browse/SPARK-40889,
    I found that `QueryExecutionErrors.unsupportedPartitionTransformError` throws
**UnsupportedOperationException** (not
**SparkUnsupportedOperationException**), which does not seem to fit into the new error
framework.
    
https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala#L71-L72
    
    2. `QueryExecutionErrors.unsupportedPartitionTransformError` throws
SparkUnsupportedOperationException, but the UT catches
`UnsupportedOperationException`.
    
https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala#L288-L301
    
    
https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala#L904-L909
    
    
https://github.com/apache/spark/blob/a27b459be3ca2ad2d50b9d793b939071ca2270e2/core/src/main/scala/org/apache/spark/SparkException.scala#L144-L154
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Existing UTs.
    
    Closes #38387 from panbingkun/replace_UnsupportedOperationException.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |  5 ++++
 .../sql/connector/catalog/CatalogV2Implicits.scala |  4 +++-
 .../spark/sql/errors/QueryExecutionErrors.scala    |  4 ++--
 .../execution/command/PlanResolutionSuite.scala    | 27 +++++++++++-----------
 4 files changed, 23 insertions(+), 17 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 015d86171d7..16347f89463 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -964,6 +964,11 @@
           "Literal for '<value>' of <type>."
         ]
       },
+      "MULTIPLE_BUCKET_TRANSFORMS" : {
+        "message" : [
+          "Multiple bucket TRANSFORMs."
+        ]
+      },
       "NATURAL_CROSS_JOIN" : {
         "message" : [
           "NATURAL CROSS JOIN."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
index 91809b6176c..d9f15d84d89 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
@@ -60,7 +60,9 @@ private[sql] object CatalogV2Implicits {
           identityCols += col
 
         case BucketTransform(numBuckets, col, sortCol) =>
-          if (bucketSpec.nonEmpty) throw 
QueryExecutionErrors.multipleBucketTransformsError
+          if (bucketSpec.nonEmpty) {
+            throw QueryExecutionErrors.unsupportedMultipleBucketTransformsError
+          }
           if (sortCol.isEmpty) {
             bucketSpec = Some(BucketSpec(numBuckets, 
col.map(_.fieldNames.mkString(".")), Nil))
           } else {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 7e870e23fba..ba78858debc 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2623,9 +2623,9 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
         "format" -> format))
   }
 
-  def multipleBucketTransformsError(): SparkUnsupportedOperationException = {
+  def unsupportedMultipleBucketTransformsError(): 
SparkUnsupportedOperationException = {
     new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_2279",
+      errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
       messageParameters = Map.empty)
   }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index 6276b1a3b60..3b2271afc86 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -24,6 +24,7 @@ import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, when}
 import org.mockito.invocation.InvocationOnMock
 
+import org.apache.spark.SparkUnsupportedOperationException
 import org.apache.spark.sql.{AnalysisException, SaveMode}
 import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.{AnalysisContext, AnalysisTest, 
Analyzer, EmptyFunctionRegistry, NoSuchTableException, ResolvedFieldName, 
ResolvedIdentifier, ResolvedTable, ResolveSessionCatalog, UnresolvedAttribute, 
UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, 
UnresolvedTable}
@@ -292,13 +293,12 @@ class PlanResolutionSuite extends AnalysisTest {
            |CREATE TABLE my_tab(a INT, b STRING) USING parquet
            |PARTITIONED BY ($transform)
            """.stripMargin
-
-      val ae = intercept[UnsupportedOperationException] {
-        parseAndResolve(query)
-      }
-
-      assert(ae.getMessage
-        .contains(s"Unsupported partition transform: $transform"))
+      checkError(
+        exception = intercept[SparkUnsupportedOperationException] {
+          parseAndResolve(query)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_2067",
+        parameters = Map("transform" -> transform))
     }
   }
 
@@ -310,13 +310,12 @@ class PlanResolutionSuite extends AnalysisTest {
            |CREATE TABLE my_tab(a INT, b STRING, c String) USING parquet
            |PARTITIONED BY ($transform)
            """.stripMargin
-
-      val ae = intercept[UnsupportedOperationException] {
-        parseAndResolve(query)
-      }
-
-      assert(ae.getMessage
-        .contains("Multiple bucket transforms are not supported."))
+      checkError(
+        exception = intercept[SparkUnsupportedOperationException] {
+          parseAndResolve(query)
+        },
+        errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
+        parameters = Map.empty)
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to