This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3a6d2153b93 [SPARK-43749][SPARK-43750][SQL] Assign names to the error 
class _LEGACY_ERROR_TEMP_240[4-5]
3a6d2153b93 is described below

commit 3a6d2153b93c759b68e5827905d1867ba93ec9cf
Author: Jiaan Geng <belie...@163.com>
AuthorDate: Thu May 25 20:14:00 2023 +0300

    [SPARK-43749][SPARK-43750][SQL] Assign names to the error class 
_LEGACY_ERROR_TEMP_240[4-5]
    
    ### What changes were proposed in this pull request?
    The pr aims to assign a name to the error class _LEGACY_ERROR_TEMP_240[4-5].
    
    ### Why are the changes needed?
    Improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    
    ### How was this patch tested?
    N/A
    
    Closes #41279 from beliefer/INVALID_PARTITION_OPERATION.
    
    Authored-by: Jiaan Geng <belie...@163.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 29 +++++++++------
 .../sql/catalyst/analysis/CheckAnalysis.scala      |  8 ++---
 .../command/ShowPartitionsSuiteBase.scala          | 12 -------
 .../execution/command/v1/ShowPartitionsSuite.scala | 18 ++++++++++
 .../command/v2/AlterTableAddPartitionSuite.scala   | 20 ++++++++---
 .../command/v2/AlterTableDropPartitionSuite.scala  | 19 +++++++---
 .../execution/command/v2/ShowPartitionsSuite.scala | 41 +++++++++++++++++++---
 .../execution/command/v2/TruncateTableSuite.scala  | 20 ++++++++---
 8 files changed, 122 insertions(+), 45 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 1ccbdfdc6eb..7683e7b8650 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1156,6 +1156,23 @@
     },
     "sqlState" : "22023"
   },
+  "INVALID_PARTITION_OPERATION" : {
+    "message" : [
+      "The partition command is invalid."
+    ],
+    "subClass" : {
+      "PARTITION_MANAGEMENT_IS_UNSUPPORTED" : {
+        "message" : [
+          "Table <name> does not support partition management."
+        ]
+      },
+      "PARTITION_SCHEMA_IS_EMPTY" : {
+        "message" : [
+          "Table <name> is not partitioned."
+        ]
+      }
+    }
+  },
   "INVALID_PROPERTY_KEY" : {
     "message" : [
       "<key> is an invalid property key, please use quotes, e.g. SET 
<key>=<value>."
@@ -5374,16 +5391,6 @@
       "failed to evaluate expression <sqlExpr>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2404" : {
-    "message" : [
-      "Table <name> is not partitioned."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2405" : {
-    "message" : [
-      "Table <name> does not support partition management."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2406" : {
     "message" : [
       "invalid cast from <srcType> to <targetType>."
@@ -5772,4 +5779,4 @@
       "Failed to get block <blockId>, which is not a shuffle block"
     ]
   }
-}
\ No newline at end of file
+}
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 407a9d363f4..fac3f491200 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -211,13 +211,13 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
             case t: SupportsPartitionManagement =>
               if (t.partitionSchema.isEmpty) {
                 r.failAnalysis(
-                  errorClass = "_LEGACY_ERROR_TEMP_2404",
-                  messageParameters = Map("name" -> r.name))
+                  errorClass = 
"INVALID_PARTITION_OPERATION.PARTITION_SCHEMA_IS_EMPTY",
+                  messageParameters = Map("name" -> toSQLId(r.name)))
               }
             case _ =>
               r.failAnalysis(
-                errorClass = "_LEGACY_ERROR_TEMP_2405",
-                messageParameters = Map("name" -> r.name))
+                errorClass = 
"INVALID_PARTITION_OPERATION.PARTITION_MANAGEMENT_IS_UNSUPPORTED",
+                messageParameters = Map("name" -> toSQLId(r.name)))
           }
           case _ =>
         }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
index 27d2eb98543..462b967a759 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
@@ -63,18 +63,6 @@ trait ShowPartitionsSuiteBase extends QueryTest with 
DDLCommandTestUtils {
       .saveAsTable(table)
   }
 
-  test("show partitions of non-partitioned table") {
-    withNamespaceAndTable("ns", "not_partitioned_table") { t =>
-      sql(s"CREATE TABLE $t (col1 int) $defaultUsing")
-      val errMsg = intercept[AnalysisException] {
-        sql(s"SHOW PARTITIONS $t")
-      }.getMessage
-      assert(errMsg.contains("not allowed on a table that is not partitioned") 
||
-        // V2 error message.
-        errMsg.contains(s"Table $t is not partitioned"))
-    }
-  }
-
   test("non-partitioning columns") {
     withNamespaceAndTable("ns", "dateTable") { t =>
       createDateTable(t)
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index a26e29706e1..e67ed807a87 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -18,6 +18,8 @@
 package org.apache.spark.sql.execution.command.v1
 
 import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.execution.command
 
 /**
@@ -117,6 +119,22 @@ class ShowPartitionsSuite extends ShowPartitionsSuiteBase 
with CommandSuiteBase
     }
   }
 
+  test("show partitions of non-partitioned table") {
+    withNamespaceAndTable("ns", "not_partitioned_table") { t =>
+      sql(s"CREATE TABLE $t (col1 int) $defaultUsing")
+      val sqlText = s"SHOW PARTITIONS $t"
+      val tableName =
+        
UnresolvedAttribute.parseAttributeName(t).map(quoteIdentifier).mkString(".")
+
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sqlText)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1269",
+        parameters = Map("tableIdentWithDB" -> tableName))
+    }
+  }
+
   test("SPARK-33904: null and empty string as partition values") {
     withNamespaceAndTable("ns", "tbl") { t =>
       createNullPartTable(t, "parquet")
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
index 09ebd4af4ec..b733666f0d8 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
@@ -19,7 +19,8 @@ package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.SparkNumberFormatException
 import org.apache.spark.sql.{AnalysisException, Row}
-import org.apache.spark.sql.catalyst.analysis.PartitionsAlreadyExistException
+import 
org.apache.spark.sql.catalyst.analysis.{PartitionsAlreadyExistException, 
UnresolvedAttribute}
+import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.internal.SQLConf
 
@@ -35,10 +36,19 @@ class AlterTableAddPartitionSuite
   test("SPARK-33650: add partition into a table which doesn't support 
partition management") {
     withNamespaceAndTable("ns", "tbl", s"non_part_$catalog") { t =>
       sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing")
-      val errMsg = intercept[AnalysisException] {
-        sql(s"ALTER TABLE $t ADD PARTITION (id=1)")
-      }.getMessage
-      assert(errMsg.contains(s"Table $t does not support partition 
management"))
+      val tableName = 
UnresolvedAttribute.parseAttributeName(t).map(quoteIdentifier).mkString(".")
+      val sqlText = s"ALTER TABLE $t ADD PARTITION (id=1)"
+
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sqlText)
+        },
+        errorClass = 
"INVALID_PARTITION_OPERATION.PARTITION_MANAGEMENT_IS_UNSUPPORTED",
+        parameters = Map("name" -> tableName),
+        context = ExpectedContext(
+          fragment = t,
+          start = 12,
+          stop = 39))
     }
   }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
index b03c8fb17f5..f2d90990257 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala
@@ -18,6 +18,8 @@
 package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.execution.command
 
 /**
@@ -33,10 +35,19 @@ class AlterTableDropPartitionSuite
   test("SPARK-33650: drop partition into a table which doesn't support 
partition management") {
     withNamespaceAndTable("ns", "tbl", s"non_part_$catalog") { t =>
       sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing")
-      val errMsg = intercept[AnalysisException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1)")
-      }.getMessage
-      assert(errMsg.contains(s"Table $t does not support partition 
management"))
+      val tableName = 
UnresolvedAttribute.parseAttributeName(t).map(quoteIdentifier).mkString(".")
+      val sqlText = s"ALTER TABLE $t DROP PARTITION (id=1)"
+
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sqlText)
+        },
+        errorClass = 
"INVALID_PARTITION_OPERATION.PARTITION_MANAGEMENT_IS_UNSUPPORTED",
+        parameters = Map("name" -> tableName),
+        context = ExpectedContext(
+          fragment = t,
+          start = 12,
+          stop = 39))
     }
   }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
index 8ae8171924c..203ef4314ad 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
@@ -18,22 +18,55 @@
 package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.execution.command
 
 /**
  * The class contains tests for the `SHOW PARTITIONS` command to check V2 
table catalogs.
  */
 class ShowPartitionsSuite extends command.ShowPartitionsSuiteBase with 
CommandSuiteBase {
+
+  test("show partitions of non-partitioned table") {
+    withNamespaceAndTable("ns", "not_partitioned_table") { t =>
+      sql(s"CREATE TABLE $t (col1 int) $defaultUsing")
+      val sqlText = s"SHOW PARTITIONS $t"
+      val tableName =
+        
UnresolvedAttribute.parseAttributeName(t).map(quoteIdentifier).mkString(".")
+
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sqlText)
+        },
+        errorClass = "INVALID_PARTITION_OPERATION.PARTITION_SCHEMA_IS_EMPTY",
+        parameters = Map("name" -> tableName),
+        context = ExpectedContext(
+          fragment = t,
+          start = 16,
+          stop = sqlText.length - 1))
+    }
+  }
+
   test("a table does not support partitioning") {
     val table = s"non_part_$catalog.tab1"
     withTable(table) {
       sql(s"""
         |CREATE TABLE $table (price int, qty int, year int, month int)
         |$defaultUsing""".stripMargin)
-      val errMsg = intercept[AnalysisException] {
-        sql(s"SHOW PARTITIONS $table")
-      }.getMessage
-      assert(errMsg.contains(s"Table $table does not support partition 
management"))
+      val tableName =
+        
UnresolvedAttribute.parseAttributeName(table).map(quoteIdentifier).mkString(".")
+      val sqlText = s"SHOW PARTITIONS $table"
+
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sqlText)
+        },
+        errorClass = 
"INVALID_PARTITION_OPERATION.PARTITION_MANAGEMENT_IS_UNSUPPORTED",
+        parameters = Map("name" -> tableName),
+        context = ExpectedContext(
+          fragment = table,
+          start = 16,
+          stop = sqlText.length - 1))
     }
   }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/TruncateTableSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/TruncateTableSuite.scala
index f125a72bd32..36b994c21a0 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/TruncateTableSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/TruncateTableSuite.scala
@@ -18,6 +18,8 @@
 package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
+import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.execution.command
 
 /**
@@ -31,12 +33,20 @@ class TruncateTableSuite extends 
command.TruncateTableSuiteBase with CommandSuit
     withNamespaceAndTable("ns", "tbl", s"non_part_$catalog") { t =>
       sql(s"CREATE TABLE $t (c0 INT) $defaultUsing")
       sql(s"INSERT INTO $t SELECT 0")
+      val sqlText = s"TRUNCATE TABLE $t PARTITION (c0=1)"
+      val tableName =
+        
UnresolvedAttribute.parseAttributeName(t).map(quoteIdentifier).mkString(".")
 
-      val errMsg = intercept[AnalysisException] {
-        sql(s"TRUNCATE TABLE $t PARTITION (c0=1)")
-      }.getMessage
-      assert(errMsg.contains(
-        "Table non_part_test_catalog.ns.tbl does not support partition 
management"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sqlText)
+        },
+        errorClass = 
"INVALID_PARTITION_OPERATION.PARTITION_MANAGEMENT_IS_UNSUPPORTED",
+        parameters = Map("name" -> tableName),
+        context = ExpectedContext(
+          fragment = t,
+          start = 15,
+          stop = 42))
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to