This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 13d9dd11a84 [SPARK-41579][SQL] Assign name to _LEGACY_ERROR_TEMP_1249
13d9dd11a84 is described below

commit 13d9dd11a842494f9306bde83a867231798e872c
Author: itholic <haejoon....@databricks.com>
AuthorDate: Wed Jan 18 17:19:38 2023 +0800

    [SPARK-41579][SQL] Assign name to _LEGACY_ERROR_TEMP_1249
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to assign the name "NOT_A_PARTITIONED_TABLE" to the error
class _LEGACY_ERROR_TEMP_1249.
    
    ### Why are the changes needed?
    
    We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
    
    Closes #39260 from itholic/LEGACY_1249.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 core/src/main/resources/error/error-classes.json         | 15 ++++++++++-----
 .../apache/spark/sql/errors/QueryCompilationErrors.scala | 12 ++++++------
 .../command/v1/AlterTableRecoverPartitionsSuite.scala    | 16 ++++++++++++++++
 3 files changed, 32 insertions(+), 11 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 39c23054945..2570ffeba3b 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1006,6 +1006,11 @@
     ],
     "sqlState" : "42000"
   },
+  "NOT_A_PARTITIONED_TABLE" : {
+    "message" : [
+      "Operation <operation> is not allowed for <tableIdentWithDB> because it 
is not a partitioned table."
+    ]
+  },
   "NO_HANDLER_FOR_UDAF" : {
     "message" : [
       "No handler for UDAF '<functionName>'. Use 
sparkSession.udf.register(...) instead."
@@ -3062,11 +3067,6 @@
       "Operation not allowed: ALTER TABLE SET SERDE is not supported for 
tables created with the datasource API."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1249" : {
-    "message" : [
-      "Operation not allowed: <cmd> only works on partitioned tables: 
<tableIdentWithDB>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1250" : {
     "message" : [
       "<action> is not allowed on <tableName> since filesource partition 
management is disabled (spark.sql.hive.manageFilesourcePartitions = false)."
@@ -5293,5 +5293,10 @@
     "message" : [
       "grouping() can only be used with GroupingSets/Cube/Rollup"
     ]
+  },
+  "_LEGACY_ERROR_TEMP_2446" : {
+    "message" : [
+      "Operation not allowed: <cmd> only works on table with location 
provided: <tableIdentWithDB>"
+    ]
   }
 }
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 69128d748ce..83b69d700ac 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2393,22 +2393,22 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map.empty)
   }
 
-  def cmdOnlyWorksOnPartitionedTablesError(cmd: String, tableIdentWithDB: 
String): Throwable = {
+  def cmdOnlyWorksOnPartitionedTablesError(
+      operation: String,
+      tableIdentWithDB: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1249",
+      errorClass = "NOT_A_PARTITIONED_TABLE",
       messageParameters = Map(
-        "cmd" -> cmd,
+        "operation" -> toSQLStmt(operation),
         "tableIdentWithDB" -> tableIdentWithDB))
   }
 
   def cmdOnlyWorksOnTableWithLocationError(cmd: String, tableIdentWithDB: 
String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1249",
+      errorClass = "_LEGACY_ERROR_TEMP_2446",
       messageParameters = Map(
         "cmd" -> cmd,
         "tableIdentWithDB" -> tableIdentWithDB))
-    new AnalysisException(s"Operation not allowed: $cmd only works on table 
with " +
-      s"location provided: $tableIdentWithDB")
   }
 
   def actionNotAllowedOnTableWithFilesourcePartitionManagementDisabledError(
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableRecoverPartitionsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableRecoverPartitionsSuite.scala
index 32963f5fc2d..b219e21a3d8 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableRecoverPartitionsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableRecoverPartitionsSuite.scala
@@ -114,6 +114,22 @@ trait AlterTableRecoverPartitionsSuiteBase extends 
command.AlterTableRecoverPart
       checkPartitions(t, expected: _*)
     }
   }
+
+  test("ALTER TABLE .. RECOVER PARTITIONS is not allowed for non-partitioned 
table") {
+    withTable("tbl") {
+      sql("CREATE TABLE tbl(col1 int, col2 string) USING parquet")
+      val exception = intercept[AnalysisException] {
+        sql("ALTER TABLE tbl RECOVER PARTITIONS")
+      }
+      checkError(
+        exception = exception,
+        errorClass = "NOT_A_PARTITIONED_TABLE",
+        parameters = Map(
+          "operation" -> "ALTER TABLE RECOVER PARTITIONS",
+          "tableIdentWithDB" -> "`spark_catalog`.`default`.`tbl`")
+      )
+    }
+  }
 }
 
 /**


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to