This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bb497eb7e8d [SPARK-46519][SQL] Clear unused error classes from `error-classes.json` file
bb497eb7e8d is described below

commit bb497eb7e8de4ec0f5acf75fb48e6f96c66e6bfc
Author: panbingkun <panbing...@baidu.com>
AuthorDate: Thu Dec 28 11:12:44 2023 +0300

    [SPARK-46519][SQL] Clear unused error classes from `error-classes.json` file
    
    ### What changes were proposed in this pull request?
    This PR aims to:
    - Clear unused error classes from `error-classes.json` (a sketch of the pattern being cleaned up follows this list).
    - Delete the unused method `dataSourceAlreadyExists` from `QueryCompilationErrors.scala`.
    - Fix an outdated comment.
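
    For illustration, the cleanup targets the pairing between a JSON entry and the single helper that raises it; once the helper has no callers, both halves are dead. A minimal sketch, reconstructed from this commit's own diff (the removed `DATA_SOURCE_ALREADY_EXISTS` pair):
    ```scala
    // Reconstructed from the diff below, for illustration only.
    // JSON side (common/utils/src/main/resources/error/error-classes.json):
    //   "DATA_SOURCE_ALREADY_EXISTS" : {
    //     "message" : [
    //       "Data source '<provider>' already exists in the registry. ..."
    //     ],
    //     "sqlState" : "42710"
    //   }
    // Scala side (QueryCompilationErrors.scala): the only code that named this
    // error class; with no remaining callers, both halves can be removed.
    def dataSourceAlreadyExists(name: String): Throwable = {
      new AnalysisException(
        errorClass = "DATA_SOURCE_ALREADY_EXISTS",
        messageParameters = Map("provider" -> name))
    }
    ```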
    
    ### Why are the changes needed?
    Make the code clearer: unused error classes and dead helpers only add maintenance cost.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    - Pass GitHub Actions (GA).
    - Manual testing (a sketch of one plausible spot check follows this list).
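
    One plausible manual spot check (an assumption; the PR text does not list exact steps) is to confirm that the replacement error class used in `QueryExecutionSuite` still resolves, i.e. the exception can be constructed with its parameters:
    ```scala
    // Hedged sketch, not taken from the PR: build the replacement error and
    // check that its error class is carried through (the constructor arguments
    // mirror the updated test in QueryExecutionSuite further down this diff).
    import org.apache.spark.sql.AnalysisException

    val e = new AnalysisException(
      "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
      messageParameters = Map("dataSourceType" -> "XXX"))
    assert(e.getErrorClass == "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY")
    ```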
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #44503 from panbingkun/SPARK-46519.
    
    Authored-by: panbingkun <panbing...@baidu.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json    | 32 ----------------------
 ...-error-conditions-invalid-handle-error-class.md |  4 ---
 docs/sql-error-conditions-sqlstates.md             |  2 +-
 docs/sql-error-conditions.md                       | 12 --------
 .../apache/spark/sql/errors/DataTypeErrors.scala   |  2 +-
 .../spark/sql/errors/QueryCompilationErrors.scala  |  6 ----
 .../catalyst/catalog/ExternalCatalogSuite.scala    |  7 +++--
 .../spark/sql/execution/QueryExecutionSuite.scala  |  4 ++-
 .../datasources/parquet/ParquetFilterSuite.scala   |  4 +--
 9 files changed, 11 insertions(+), 62 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 700b1ed0751..9f68d4c5a53 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -875,12 +875,6 @@
     ],
     "sqlState" : "42K01"
   },
-  "DATA_SOURCE_ALREADY_EXISTS" : {
-    "message" : [
-      "Data source '<provider>' already exists in the registry. Please use a 
different name for the new data source."
-    ],
-    "sqlState" : "42710"
-  },
   "DATA_SOURCE_NOT_EXIST" : {
     "message" : [
       "Data source '<provider>' not found. Please make sure the data source is 
registered."
@@ -1480,12 +1474,6 @@
     },
     "sqlState" : "42K0B"
   },
-  "INCORRECT_END_OFFSET" : {
-    "message" : [
-      "Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but it's 
<endSeconds> now."
-    ],
-    "sqlState" : "22003"
-  },
   "INCORRECT_RAMP_UP_RATE" : {
     "message" : [
       "Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but 
'rampUpTimeSeconds' is <rampUpTimeSeconds>."
@@ -1906,11 +1894,6 @@
           "Operation not found."
         ]
       },
-      "SESSION_ALREADY_EXISTS" : {
-        "message" : [
-          "Session already exists."
-        ]
-      },
       "SESSION_CLOSED" : {
         "message" : [
           "Session was closed."
@@ -6065,11 +6048,6 @@
       "<walkedTypePath>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2142" : {
-    "message" : [
-      "Attributes for type <schema> is not supported."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2144" : {
     "message" : [
       "Unable to find constructor for <tpe>. This could happen if <tpe> is an 
interface, or a trait without companion object constructor."
@@ -6920,11 +6898,6 @@
       "<clazz>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3066" : {
-    "message" : [
-      "<msg>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3067" : {
     "message" : [
       "Streaming aggregation doesn't support group aggregate pandas UDF"
@@ -6980,11 +6953,6 @@
       "More than one event time columns are available. Please ensure there is 
at most one event time column per stream. event time columns: <eventTimeCols>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3078" : {
-    "message" : [
-      "Can not match ParquetTable in the query."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3079" : {
     "message" : [
       "Dynamic partition cannot be the parent of a static partition."
diff --git a/docs/sql-error-conditions-invalid-handle-error-class.md b/docs/sql-error-conditions-invalid-handle-error-class.md
index 14526cd5372..8df8e54a8d9 100644
--- a/docs/sql-error-conditions-invalid-handle-error-class.md
+++ b/docs/sql-error-conditions-invalid-handle-error-class.md
@@ -41,10 +41,6 @@ Operation already exists.
 
 Operation not found.
 
-## SESSION_ALREADY_EXISTS
-
-Session already exists.
-
 ## SESSION_CLOSED
 
 Session was closed.
diff --git a/docs/sql-error-conditions-sqlstates.md b/docs/sql-error-conditions-sqlstates.md
index 49cfb56b366..85f1c5c69c3 100644
--- a/docs/sql-error-conditions-sqlstates.md
+++ b/docs/sql-error-conditions-sqlstates.md
@@ -71,7 +71,7 @@ Spark SQL uses the following `SQLSTATE` classes:
 </tr>
 <tr>
   <td></td>
-  <td><a href="arithmetic-overflow-error-class.md">ARITHMETIC_OVERFLOW</a>, <a href="sql-error-conditions.html#cast_overflow">CAST_OVERFLOW</a>, <a href="sql-error-conditions.html#cast_overflow_in_table_insert">CAST_OVERFLOW_IN_TABLE_INSERT</a>, <a href="sql-error-conditions.html#decimal_precision_exceeds_max_precision">DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION</a>, <a href="sql-error-conditions.html#invalid_index_of_zero">INVALID_INDEX_OF_ZERO</a>, <a href="sql-error-conditions.html#incor [...]
+  <td><a href="arithmetic-overflow-error-class.md">ARITHMETIC_OVERFLOW</a>, <a href="sql-error-conditions.html#cast_overflow">CAST_OVERFLOW</a>, <a href="sql-error-conditions.html#cast_overflow_in_table_insert">CAST_OVERFLOW_IN_TABLE_INSERT</a>, <a href="sql-error-conditions.html#decimal_precision_exceeds_max_precision">DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION</a>, <a href="sql-error-conditions.html#invalid_index_of_zero">INVALID_INDEX_OF_ZERO</a>, <a href="sql-error-conditions.html#incor [...]
   </td>
 </tr>
     <tr>
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index a8d2b6c894b..248839666ef 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -474,12 +474,6 @@ For more details see [DATATYPE_MISMATCH](sql-error-conditions-datatype-mismatch-
 
 DataType `<type>` requires a length parameter, for example `<type>`(10). Please specify the length.
 
-### DATA_SOURCE_ALREADY_EXISTS
-
-[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
-
-Data source '`<provider>`' already exists in the registry. Please use a different name for the new data source.
-
 ### DATA_SOURCE_NOT_EXIST
 
 [SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -886,12 +880,6 @@ You may get a different result due to the upgrading to
 
 For more details see [INCONSISTENT_BEHAVIOR_CROSS_VERSION](sql-error-conditions-inconsistent-behavior-cross-version-error-class.html)
 
-### INCORRECT_END_OFFSET
-
-[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
-
-Max offset with `<rowsPerSecond>` rowsPerSecond is `<maxSeconds>`, but it's `<endSeconds>` now.
-
 ### INCORRECT_RAMP_UP_RATE
 
 [SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
index b30f7b7a00e..456a311efda 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -27,7 +27,7 @@ import org.apache.spark.unsafe.types.UTF8String
 /**
  * Object for grouping error messages from (most) exceptions thrown during query execution.
  * This does not include exceptions thrown during the eager execution of commands, which are
- * grouped into [[QueryCompilationErrors]].
+ * grouped into [[CompilationErrors]].
  */
 private[sql] object DataTypeErrors extends DataTypeErrorsBase {
   def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index ee41cbe2f50..e8235fd1046 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3856,12 +3856,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         "reason" -> reason))
   }
 
-  def dataSourceAlreadyExists(name: String): Throwable = {
-    new AnalysisException(
-      errorClass = "DATA_SOURCE_ALREADY_EXISTS",
-      messageParameters = Map("provider" -> name))
-  }
-
   def dataSourceDoesNotExist(name: String): Throwable = {
     new AnalysisException(
       errorClass = "DATA_SOURCE_NOT_EXIST",
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 1ee20a98cfd..ba01f955916 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns
 import org.apache.spark.sql.connector.catalog.SupportsNamespaces.PROP_OWNER
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
 
@@ -569,8 +570,10 @@ abstract class ExternalCatalogSuite extends SparkFunSuite {
         // then be caught and converted to a RuntimeException with a descriptive message.
         case ex: RuntimeException if ex.getMessage.contains("MetaException") =>
           throw new AnalysisException(
-            errorClass = "_LEGACY_ERROR_TEMP_3066",
-            messageParameters = Map("msg" -> ex.getMessage))
+            errorClass = "_LEGACY_ERROR_TEMP_2193",
+            messageParameters = Map(
+              "hiveMetastorePartitionPruningFallbackOnException" ->
+                SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION.key))
       }
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
index 583d7fd7ee3..7fa34cfddbf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
@@ -160,7 +160,9 @@ class QueryExecutionSuite extends SharedSparkSession {
 
     // Throw an AnalysisException - this should be captured.
     spark.experimental.extraStrategies = Seq[SparkStrategy](
-      (_: LogicalPlan) => throw new AnalysisException("_LEGACY_ERROR_TEMP_3078", Map.empty))
+      (_: LogicalPlan) => throw new AnalysisException(
+        "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+        messageParameters = Map("dataSourceType" -> "XXX")))
     assert(qe.toString.contains("org.apache.spark.sql.AnalysisException"))
 
     // Throw an Error - this should not be captured.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
index da2705f7c72..c27b71ac827 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
@@ -2344,9 +2344,7 @@ class ParquetV2FilterSuite extends ParquetFilterSuite {
 
           checker(stripSparkFilter(query), expected)
 
-        case _ =>
-          throw new AnalysisException(
-            errorClass = "_LEGACY_ERROR_TEMP_3078", messageParameters = 
Map.empty)
+        case _ => assert(false, "Can not match ParquetTable in the query.")
       }
     }
   }

