This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 5590c9a4654 [SPARK-43794][SQL] Assign a name to the error class 
_LEGACY_ERROR_TEMP_1335
5590c9a4654 is described below

commit 5590c9a4654607488379703581e341d4062f9666
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Fri May 26 16:37:01 2023 +0300

    [SPARK-43794][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1335
    
    ### What changes were proposed in this pull request?
    The PR aims to assign a name to the error class _LEGACY_ERROR_TEMP_1335.
    
    ### Why are the changes needed?
    The changes improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Update existing UT.
    Pass GA.
    
    Closes #41314 from panbingkun/SPARK-43794.
    
    Lead-authored-by: panbingkun <pbk1...@gmail.com>
    Co-authored-by: panbingkun <84731...@qq.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 22 ++++++++++++++++++++++
 .../sql/catalyst/analysis/TimeTravelSpec.scala     | 12 ++++++++----
 .../spark/sql/errors/QueryCompilationErrors.scala  |  6 +++---
 .../spark/sql/connector/DataSourceV2SQLSuite.scala | 17 +++++++++--------
 4 files changed, 42 insertions(+), 15 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index bbf0368ac59..738e037c39d 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1326,6 +1326,28 @@
       "Cannot create the persistent object <objName> of the type <obj> because 
it references to the temporary object <tempObjName> of the type <tempObj>. 
Please make the temporary object <tempObjName> persistent, or make the 
persistent object <objName> temporary."
     ]
   },
+  "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR" : {
+    "message" : [
+      "The time travel timestamp expression <expr> is invalid."
+    ],
+    "subClass" : {
+      "INPUT" : {
+        "message" : [
+          "Cannot be casted to the \"TIMESTAMP\" type."
+        ]
+      },
+      "NON_DETERMINISTIC" : {
+        "message" : [
+          "Must be deterministic."
+        ]
+      },
+      "UNEVALUABLE" : {
+        "message" : [
+          "Must be evaluable."
+        ]
+      }
+    }
+  },
   "INVALID_TYPED_LITERAL" : {
     "message" : [
       "The value of the typed literal <valueType> is invalid: <value>."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
index e33ddbb3213..26856d9a5e0 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TimeTravelSpec.scala
@@ -38,21 +38,25 @@ object TimeTravelSpec {
       val ts = timestamp.get
       assert(ts.resolved && ts.references.isEmpty && 
!SubqueryExpression.hasSubquery(ts))
       if (!Cast.canAnsiCast(ts.dataType, TimestampType)) {
-        throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(ts)
+        throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
+          "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.INPUT", ts)
       }
       val tsToEval = ts.transform {
         case r: RuntimeReplaceable => r.replacement
         case _: Unevaluable =>
-          throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(ts)
+          throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
+            "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.UNEVALUABLE", ts)
         case e if !e.deterministic =>
-          throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(ts)
+          throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
+            "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.NON_DETERMINISTIC", ts)
       }
       val tz = Some(conf.sessionLocalTimeZone)
       // Set `ansiEnabled` to false, so that it can return null for invalid 
input and we can provide
       // better error message.
       val value = Cast(tsToEval, TimestampType, tz, ansiEnabled = false).eval()
       if (value == null) {
-        throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(ts)
+        throw QueryCompilationErrors.invalidTimestampExprForTimeTravel(
+          "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.INPUT", ts)
       }
       Some(AsOfTimestamp(value.asInstanceOf[Long]))
     } else if (version.nonEmpty) {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 9921f50014d..4b004eb8fd1 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3130,10 +3130,10 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map.empty)
   }
 
-  def invalidTimestampExprForTimeTravel(expr: Expression): Throwable = {
+  def invalidTimestampExprForTimeTravel(errorClass: String, expr: Expression): 
Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1335",
-      messageParameters = Map("expr" -> expr.sql))
+      errorClass = errorClass,
+      messageParameters = Map("expr" -> toSQLExpr(expr)))
   }
 
   def timeTravelUnsupportedError(relationId: String): Throwable = {
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 1aa216cf7ce..ff52239a1d9 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2931,29 +2931,30 @@ class DataSourceV2SQLSuiteV1Filter
         exception = intercept[AnalysisException] {
           sql("SELECT * FROM t TIMESTAMP AS OF INTERVAL 1 DAY").collect()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1335",
-        parameters = Map("expr" -> "INTERVAL '1' DAY"))
+        errorClass = "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.INPUT",
+        parameters = Map(
+          "expr" -> "\"INTERVAL '1' DAY\""))
 
       checkError(
         exception = intercept[AnalysisException] {
           sql("SELECT * FROM t TIMESTAMP AS OF 'abc'").collect()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1335",
-        parameters = Map("expr" -> "'abc'"))
+        errorClass = "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.INPUT",
+        parameters = Map("expr" -> "\"abc\""))
 
       checkError(
         exception = intercept[AnalysisException] {
           sql("SELECT * FROM t TIMESTAMP AS OF current_user()").collect()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1335",
-        parameters = Map("expr" -> "current_user()"))
+        errorClass = "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.UNEVALUABLE",
+        parameters = Map("expr" -> "\"current_user()\""))
 
       checkError(
         exception = intercept[AnalysisException] {
           sql("SELECT * FROM t TIMESTAMP AS OF CAST(rand() AS 
STRING)").collect()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1335",
-        parameters = Map("expr" -> "CAST(rand() AS STRING)"))
+        errorClass = "INVALID_TIME_TRAVEL_TIMESTAMP_EXPR.NON_DETERMINISTIC",
+        parameters = Map("expr" -> "\"CAST(rand() AS STRING)\""))
 
       checkError(
         exception = intercept[AnalysisException] {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to