This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 470beda2231 [SPARK-41571][SQL] Assign name to _LEGACY_ERROR_TEMP_2310
470beda2231 is described below

commit 470beda2231c89d9cbd609bcf1e83d84c80a7f06
Author: itholic <haejoon....@databricks.com>
AuthorDate: Mon Jan 2 11:53:27 2023 +0500

    [SPARK-41571][SQL] Assign name to _LEGACY_ERROR_TEMP_2310
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to assign the name "WRITE_STREAM_NOT_ALLOWED" to the error class _LEGACY_ERROR_TEMP_2310.
    
    ### Why are the changes needed?
    
    We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
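    
    The suite change below also replaces substring matching on the message with `checkError`, asserting on the structured error class; a minimal sketch of that pattern, assuming `e` is the intercepted exception from the existing test:
    
    ```scala
    // Verify the error class and its parameters rather than raw message text.
    checkError(
      exception = e,
      errorClass = "WRITE_STREAM_NOT_ALLOWED",
      parameters = Map.empty)
    ```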
    
    Closes #39285 from itholic/LEGACY_2310.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 R/pkg/tests/fulltests/test_streaming.R                         |  3 +--
 core/src/main/resources/error/error-classes.json               | 10 +++++-----
 sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala     |  2 +-
 .../org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala |  8 +++++---
 4 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/R/pkg/tests/fulltests/test_streaming.R b/R/pkg/tests/fulltests/test_streaming.R
index cc84a985423..8804471e640 100644
--- a/R/pkg/tests/fulltests/test_streaming.R
+++ b/R/pkg/tests/fulltests/test_streaming.R
@@ -140,8 +140,7 @@ test_that("Non-streaming DataFrame", {
   expect_false(isStreaming(c))
 
   expect_error(write.stream(c, "memory", queryName = "people", outputMode = "complete"),
-               paste0(".*(writeStream : analysis error - 'writeStream' can be called only on ",
-                      "streaming Dataset/DataFrame).*"))
+               paste0("Error in writeStream : analysis error - \\[WRITE_STREAM_NOT_ALLOWED\\].*"))
 })
 
 test_that("Unsupported operation", {
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 4003fab0685..4687d04bf71 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1618,6 +1618,11 @@
     ],
     "sqlState" : "42000"
   },
+  "WRITE_STREAM_NOT_ALLOWED" : {
+    "message" : [
+      "`writeStream` can be called only on streaming Dataset/DataFrame."
+    ]
+  },
   "WRONG_NUM_ARGS" : {
     "message" : [
       "Invalid number of arguments for the function <functionName>."
@@ -4907,11 +4912,6 @@
       "cannot resolve <sqlExpr> in MERGE command given columns [<cols>]"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2310" : {
-    "message" : [
-      "'writeStream' can be called only on streaming Dataset/DataFrame"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2311" : {
     "message" : [
       "'writeTo' can not be called on streaming Dataset/DataFrame"
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 5f6512d4e4b..c8e2a48859d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -3875,7 +3875,7 @@ class Dataset[T] private[sql](
   def writeStream: DataStreamWriter[T] = {
     if (!isStreaming) {
       logicalPlan.failAnalysis(
-        errorClass = "_LEGACY_ERROR_TEMP_2310",
+        errorClass = "WRITE_STREAM_NOT_ALLOWED",
         messageParameters = Map.empty)
     }
     new DataStreamWriter[T](this)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
index 3f2414d2178..17a003dfe8f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
@@ -162,9 +162,11 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSparkSession with
         .writeStream
         .start()
     }
-    Seq("'writeStream'", "only", "streaming Dataset/DataFrame").foreach { s =>
-      assert(e.getMessage.toLowerCase(Locale.ROOT).contains(s.toLowerCase(Locale.ROOT)))
-    }
+    checkError(
+      exception = e,
+      errorClass = "WRITE_STREAM_NOT_ALLOWED",
+      parameters = Map.empty
+    )
   }
 
   test("resolve default source") {


