MaxGekk commented on code in PR #36220:
URL: https://github.com/apache/spark/pull/36220#discussion_r852224161


##########
sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala:
##########
@@ -389,15 +389,19 @@ class QueryExecutionErrorsSuite extends QueryTest
     }
   }
 
-  test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val e = intercept[SparkDateTimeException] {
-        sql("select make_timestamp(2012, 11, 30, 9, 19, 
60.66666666)").collect()
-      }
-      assert(e.getErrorClass === "INVALID_FRACTION_OF_SECOND")
-      assert(e.getSqlState === "22023")
-      assert(e.getMessage === "The fraction of sec must be zero. Valid range 
is [0, 60]. " +
-        "If necessary set spark.sql.ansi.enabled to false to bypass this 
error. ")
+  test("FAILED_EXECUTE_UDF: execute user defined function") {
+    val e1 = intercept[SparkException] {
+      val names = Seq("Jacek", "Agata", "Sweet").toDF("name")
+      val hello = udf { _: String => {
+        throw new SparkException("throw spark exception for udf test")

Review Comment:
   User-defined functions shouldn't throw Spark's own exceptions. Let's make the test
more realistic and throw some other Java exception instead.



##########
sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala:
##########
@@ -389,15 +389,19 @@ class QueryExecutionErrorsSuite extends QueryTest
     }
   }
 
-  test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {

Review Comment:
   Why did you remove the test?



##########
sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala:
##########
@@ -389,15 +389,19 @@ class QueryExecutionErrorsSuite extends QueryTest
     }
   }
 
-  test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val e = intercept[SparkDateTimeException] {
-        sql("select make_timestamp(2012, 11, 30, 9, 19, 
60.66666666)").collect()
-      }
-      assert(e.getErrorClass === "INVALID_FRACTION_OF_SECOND")
-      assert(e.getSqlState === "22023")
-      assert(e.getMessage === "The fraction of sec must be zero. Valid range 
is [0, 60]. " +
-        "If necessary set spark.sql.ansi.enabled to false to bypass this 
error. ")
+  test("FAILED_EXECUTE_UDF: execute user defined function") {
+    val e1 = intercept[SparkException] {
+      val names = Seq("Jacek", "Agata", "Sweet").toDF("name")
+      val hello = udf { _: String => {
+        throw new SparkException("throw spark exception for udf test")

Review Comment:
   User-defined functions shouldn't throw Spark's own exceptions. Let's make the test
more realistic and throw some other Java exception instead.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to