This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 6ab8b384d03 [SPARK-38776][MLLIB][TESTS][FOLLOWUP] Disable ANSI_ENABLED more for `Out of Range` failures
6ab8b384d03 is described below

commit 6ab8b384d03d9ba1a046327a4ba9b4c7406ad706
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun Apr 3 13:31:34 2022 -0700

    [SPARK-38776][MLLIB][TESTS][FOLLOWUP] Disable ANSI_ENABLED more for `Out of Range` failures
    
    This is a follow-up of https://github.com/apache/spark/pull/36051.
    After fixing the `Overflow` errors, `Out of Range` failures are observed in the rest of the test code in the same test case.
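    
    As a minimal sketch of the pattern this commit applies (the snippet mirrors the diff below; `df`, `checkedCast`, and `msg` are the existing `ALSSuite` fixtures): `withSQLConf` pins `spark.sql.ansi.enabled` to `false` for the enclosed assertions, since under ANSI mode these queries fail with a different error message than the `checkedCast` one the tests expect.
    ```
    // Scope ANSI mode off so checkedCast's own out-of-range error surfaces.
    withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
      withClue("Invalid Long: out of range") {
        val e: SparkException = intercept[SparkException] {
          df.select(checkedCast(lit(1231000000000L))).collect()
        }
        assert(e.getMessage.contains(msg))
      }
    }
    ```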
    
    This change is needed to make the GitHub Action ANSI test CI pass.
    
    No user-facing changes.
    
    To verify, I used the following command to simulate the GitHub Action ANSI job:
    ```
    $ SPARK_ANSI_SQL_MODE=true build/sbt "mllib/testOnly *.ALSSuite"
    ...
    [info] All tests passed.
    [success] Total time: 80 s (01:20), completed Apr 3, 2022 1:05:50 PM
    ```
    
    Closes #36054 from dongjoon-hyun/SPARK-38776-2.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
    (cherry picked from commit fbcab01ffb672dda98f6f472da44aed26b59b2a5)
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../apache/spark/ml/recommendation/ALSSuite.scala  | 30 ++++++++++++----------
 1 file changed, 17 insertions(+), 13 deletions(-)

diff --git a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
index 7372b2d7673..e925f7b574e 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/recommendation/ALSSuite.scala
@@ -228,18 +228,20 @@ class ALSSuite extends MLTest with DefaultReadWriteTest with Logging {
     }
 
     val msg = "either out of Integer range or contained a fractional part"
-    withClue("Invalid Long: out of range") {
-      val e: SparkException = intercept[SparkException] {
-        df.select(checkedCast(lit(1231000000000L))).collect()
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+      withClue("Invalid Long: out of range") {
+        val e: SparkException = intercept[SparkException] {
+          df.select(checkedCast(lit(1231000000000L))).collect()
+        }
+        assert(e.getMessage.contains(msg))
       }
-      assert(e.getMessage.contains(msg))
-    }
 
-    withClue("Invalid Decimal: out of range") {
-      val e: SparkException = intercept[SparkException] {
-        df.select(checkedCast(lit(1231000000000.0).cast(DecimalType(15, 2)))).collect()
+      withClue("Invalid Decimal: out of range") {
+        val e: SparkException = intercept[SparkException] {
+          df.select(checkedCast(lit(1231000000000.0).cast(DecimalType(15, 2)))).collect()
+        }
+        assert(e.getMessage.contains(msg))
       }
-      assert(e.getMessage.contains(msg))
     }
 
     withClue("Invalid Decimal: fractional part") {
@@ -249,11 +251,13 @@ class ALSSuite extends MLTest with DefaultReadWriteTest with Logging {
       assert(e.getMessage.contains(msg))
     }
 
-    withClue("Invalid Double: out of range") {
-      val e: SparkException = intercept[SparkException] {
-        df.select(checkedCast(lit(1231000000000.0))).collect()
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+      withClue("Invalid Double: out of range") {
+        val e: SparkException = intercept[SparkException] {
+          df.select(checkedCast(lit(1231000000000.0))).collect()
+        }
+        assert(e.getMessage.contains(msg))
       }
-      assert(e.getMessage.contains(msg))
     }
 
     withClue("Invalid Double: fractional part") {

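For context, `withSQLConf` is the standard Spark test helper (from the `SQLHelper` test trait) that scopes a config change to a block and restores the previous value afterwards. A minimal illustrative sketch of that pattern, not the actual implementation:

```
import org.apache.spark.sql.internal.SQLConf

// Set the given conf entries, run the body, then restore whatever values
// were there before, even if the body throws.
def withSQLConf(pairs: (String, String)*)(f: => Unit): Unit = {
  val conf = SQLConf.get
  // Remember the current value of each key (null marks "was unset").
  val saved = pairs.map { case (key, _) =>
    key -> (if (conf.contains(key)) conf.getConfString(key) else null)
  }
  pairs.foreach { case (key, value) => conf.setConfString(key, value) }
  try f finally saved.foreach {
    case (key, null) => conf.unsetConf(key)
    case (key, value) => conf.setConfString(key, value)
  }
}
```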

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
