This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d1c53208b67 [SPARK-44095][SQL][TESTS] Make HiveExternalCatalogVersionsSuite skip old Spark versions on Java 21
d1c53208b67 is described below

commit d1c53208b67b982f5a38cfdb3b1dcde91d9cd029
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sun Jun 18 17:50:11 2023 -0700

    [SPARK-44095][SQL][TESTS] Make HiveExternalCatalogVersionsSuite skip old Spark versions on Java 21
    
    ### What changes were proposed in this pull request?
    
    This PR aims to make `HiveExternalCatalogVersionsSuite` skip old Spark versions when Java 21 is used for testing.
    
    ### Why are the changes needed?
    
    Old Apache Spark releases are unable to support Java 21. So, it causes a test failure at runtime.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs and manual testing on Java 21.
    
    **BEFORE**
    ```
    $ build/sbt "hive/testOnly *.HiveExternalCatalogVersionsSuite" -Phive
    ...
    [info]   2023-06-18 16:43:22.448 - stderr> Caused by: java.lang.IllegalStateException: java.lang.NoSuchMethodException: java.nio.DirectByteBuffer.<init>(long,int)
    ...
    [info] *** 1 SUITE ABORTED ***
    [error] Error during tests:
    [error]         org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite
    [error] (hive / Test / testOnly) sbt.TestsFailedException: Tests unsuccessful
    [error] Total time: 33 s, completed Jun 18, 2023, 4:43:23 PM
    ```
    
    **AFTER**
    ```
    $ build/sbt "hive/testOnly *.HiveExternalCatalogVersionsSuite" -Phive
    ...
    [info] HiveExternalCatalogVersionsSuite:
    [info] - backward compatibility (8 milliseconds)
    [info] Run completed in 1 second, 26 milliseconds.
    [info] Total number of tests run: 1
    [info] Suites: completed 1, aborted 0
    [info] Tests: succeeded 1, failed 0, canceled 0, ignored 0, pending 0
    [info] All tests passed.
    [success] Total time: 14 s, completed Jun 18, 2023, 4:42:24 PM
    ```
    
    Closes #41652 from dongjoon-hyun/SPARK-44095.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../spark/sql/hive/HiveExternalCatalogVersionsSuite.scala     | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index c0abb93ce0c..bfa6c8c3838 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -203,7 +203,11 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
 
     if (PROCESS_TABLES.testingVersions.isEmpty) {
       if (PROCESS_TABLES.isPythonVersionAvailable) {
-        logError("Fail to get the latest Spark versions to test.")
+        if (SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
+          logError("Fail to get the latest Spark versions to test.")
+        } else {
+          logInfo("Skip tests because old Spark versions don't support Java 21.")
+        }
       } else {
        logError(s"Python version < ${TestUtils.minimumPythonSupportedVersion}, " +
          "the running environment is unavailable.")
@@ -259,8 +263,9 @@ object PROCESS_TABLES extends QueryTest with SQLTestUtils {
   val isPythonVersionAvailable = TestUtils.isPythonVersionAvailable
   val releaseMirror = sys.env.getOrElse("SPARK_RELEASE_MIRROR",
    "https://dist.apache.org/repos/dist/release")
-  // Tests the latest version of every release line.
-  val testingVersions: Seq[String] = if (isPythonVersionAvailable) {
+  // Tests the latest version of every release line if Java version is at most 17.
+  val testingVersions: Seq[String] = if (isPythonVersionAvailable &&
+      SystemUtils.isJavaVersionAtMost(JavaVersion.JAVA_17)) {
     import scala.io.Source
     try Utils.tryWithResource(
       Source.fromURL(s"$releaseMirror/spark")) { source =>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to