This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2234667  [SPARK-27563][SQL][TEST] automatically get the latest Spark versions in HiveExternalCatalogVersionsSuite
2234667 is described below

commit 2234667b159bf19a68758da3ff20cfae3c058c25
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Fri Apr 26 16:37:43 2019 +0900

    [SPARK-27563][SQL][TEST] automatically get the latest Spark versions in HiveExternalCatalogVersionsSuite
    
    ## What changes were proposed in this pull request?
    
    We can get the latest downloadable Spark versions from https://dist.apache.org/repos/dist/release/spark/
    
    ## How was this patch tested?
    
    manually.
    
    Closes #24454 from cloud-fan/test.
    
    Authored-by: Wenchen Fan <wenc...@databricks.com>
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
---
 .../sql/hive/HiveExternalCatalogVersionsSuite.scala   | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 0a05ec5..ec10295 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -22,6 +22,7 @@ import java.nio.charset.StandardCharsets
 import java.nio.file.{Files, Paths}
 
 import scala.sys.process._
+import scala.util.control.NonFatal
 
 import org.apache.hadoop.conf.Configuration
 
@@ -169,6 +170,10 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
       """.stripMargin.getBytes("utf8"))
     // scalastyle:on line.size.limit
 
+    if (PROCESS_TABLES.testingVersions.isEmpty) {
+      fail("Failed to get the latest Spark versions to test.")
+    }
+
     PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) =>
       val sparkHome = new File(sparkTestingDir, s"spark-$version")
       if (!sparkHome.exists()) {
@@ -206,7 +211,19 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
 
 object PROCESS_TABLES extends QueryTest with SQLTestUtils {
   // Tests the latest version of every release line.
-  val testingVersions = Seq("2.3.3", "2.4.2")
+  val testingVersions: Seq[String] = {
+    import scala.io.Source
+    try {
+      Source.fromURL("https://dist.apache.org/repos/dist/release/spark/").mkString
+        .split("\n")
+        .filter(_.contains("""<li><a href="spark-"""))
+        .map("""<a href="spark-(\d.\d.\d)/">""".r.findFirstMatchIn(_).get.group(1))
+        .filter(_ < org.apache.spark.SPARK_VERSION)
+    } catch {
+      // do not throw exception during object initialization.
+      case NonFatal(_) => Nil
+    }
+  }
 
   protected var spark: SparkSession = _
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to