This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e6d8d0f  [SPARK-27121][REPL] Resolve Scala compiler failure for Java 9+ in REPL
e6d8d0f is described below

commit e6d8d0f13fec40596abe39476d84119ba6b1ba5b
Author: Sean Owen <sean.o...@databricks.com>
AuthorDate: Sat Mar 30 02:30:34 2019 -0500

    [SPARK-27121][REPL] Resolve Scala compiler failure for Java 9+ in REPL
    
    ## What changes were proposed in this pull request?
    
    Avoid trying to extract the classpath of the environment from a URLClassLoader in Java 11, as the default classloader isn't one. Use `java.class.path` instead.
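    
    For context, a minimal sketch of the behavior difference this works around (illustrative only, not part of the patch; the object name `ClasspathProbe` is hypothetical): on Java 8 the application class loader is a `URLClassLoader`, so its URLs can be enumerated directly, but on Java 9+ it is an internal loader type, so the cast-based extraction finds nothing and the `java.class.path` system property is the reliable source:
    
    ```scala
    import java.io.File
    import java.net.URLClassLoader
    
    object ClasspathProbe {
      def currentClasspath(): String = getClass.getClassLoader match {
        case u: URLClassLoader =>
          // Java 8: the app class loader exposes its URLs directly
          u.getURLs.filter(_.getProtocol == "file").map(_.getFile)
            .mkString(File.pathSeparator)
        case _ =>
          // Java 9+: the app class loader is an internal JDK type,
          // so read the JVM's own classpath property instead
          System.getProperty("java.class.path")
      }
    }
    ```
    
    Since `java.class.path` is set on both Java 8 and 9+, the patch simply drops the `URLClassLoader` branch and reads the property unconditionally.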
    
    ## How was this patch tested?
    
    Existing tests, manually tested under Java 11.
    
    Closes #24239 from srowen/SPARK-27121.0.
    
    Authored-by: Sean Owen <sean.o...@databricks.com>
    Signed-off-by: Sean Owen <sean.o...@databricks.com>
---
 .../scala/org/apache/spark/repl/ReplSuite.scala    | 24 +++++++---------------
 .../org/apache/spark/repl/SingletonReplSuite.scala | 17 ++-------------
 2 files changed, 9 insertions(+), 32 deletions(-)

diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index a46cb6b..4849c7c 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -18,9 +18,7 @@
 package org.apache.spark.repl
 
 import java.io._
-import java.net.URLClassLoader
 
-import scala.collection.mutable.ArrayBuffer
 import scala.tools.nsc.interpreter.SimpleReader
 
 import org.apache.log4j.{Level, LogManager}
@@ -34,25 +32,16 @@ class ReplSuite extends SparkFunSuite {
   def runInterpreter(master: String, input: String): String = {
     val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
 
-    val in = new BufferedReader(new StringReader(input + "\n"))
-    val out = new StringWriter()
-    val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
-    }
-    val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
-
     val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
+    val classpath = System.getProperty("java.class.path")
     System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+
     Main.sparkContext = null
     Main.sparkSession = null // causes recreation of SparkContext for each test.
     Main.conf.set("spark.master", master)
+
+    val in = new BufferedReader(new StringReader(input + "\n"))
+    val out = new StringWriter()
     Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
 
     if (oldExecutorClasspath != null) {
@@ -60,7 +49,8 @@ class ReplSuite extends SparkFunSuite {
     } else {
       System.clearProperty(CONF_EXECUTOR_CLASSPATH)
     }
-    return out.toString
+
+    out.toString
   }
 
   // Simulate the paste mode in Scala REPL.
diff --git a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
index d49e0fd..039fc62 100644
--- a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
@@ -18,9 +18,6 @@
 package org.apache.spark.repl
 
 import java.io._
-import java.net.URLClassLoader
-
-import scala.collection.mutable.ArrayBuffer
 
 import org.apache.commons.lang3.StringEscapeUtils
 
@@ -42,19 +39,9 @@ class SingletonReplSuite extends SparkFunSuite {
   override def beforeAll(): Unit = {
     super.beforeAll()
 
-    val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
-    }
-    val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
-
+    val classpath = System.getProperty("java.class.path")
     System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+
     Main.conf.set("spark.master", "local-cluster[2,1,1024]")
     val interp = new SparkILoop(
       new BufferedReader(new InputStreamReader(new PipedInputStream(in))),


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
