Repository: spark
Updated Branches:
  refs/heads/master f138cb873 -> 150f6a89b


[SPARK-11595] [SQL] Fixes ADD JAR when the input path contains URL scheme

Author: Cheng Lian <l...@databricks.com>

Closes #9569 from liancheng/spark-11595.fix-add-jar.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/150f6a89
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/150f6a89
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/150f6a89

Branch: refs/heads/master
Commit: 150f6a89b79f0e5bc31aa83731429dc7ac5ea76b
Parents: f138cb8
Author: Cheng Lian <l...@databricks.com>
Authored: Mon Nov 9 14:32:52 2015 -0800
Committer: Davies Liu <davies....@gmail.com>
Committed: Mon Nov 9 14:32:52 2015 -0800

----------------------------------------------------------------------
 .../sql/hive/thriftserver/HiveThriftServer2Suites.scala  |  1 +
 .../org/apache/spark/sql/hive/client/ClientWrapper.scala | 11 +++++++++--
 .../spark/sql/hive/client/IsolatedClientLoader.scala     |  9 +++------
 .../apache/spark/sql/hive/execution/HiveQuerySuite.scala |  8 +++++---
 4 files changed, 18 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/150f6a89/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index ff8ca01..5903b9e 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -41,6 +41,7 @@ import org.apache.thrift.transport.TSocket
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.sql.hive.test.TestHive
 import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
 import org.apache.spark.util.Utils
 import org.apache.spark.{Logging, SparkFunSuite}

http://git-wip-us.apache.org/repos/asf/spark/blob/150f6a89/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
index 3dce86c..f1c2489 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hive.client
 
 import java.io.{File, PrintStream}
 import java.util.{Map => JMap}
-import javax.annotation.concurrent.GuardedBy
 
 import scala.collection.JavaConverters._
 import scala.language.reflectiveCalls
@@ -548,7 +547,15 @@ private[hive] class ClientWrapper(
   }
 
   def addJar(path: String): Unit = {
-    clientLoader.addJar(path)
+    val uri = new Path(path).toUri
+    val jarURL = if (uri.getScheme == null) {
+      // `path` is a local file path without a URL scheme
+      new File(path).toURI.toURL
+    } else {
+      // `path` is a URL with a scheme
+      uri.toURL
+    }
+    clientLoader.addJar(jarURL)
     runSqlHive(s"ADD JAR $path")
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/150f6a89/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
index f99c3ed..e041e0d 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
@@ -22,7 +22,6 @@ import java.lang.reflect.InvocationTargetException
 import java.net.{URL, URLClassLoader}
 import java.util
 
-import scala.collection.mutable
 import scala.language.reflectiveCalls
 import scala.util.Try
 
@@ -30,10 +29,9 @@ import org.apache.commons.io.{FileUtils, IOUtils}
 
 import org.apache.spark.Logging
 import org.apache.spark.deploy.SparkSubmitUtils
-import org.apache.spark.util.{MutableURLClassLoader, Utils}
-
 import org.apache.spark.sql.catalyst.util.quietly
 import org.apache.spark.sql.hive.HiveContext
+import org.apache.spark.util.{MutableURLClassLoader, Utils}
 
 /** Factory for `IsolatedClientLoader` with specific versions of hive. */
 private[hive] object IsolatedClientLoader {
@@ -190,9 +188,8 @@ private[hive] class IsolatedClientLoader(
     new NonClosableMutableURLClassLoader(isolatedClassLoader)
   }
 
-  private[hive] def addJar(path: String): Unit = synchronized {
-    val jarURL = new java.io.File(path).toURI.toURL
-    classLoader.addURL(jarURL)
+  private[hive] def addJar(path: URL): Unit = synchronized {
+    classLoader.addURL(path)
   }
 
   /** The isolated client interface to Hive. */

http://git-wip-us.apache.org/repos/asf/spark/blob/150f6a89/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index fc72e3c..78378c8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -927,7 +927,7 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
   test("SPARK-2263: Insert Map<K, V> values") {
     sql("CREATE TABLE m(value MAP<INT, STRING>)")
     sql("INSERT OVERWRITE TABLE m SELECT MAP(key, value) FROM src LIMIT 10")
-    sql("SELECT * FROM m").collect().zip(sql("SELECT * FROM src LIMIT 10").collect()).map {
+    sql("SELECT * FROM m").collect().zip(sql("SELECT * FROM src LIMIT 10").collect()).foreach {
       case (Row(map: Map[_, _]), Row(key: Int, value: String)) =>
         assert(map.size === 1)
         assert(map.head === (key, value))
@@ -961,10 +961,12 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
 
   test("CREATE TEMPORARY FUNCTION") {
     val funcJar = TestHive.getHiveFile("TestUDTF.jar").getCanonicalPath
-    sql(s"ADD JAR $funcJar")
+    val jarURL = s"file://$funcJar"
+    sql(s"ADD JAR $jarURL")
     sql(
       """CREATE TEMPORARY FUNCTION udtf_count2 AS
-        | 'org.apache.spark.sql.hive.execution.GenericUDTFCount2'""".stripMargin)
+        |'org.apache.spark.sql.hive.execution.GenericUDTFCount2'
+      """.stripMargin)
     assert(sql("DESCRIBE FUNCTION udtf_count2").count > 1)
     sql("DROP TEMPORARY FUNCTION udtf_count2")
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to