Repository: spark
Updated Branches:
  refs/heads/branch-2.0 1a8ea000e -> 452e468f2


[SPARK-17577][CORE][2.0 BACKPORT] Update SparkContext.addFile to make it work 
well on Windows

## What changes were proposed in this pull request?
Update ```SparkContext.addFile``` to correct the use of ```URI``` and 
```Path```, then it can work well on Windows. This is used for branch-2.0 
backport, more details at #15131.

## How was this patch tested?
Backport, checked by appveyor.

Author: Yanbo Liang <yblia...@gmail.com>

Closes #15217 from yanboliang/uri-2.0.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/452e468f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/452e468f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/452e468f

Branch: refs/heads/branch-2.0
Commit: 452e468f280d69c930782a7588a87a816cc9585a
Parents: 1a8ea00
Author: Yanbo Liang <yblia...@gmail.com>
Authored: Sat Sep 24 04:50:22 2016 +0900
Committer: Kousuke Saruta <saru...@oss.nttdata.co.jp>
Committed: Sat Sep 24 04:50:22 2016 +0900

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/452e468f/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala 
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 214758f..251c16f 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1419,7 +1419,7 @@ class SparkContext(config: SparkConf) extends Logging 
with ExecutorAllocationCli
    * supported for Hadoop-supported filesystems.
    */
   def addFile(path: String, recursive: Boolean): Unit = {
-    val uri = new URI(path)
+    val uri = new Path(path).toUri
     val schemeCorrectedPath = uri.getScheme match {
       case null | "local" => new File(path).getCanonicalFile.toURI.toString
       case _ => path
@@ -1453,8 +1453,8 @@ class SparkContext(config: SparkConf) extends Logging 
with ExecutorAllocationCli
       logInfo(s"Added file $path at $key with timestamp $timestamp")
       // Fetch the file locally so that closures which are run on the driver 
can still use the
       // SparkFiles API to access files.
-      Utils.fetchFile(path, new File(SparkFiles.getRootDirectory()), conf, 
env.securityManager,
-        hadoopConfiguration, timestamp, useCache = false)
+      Utils.fetchFile(uri.toString, new File(SparkFiles.getRootDirectory()), 
conf,
+        env.securityManager, hadoopConfiguration, timestamp, useCache = false)
       postEnvironmentUpdate()
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org

Reply via email to