Repository: spark
Updated Branches:
  refs/heads/master b6cf13481 -> ba78383ba


SPARK-3318: Documentation update in addFile on how to use SparkFiles.get

Rather than passing the full path to SparkFiles.get, we need to pass just the filename.

Author: Holden Karau <hol...@pigscanfly.ca>

Closes #2210 from 
holdenk/SPARK-3318-documentation-for-addfiles-should-say-to-use-file-not-path 
and squashes the following commits:

a25d27a [Holden Karau] Update the JavaSparkContext addFile method to be clear 
about using fileName with SparkFiles as well
0ebcb05 [Holden Karau] Documentation update in addFile on how to use 
SparkFiles.get to specify filename rather than path


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ba78383b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ba78383b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ba78383b

Branch: refs/heads/master
Commit: ba78383bace52b13ee931c6f2df445f721d5080a
Parents: b6cf134
Author: Holden Karau <hol...@pigscanfly.ca>
Authored: Sat Aug 30 16:58:17 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sat Aug 30 16:58:17 2014 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala          | 3 +--
 .../main/scala/org/apache/spark/api/java/JavaSparkContext.scala  | 2 +-
 python/pyspark/context.py                                        | 4 ++--
 3 files changed, 4 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ba78383b/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala 
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index a80b3cc..cb4fb7c 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -796,7 +796,7 @@ class SparkContext(config: SparkConf) extends Logging {
    * Add a file to be downloaded with this Spark job on every node.
    * The `path` passed can be either a local file, a file in HDFS (or other 
Hadoop-supported
    * filesystems), or an HTTP, HTTPS or FTP URI.  To access the file in Spark 
jobs,
-   * use `SparkFiles.get(path)` to find its download location.
+   * use `SparkFiles.get(fileName)` to find its download location.
    */
   def addFile(path: String) {
     val uri = new URI(path)
@@ -1619,4 +1619,3 @@ private[spark] class WritableConverter[T](
     val writableClass: ClassTag[T] => Class[_ <: Writable],
     val convert: Writable => T)
   extends Serializable
-

http://git-wip-us.apache.org/repos/asf/spark/blob/ba78383b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala 
b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index e0a4815..8e178bc 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -545,7 +545,7 @@ class JavaSparkContext(val sc: SparkContext) extends 
JavaSparkContextVarargsWork
    * Add a file to be downloaded with this Spark job on every node.
    * The `path` passed can be either a local file, a file in HDFS (or other 
Hadoop-supported
    * filesystems), or an HTTP, HTTPS or FTP URI.  To access the file in Spark 
jobs,
-   * use `SparkFiles.get(path)` to find its download location.
+   * use `SparkFiles.get(fileName)` to find its download location.
    */
   def addFile(path: String) {
     sc.addFile(path)

http://git-wip-us.apache.org/repos/asf/spark/blob/ba78383b/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 82f76de..6e4fdaa 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -606,8 +606,8 @@ class SparkContext(object):
         FTP URI.
 
         To access the file in Spark jobs, use
-        L{SparkFiles.get(path)<pyspark.files.SparkFiles.get>} to find its
-        download location.
+        L{SparkFiles.get(fileName)<pyspark.files.SparkFiles.get>} with the
+        filename to find its download location.
 
         >>> from pyspark import SparkFiles
         >>> path = os.path.join(tempdir, "test.txt")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to