Repository: spark
Updated Branches:
  refs/heads/branch-1.0 386fd83b2 -> 7179180b7


Added doctest and method description in context.py

Added doctest for method textFile and description for methods 
_initialize_context and _ensure_initialized in context.py

Author: Jyotiska NK <jyotiska...@gmail.com>

Closes #187 from jyotiska/pyspark_context and squashes the following commits:

356f945 [Jyotiska NK] Added doctest for textFile method in context.py
5b23686 [Jyotiska NK] Updated context.py with method descriptions

(cherry picked from commit 9cff1dd25abc5e848720d853172ed42e35376fd0)
Signed-off-by: Matei Zaharia <ma...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7179180b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7179180b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7179180b

Branch: refs/heads/branch-1.0
Commit: 7179180b7e4a73021d6d715a90877bef0637da49
Parents: 386fd83
Author: Jyotiska NK <jyotiska...@gmail.com>
Authored: Wed May 28 23:08:39 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Wed May 28 23:08:48 2014 -0700

----------------------------------------------------------------------
 python/pyspark/context.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7179180b/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 27b440d..56746cb 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -173,12 +173,18 @@ class SparkContext(object):
         self._temp_dir = \
             self._jvm.org.apache.spark.util.Utils.createTempDir(local_dir).getAbsolutePath()
 
-    # Initialize SparkContext in function to allow subclass specific initialization
     def _initialize_context(self, jconf):
+        """
+        Initialize SparkContext in function to allow subclass specific initialization
+        """
         return self._jvm.JavaSparkContext(jconf)
 
     @classmethod
     def _ensure_initialized(cls, instance=None, gateway=None):
+        """
+        Checks whether a SparkContext is initialized or not.
+        Throws error if a SparkContext is already running.
+        """
         with SparkContext._lock:
             if not SparkContext._gateway:
                 SparkContext._gateway = gateway or launch_gateway()
@@ -270,6 +276,13 @@ class SparkContext(object):
         Read a text file from HDFS, a local file system (available on all
         nodes), or any Hadoop-supported file system URI, and return it as an
         RDD of Strings.
+        
+        >>> path = os.path.join(tempdir, "sample-text.txt")
+        >>> with open(path, "w") as testFile:
+        ...    testFile.write("Hello world!")
+        >>> textFile = sc.textFile(path)
+        >>> textFile.collect()
+        [u'Hello world!']
         """
         minPartitions = minPartitions or min(self.defaultParallelism, 2)
         return RDD(self._jsc.textFile(name, minPartitions), self,

Reply via email to