Repository: spark
Updated Branches:
  refs/heads/master 2f2686a73 -> 1f29d502e


[SPARK-9812] [STREAMING] Fix Python 3 compatibility issue in PySpark Streaming 
and some docs

This PR includes the following fixes:
1. Use `range` instead of `xrange` in `queue_stream.py` to support Python 3.
2. Fix the issue that `utf8_decoder` returns `bytes` rather than `str` when it 
receives an empty `bytes` object in Python 3 (see the sketch after this list).
3. Fix the commands in the docs so that users can copy them directly to the 
command line. The previous commands were broken in the middle of a path, so when 
copied to the command line the path was split into two parts by the extra 
spaces, forcing the user to fix it manually.
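
For fix 2, here is a minimal standalone sketch (not part of the patch) of why 
the old `s and s.decode('utf-8')` pattern misbehaves on Python 3: an empty 
`bytes` object is falsy, so the expression short-circuits and hands back `b''` 
instead of the decoded `''`:

    # Minimal sketch of the decoder behaviour, not code from this patch.
    def old_utf8_decoder(s):
        # Old pattern: short-circuits on falsy input, so b'' comes back unchanged.
        return s and s.decode('utf-8')

    def new_utf8_decoder(s):
        # Fixed pattern: only None passes through, everything else is decoded.
        if s is None:
            return None
        return s.decode('utf-8')

    print(type(old_utf8_decoder(b'')))  # <class 'bytes'> on Python 3 -- wrong
    print(type(new_utf8_decoder(b'')))  # <class 'str'>
    print(new_utf8_decoder(None))       # None still passes through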

Author: zsxwing <zsxw...@gmail.com>

Closes #8315 from zsxwing/SPARK-9812.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1f29d502
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1f29d502
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1f29d502

Branch: refs/heads/master
Commit: 1f29d502e7ecd6faa185d70dc714f9ea3922fb6d
Parents: 2f2686a
Author: zsxwing <zsxw...@gmail.com>
Authored: Wed Aug 19 18:36:01 2015 -0700
Committer: Tathagata Das <tathagata.das1...@gmail.com>
Committed: Wed Aug 19 18:36:01 2015 -0700

----------------------------------------------------------------------
 examples/src/main/python/streaming/direct_kafka_wordcount.py | 6 +++---
 examples/src/main/python/streaming/flume_wordcount.py        | 5 +++--
 examples/src/main/python/streaming/kafka_wordcount.py        | 5 +++--
 examples/src/main/python/streaming/mqtt_wordcount.py         | 5 +++--
 examples/src/main/python/streaming/queue_stream.py           | 4 ++--
 python/pyspark/streaming/flume.py                            | 4 +++-
 python/pyspark/streaming/kafka.py                            | 4 +++-
 python/pyspark/streaming/kinesis.py                          | 4 +++-
 8 files changed, 23 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/examples/src/main/python/streaming/direct_kafka_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/direct_kafka_wordcount.py b/examples/src/main/python/streaming/direct_kafka_wordcount.py
index 6ef188a..ea20678 100644
--- a/examples/src/main/python/streaming/direct_kafka_wordcount.py
+++ b/examples/src/main/python/streaming/direct_kafka_wordcount.py
@@ -23,8 +23,8 @@
  http://kafka.apache.org/documentation.html#quickstart
 
  and then run the example
-    `$ bin/spark-submit --jars external/kafka-assembly/target/scala-*/\
-      spark-streaming-kafka-assembly-*.jar \
+    `$ bin/spark-submit --jars \
+      external/kafka-assembly/target/scala-*/spark-streaming-kafka-assembly-*.jar \
       examples/src/main/python/streaming/direct_kafka_wordcount.py \
       localhost:9092 test`
 """
@@ -37,7 +37,7 @@ from pyspark.streaming.kafka import KafkaUtils
 
 if __name__ == "__main__":
     if len(sys.argv) != 3:
-        print >> sys.stderr, "Usage: direct_kafka_wordcount.py <broker_list> <topic>"
+        print("Usage: direct_kafka_wordcount.py <broker_list> <topic>", file=sys.stderr)
         exit(-1)
 
     sc = SparkContext(appName="PythonStreamingDirectKafkaWordCount")

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/examples/src/main/python/streaming/flume_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/flume_wordcount.py b/examples/src/main/python/streaming/flume_wordcount.py
index 091b64d..d75bc6d 100644
--- a/examples/src/main/python/streaming/flume_wordcount.py
+++ b/examples/src/main/python/streaming/flume_wordcount.py
@@ -23,8 +23,9 @@
  https://flume.apache.org/documentation.html
 
  and then run the example
-    `$ bin/spark-submit --jars external/flume-assembly/target/scala-*/\
-      spark-streaming-flume-assembly-*.jar examples/src/main/python/streaming/flume_wordcount.py \
+    `$ bin/spark-submit --jars \
+      external/flume-assembly/target/scala-*/spark-streaming-flume-assembly-*.jar \
+      examples/src/main/python/streaming/flume_wordcount.py \
       localhost 12345
 """
 from __future__ import print_function

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/examples/src/main/python/streaming/kafka_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/kafka_wordcount.py b/examples/src/main/python/streaming/kafka_wordcount.py
index b178e78..8d697f6 100644
--- a/examples/src/main/python/streaming/kafka_wordcount.py
+++ b/examples/src/main/python/streaming/kafka_wordcount.py
@@ -23,8 +23,9 @@
  http://kafka.apache.org/documentation.html#quickstart
 
  and then run the example
-    `$ bin/spark-submit --jars external/kafka-assembly/target/scala-*/\
-      spark-streaming-kafka-assembly-*.jar examples/src/main/python/streaming/kafka_wordcount.py \
+    `$ bin/spark-submit --jars \
+      external/kafka-assembly/target/scala-*/spark-streaming-kafka-assembly-*.jar \
+      examples/src/main/python/streaming/kafka_wordcount.py \
       localhost:2181 test`
 """
 from __future__ import print_function

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/examples/src/main/python/streaming/mqtt_wordcount.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/mqtt_wordcount.py b/examples/src/main/python/streaming/mqtt_wordcount.py
index 617ce5e..abf9c0e 100644
--- a/examples/src/main/python/streaming/mqtt_wordcount.py
+++ b/examples/src/main/python/streaming/mqtt_wordcount.py
@@ -26,8 +26,9 @@
  http://www.eclipse.org/paho/#getting-started
 
  and then run the example
-    `$ bin/spark-submit --jars external/mqtt-assembly/target/scala-*/\
-      spark-streaming-mqtt-assembly-*.jar examples/src/main/python/streaming/mqtt_wordcount.py \
+    `$ bin/spark-submit --jars \
+      external/mqtt-assembly/target/scala-*/spark-streaming-mqtt-assembly-*.jar \
+      examples/src/main/python/streaming/mqtt_wordcount.py \
       tcp://localhost:1883 foo`
 """
 

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/examples/src/main/python/streaming/queue_stream.py
----------------------------------------------------------------------
diff --git a/examples/src/main/python/streaming/queue_stream.py b/examples/src/main/python/streaming/queue_stream.py
index dcd6a0f..b380890 100644
--- a/examples/src/main/python/streaming/queue_stream.py
+++ b/examples/src/main/python/streaming/queue_stream.py
@@ -36,8 +36,8 @@ if __name__ == "__main__":
     # Create the queue through which RDDs can be pushed to
     # a QueueInputDStream
     rddQueue = []
-    for i in xrange(5):
-        rddQueue += [ssc.sparkContext.parallelize([j for j in xrange(1, 1001)], 10)]
+    for i in range(5):
+        rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]
 
     # Create the QueueInputDStream and use it do some processing
     inputStream = ssc.queueStream(rddQueue)

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/python/pyspark/streaming/flume.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/flume.py b/python/pyspark/streaming/flume.py
index cbb573f..c0cdc50 100644
--- a/python/pyspark/streaming/flume.py
+++ b/python/pyspark/streaming/flume.py
@@ -31,7 +31,9 @@ __all__ = ['FlumeUtils', 'utf8_decoder']
 
 def utf8_decoder(s):
     """ Decode the unicode as UTF-8 """
-    return s and s.decode('utf-8')
+    if s is None:
+        return None
+    return s.decode('utf-8')
 
 
 class FlumeUtils(object):

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/python/pyspark/streaming/kafka.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/kafka.py b/python/pyspark/streaming/kafka.py
index dc5b7fd..8a814c6 100644
--- a/python/pyspark/streaming/kafka.py
+++ b/python/pyspark/streaming/kafka.py
@@ -29,7 +29,9 @@ __all__ = ['Broker', 'KafkaUtils', 'OffsetRange', 'TopicAndPartition', 'utf8_dec
 
 def utf8_decoder(s):
     """ Decode the unicode as UTF-8 """
-    return s and s.decode('utf-8')
+    if s is None:
+        return None
+    return s.decode('utf-8')
 
 
 class KafkaUtils(object):

http://git-wip-us.apache.org/repos/asf/spark/blob/1f29d502/python/pyspark/streaming/kinesis.py
----------------------------------------------------------------------
diff --git a/python/pyspark/streaming/kinesis.py b/python/pyspark/streaming/kinesis.py
index bcfe270..34be588 100644
--- a/python/pyspark/streaming/kinesis.py
+++ b/python/pyspark/streaming/kinesis.py
@@ -26,7 +26,9 @@ __all__ = ['KinesisUtils', 'InitialPositionInStream', 'utf8_decoder']
 
 def utf8_decoder(s):
     """ Decode the unicode as UTF-8 """
-    return s and s.decode('utf-8')
+    if s is None:
+        return None
+    return s.decode('utf-8')
 
 
 class KinesisUtils(object):
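
The decoder above is what these PySpark streaming sources use by default for 
keys and values. As a usage sketch only (not part of this patch, and assuming 
the `keyDecoder`/`valueDecoder` keyword parameters of 
`KafkaUtils.createDirectStream` in this Spark release), a caller could pass it 
explicitly or substitute a custom decoder:

    # Hypothetical usage sketch -- assumes the keyDecoder/valueDecoder keyword
    # arguments of KafkaUtils.createDirectStream in this Spark release.
    from pyspark import SparkContext
    from pyspark.streaming import StreamingContext
    from pyspark.streaming.kafka import KafkaUtils, utf8_decoder

    sc = SparkContext(appName="DecoderSketch")
    ssc = StreamingContext(sc, 2)

    # With the fix, empty Kafka keys/values decode to '' instead of b''.
    stream = KafkaUtils.createDirectStream(
        ssc, ["test"], {"metadata.broker.list": "localhost:9092"},
        keyDecoder=utf8_decoder, valueDecoder=utf8_decoder)
    stream.pprint()

    ssc.start()
    ssc.awaitTermination()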

