")
>>> > .set("spark.executor.memory", "1g"))
>>> > sc = SparkContext(conf = conf)
>>> > ssc = StreamingContext(sc, 1)
>>> > lines = ssc.textFileStream('../inputs/2.txt')
>>> > counts = lines.flatMap(lambda line: line.split(" "))\
>>> >               .map(lambda x: (x, 1))\
>>> >               .reduceByKey(lambda a, b: a+b)
>> > counts.pprint()
>> > ssc.start()
>> > ssc.awaitTermination()
>> >
>> >
>> > content of 2.txt file is following:
>> >
>> > a1 b1 c1 d1 e1 f1 g1
Do I do something wrong? Otherwise, why does it not work?
--
View this message in context:
http://apache-spark-user-list.1001560.n3.nabble.com/Python-Spark-Streaming-example-with-textFileStream-does-not-work-Why-tp24579.html
Sent from the Apache Spark User List mailing list archive at Nabble.com.
e: 2015-09-03 15:08:18
> ---
>
> and Spark's logs.
>
> Do I do something wrong? Otherwise, why does it not work?
>
>
>
> --
> View this message in context:
> http://apache-spark-user-list.1001560.n3.nabble.com/Python-Spark-Streaming-example-with-textFileStream-does-not-work-Why-tp24579.html