File tree — 2 files changed: +10 −10 lines changed
examples/src/main/python/streaming
First file diff:

```diff
 import sys
-from operator import add

 from pyspark.streaming.context import StreamingContext
 from pyspark.streaming.duration import *
...
                           duration=Seconds(1))

     lines = ssc.socketTextStream(sys.argv[1], int(sys.argv[2]))
-    words = lines.flatMap(lambda line: line.split(" "))
-    mapped_words = words.map(lambda word: (word, 1))
-    count = mapped_words.reduceByKey(add)
-    count.pyprint()
+    counts = lines.flatMap(lambda line: line.split(" "))\
+        .map(lambda word: (word, 1))\
+        .reduceByKey(lambda a, b: a + b)
+    counts.pyprint()

     ssc.start()
     ssc.awaitTermination()
```
Second file diff:

```diff
         print >> sys.stderr, "Usage: wordcount <directory>"
         exit(-1)

-    ssc = StreamingContext(appName="PythonStreamingWordCount", duration=Seconds(1))
+    ssc = StreamingContext(appName="PythonStreamingWordCount",
+                           duration=Seconds(1))

     lines = ssc.textFileStream(sys.argv[1])
-    words = lines.flatMap(lambda line: line.split(" "))
-    mapped_words = words.map(lambda x: (x, 1))
-    count = mapped_words.reduceByKey(lambda a, b: a + b)
-    count.pyprint()
+    counts = lines.flatMap(lambda line: line.split(" "))\
+        .map(lambda x: (x, 1))\
+        .reduceByKey(lambda a, b: a + b)
+    counts.pyprint()

     ssc.start()
     ssc.awaitTermination()
```
You can’t perform that action at this time.
0 commit comments