@@ -43,7 +43,7 @@ def setUp(self):
 
     def tearDown(self):
         # Do not call pyspark.streaming.context.StreamingContext.stop directly because
-        # we do not wait to shutdown call back server and py4j client
+        # we do not wait to shut down the py4j client.
         self.ssc._jssc.stop()
         self.ssc._sc.stop()
         # Why does it take a long time to terminate StreamingContext and SparkContext?
@@ -74,7 +74,6 @@ def setUp(self):
         PySparkStreamingTestCase.setUp(self)
         self.timeout = 10  # seconds
         self.numInputPartitions = 2
-        self.result = list()
 
     def tearDown(self):
         PySparkStreamingTestCase.tearDown(self)
@@ -426,7 +425,8 @@ def _run_stream(self, test_input, test_func, expected_output, numSlices=None):
         # Apply test function to stream.
         test_stream = test_func(test_input_stream)
         # Add job to get output from stream.
-        test_stream._test_output(self.result)
+        result = list()
+        test_stream._test_output(result)
         self.ssc.start()
 
         start_time = time.time()
@@ -438,10 +438,10 @@ def _run_stream(self, test_input, test_func, expected_output, numSlices=None):
                 break
             self.ssc.awaitTermination(50)
             # Check if the output is the same length as the expected output.
-            if len(expected_output) == len(self.result):
+            if len(expected_output) == len(result):
                 break
 
-        return self.result
+        return result
 
 class TestSaveAsFilesSuite(PySparkStreamingTestCase):
     def setUp(self):
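
Aside from the comment tweak, the change above replaces the shared self.result attribute with a list created locally inside _run_stream, so each run collects its output into fresh state and a previous test cannot leave stale entries behind for the length check. Below is a minimal, self-contained sketch of that per-run polling pattern; it is not the PR's code, and the names collect_with_timeout and start_stream are made up for illustration (start_stream is assumed to push each finished batch into the collector callback it is given).

import time

def collect_with_timeout(start_stream, expected_len, timeout=10, poll_interval=0.05):
    # Each call gets its own result list, so concurrent or repeated runs
    # never share state the way a self.result attribute would.
    result = list()
    start_stream(result.append)  # the stream appends each batch of output
    start_time = time.time()
    while True:
        # Give up once the timeout elapses, mirroring the test's time check.
        if (time.time() - start_time) > timeout:
            break
        time.sleep(poll_interval)
        # Stop as soon as we have collected as many batches as expected.
        if len(result) == expected_len:
            break
    return result

The point mirrored from the diff is simply that the result list lives inside the function, so the length check only ever sees output produced by the current run.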