spark.executorEnv.VAR4=value4
spark.home=/path
>>> sorted(conf.getAll(), key=lambda p: p[0])
- [(u'spark.executorEnv.VAR1', u'value1'), (u'spark.executorEnv.VAR3', u'value3'), (u'spark.executorEnv.VAR4', u'value4'), (u'spark.home', u'/path')]
+ [(u'spark.executorEnv.VAR1', u'value1'), (u'spark.executorEnv.VAR3', u'value3'),\
+ (u'spark.executorEnv.VAR4', u'value4'), (u'spark.home', u'/path')]
"""

def setExecutorEnv(self, key=None, value=None, pairs=None):
    """Set an environment variable to be passed to executors.

    Exactly one of the two calling forms must be used: either a single
    ``key``/``value`` pair, or a list of ``(key, value)`` ``pairs``.

    :param key: environment variable name (single-pair form).
    :param value: environment variable value (single-pair form).
    :param pairs: iterable of ``(key, value)`` tuples (multi-pair form).
    :return: ``self``, to allow chained configuration calls.
    :raises ValueError: if both or neither of ``key`` and ``pairs`` are given.
    """
    # Reject ambiguous calls: both forms at once, or neither form.
    # ValueError (a subclass of Exception) is the idiomatic type for a
    # bad-argument error; existing callers catching Exception still work.
    if (key is not None and pairs is not None) or (key is None and pairs is None):
        raise ValueError("Either pass one key-value pair or a list of pairs")
    elif key is not None:
        # Single-pair form: forward directly to the JVM-side SparkConf.
        self._jconf.setExecutorEnv(key, value)
    elif pairs is not None:
        # Multi-pair form: forward each pair in order.
        for (k, v) in pairs:
            self._jconf.setExecutorEnv(k, v)
    return self
@@ -137,7 +138,7 @@ def setAll(self, pairs):
137138
138139 def get (self , key , defaultValue = None ):
139140 """Get the configured value for some key, or return a default otherwise."""
140- if defaultValue == None : # Py4J doesn't call the right get() if we pass None
141+ if defaultValue is None : # Py4J doesn't call the right get() if we pass None
141142 if not self ._jconf .contains (key ):
142143 return None
143144 return self ._jconf .get (key )
0 commit comments