@@ -171,7 +171,7 @@ def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize,
 
         SparkFiles._sc = self
         root_dir = SparkFiles.getRootDirectory()
-        sys.path.append(root_dir)
+        sys.path.insert(1, root_dir)
 
         # Deploy any code dependencies specified in the constructor
         self._python_includes = list()
@@ -183,10 +183,9 @@ def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize,
         for path in self._conf.get("spark.submit.pyFiles", "").split(","):
             if path != "":
                 (dirname, filename) = os.path.split(path)
-                self._python_includes.append(filename)
-                sys.path.append(path)
-                if dirname not in sys.path:
-                    sys.path.append(dirname)
+                if filename.lower().endswith("zip") or filename.lower().endswith("egg"):
+                    self._python_includes.append(filename)
+                    sys.path.insert(1, os.path.join(SparkFiles.getRootDirectory(), filename))
 
         # Create a temporary directory inside spark.local.dir:
         local_dir = self._jvm.org.apache.spark.util.Utils.getLocalDir(self._jsc.sc().conf())
@@ -667,7 +666,7 @@ def addPyFile(self, path):
         if filename.endswith('.zip') or filename.endswith('.ZIP') or filename.endswith('.egg'):
             self._python_includes.append(filename)
             # for tests in local mode
-            sys.path.append(os.path.join(SparkFiles.getRootDirectory(), filename))
+            sys.path.insert(1, os.path.join(SparkFiles.getRootDirectory(), filename))
 
     def setCheckpointDir(self, dirName):
         """