3 files changed: +14 -0 lines changed
- core/src/main/scala/org/apache/spark/deploy
- yarn/src/main/scala/org/apache/spark/deploy/yarn

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -328,6 +328,10 @@ object SparkSubmit {
       }
     }

+    if (args.isPython && System.getenv("PYSPARK_ARCHIVES_PATH") != null) {
+      args.files = mergeFileLists(args.files, System.getenv("PYSPARK_ARCHIVES_PATH"))
+    }
+
     // If we're running a R app, set the main class to our specific R runner
     if (args.isR && deployMode == CLIENT) {
       if (args.primaryResource == SPARKR_SHELL) {
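The SparkSubmit hunk above merges the archives listed in the PYSPARK_ARCHIVES_PATH environment variable into args.files for Python applications, so the PySpark archives are shipped along with the job. Below is a minimal standalone sketch of that merge step; mergeFileLists here is a hypothetical stand-in for the real helper in SparkSubmit, and the PYSPARK_ARCHIVES_PATH value is purely illustrative.

// Standalone sketch (not Spark's actual implementation): merge comma-separated
// file lists the way the hunk above merges PYSPARK_ARCHIVES_PATH into args.files.
object MergeArchivesSketch {
  // Hypothetical stand-in for SparkSubmit's mergeFileLists: drop null/empty
  // lists, then join the remainder with commas.
  def mergeFileLists(lists: String*): String =
    lists.filter(l => l != null && l.nonEmpty).mkString(",")

  def main(args: Array[String]): Unit = {
    val files = "hdfs:///user/me/helper.py"
    // Illustrative default only; in a real deployment PYSPARK_ARCHIVES_PATH
    // would point at pyspark.zip and the py4j zip.
    val archives = sys.env.getOrElse("PYSPARK_ARCHIVES_PATH",
      "local:///opt/spark/python/lib/pyspark.zip,local:///opt/spark/python/lib/py4j-src.zip")
    println(mergeFileLists(files, archives))
  }
}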

yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -326,6 +326,12 @@ private[spark] class Client(
     distCacheMgr.setDistFilesEnv(env)
     distCacheMgr.setDistArchivesEnv(env)

+    if (System.getenv("PYSPARK_ARCHIVES_PATH") != null) {
+      val pythonPath = System.getenv("PYSPARK_ARCHIVES_PATH").split(",").map(
+        p => (new Path(p)).getName).mkString(":")
+      env("PYTHONPATH") = pythonPath
+    }
+
     // Pick up any environment variables for the AM provided through spark.yarn.appMasterEnv.*
     val amEnvPrefix = "spark.yarn.appMasterEnv."
     sparkConf.getAll
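The Client hunk sets PYTHONPATH in the environment prepared for the ApplicationMaster container, using only the base names of the archives (joined with ":"), since YARN localizes distributed files into each container's working directory. A small sketch of that transformation follows, using java.nio.file.Paths in place of org.apache.hadoop.fs.Path so it runs without a Hadoop dependency; Hadoop's Path.getName returns the same last path component.

import java.nio.file.Paths

// Sketch of the PYTHONPATH construction above, with java.nio standing in for
// org.apache.hadoop.fs.Path: keep each archive's base name and join with ":".
object PythonPathSketch {
  def pythonPathFromArchives(archives: String): String =
    archives.split(",").map(p => Paths.get(p).getFileName.toString).mkString(":")

  def main(args: Array[String]): Unit = {
    // Illustrative input; the real value comes from PYSPARK_ARCHIVES_PATH.
    val archives = "/opt/spark/python/lib/pyspark.zip,/opt/spark/python/lib/py4j-src.zip"
    // Prints: pyspark.zip:py4j-src.zip
    println(pythonPathFromArchives(archives))
  }
}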

yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
@@ -285,6 +285,10 @@ class ExecutorRunnable(
       YarnSparkHadoopUtil.addPathToEnvironment(env, key, value)
     }

+    if (System.getenv("PYTHONPATH") != null) {
+      env("PYTHONPATH") = System.getenv("PYTHONPATH")
+    }
+
     // Keep this for backwards compatibility but users should move to the config
     sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
       YarnSparkHadoopUtil.setEnvFromInputString(env, userEnvs)
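Finally, ExecutorRunnable, which runs in the ApplicationMaster when it builds executor launch contexts, forwards the PYTHONPATH that the Client change placed in its environment on to each executor container, and only when the variable is actually set. A trivial sketch of that guard, with a mutable map standing in for the container environment:

import scala.collection.mutable

// Sketch of the guarded copy above: forward PYTHONPATH into a container
// environment map only when it is set in the current process environment.
object ForwardPythonPathSketch {
  def main(args: Array[String]): Unit = {
    val env = mutable.HashMap[String, String]() // stand-in for the executor env
    if (System.getenv("PYTHONPATH") != null) {
      env("PYTHONPATH") = System.getenv("PYTHONPATH")
    }
    println(env.getOrElse("PYTHONPATH", "<PYTHONPATH not set>"))
  }
}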