
Commit 7c4b724

mpmolek authored and kai-chi committed
[SPARK-25934][MESOS] Don't propagate SPARK_CONF_DIR from spark submit
## What changes were proposed in this pull request?

Don't propagate SPARK_CONF_DIR to the driver in mesos cluster mode.

## How was this patch tested?

I built the 2.3.2 tag with this patch added and deployed a test job to a mesos cluster to confirm that the incorrect SPARK_CONF_DIR was no longer passed from the submit command.

Closes apache#22937 from mpmolek/fix-conf-dir.

Authored-by: Matt Molek <[email protected]>
Signed-off-by: Sean Owen <[email protected]>
(cherry picked from commit 696b75a)
Signed-off-by: Sean Owen <[email protected]>
1 parent 6488048 commit 7c4b724
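
For context, here is a minimal, self-contained Scala sketch of the filtering rule this patch puts in place. The EnvFilterSketch object and its main method are illustrative only and not part of Spark; the real logic lives in RestSubmissionClient.filterSystemEnvironment, shown in the diff below.

// Standalone sketch of the env-var filtering introduced by this patch.
// EnvFilterSketch is a hypothetical wrapper for illustration; the actual code
// is RestSubmissionClient.filterSystemEnvironment in the diff below.
object EnvFilterSketch {
  // Variables that describe the submitting machine, not the cluster, and so
  // must not be forwarded to the remote driver.
  private val BLACKLISTED_SPARK_ENV_VARS =
    Set("SPARK_ENV_LOADED", "SPARK_HOME", "SPARK_CONF_DIR")

  // Keep SPARK_* vars that are not blacklisted, plus all MESOS_* vars.
  def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
    env.filter { case (k, _) =>
      (k.startsWith("SPARK_") && !BLACKLISTED_SPARK_ENV_VARS.contains(k)) ||
        k.startsWith("MESOS_")
    }
  }

  def main(args: Array[String]): Unit = {
    val submitEnv = Map(
      "SPARK_VAR" -> "1",
      "SPARK_CONF_DIR" -> "/etc/spark/conf", // local path, wrong on the driver host
      "MESOS_VAR" -> "1",
      "OTHER_VAR" -> "1")
    // Expected output: Map(SPARK_VAR -> 1, MESOS_VAR -> 1)
    println(filterSystemEnvironment(submitEnv))
  }
}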

File tree

2 files changed: +17 -3 lines changed

core/src/main/scala/org/apache/spark/deploy/rest/RestSubmissionClient.scala

Lines changed: 5 additions & 3 deletions
@@ -408,6 +408,10 @@ private[spark] class RestSubmissionClient(master: String) extends Logging {
 }
 
 private[spark] object RestSubmissionClient {
+
+  // SPARK_HOME and SPARK_CONF_DIR are filtered out because they are usually wrong
+  // on the remote machine (SPARK-12345) (SPARK-25934)
+  private val BLACKLISTED_SPARK_ENV_VARS = Set("SPARK_ENV_LOADED", "SPARK_HOME", "SPARK_CONF_DIR")
   private val REPORT_DRIVER_STATUS_INTERVAL = 1000
   private val REPORT_DRIVER_STATUS_MAX_TRIES = 10
   val PROTOCOL_VERSION = "v1"
@@ -417,9 +421,7 @@ private[spark] object RestSubmissionClient {
    */
   private[rest] def filterSystemEnvironment(env: Map[String, String]): Map[String, String] = {
     env.filterKeys { k =>
-      // SPARK_HOME is filtered out because it is usually wrong on the remote machine (SPARK-12345)
-      (k.startsWith("SPARK_") && k != "SPARK_ENV_LOADED" && k != "SPARK_HOME") ||
-        k.startsWith("MESOS_")
+      (k.startsWith("SPARK_") && !BLACKLISTED_SPARK_ENV_VARS.contains(k)) || k.startsWith("MESOS_")
     }
   }
 }
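
A note on the approach, based on a reading of the diff rather than on the commit message: collecting the exclusions into the single BLACKLISTED_SPARK_ENV_VARS set means any future variable that is only meaningful on the submitting host can be excluded by adding one entry, instead of growing the inline boolean condition inside filterSystemEnvironment.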

core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala

Lines changed: 12 additions & 0 deletions
@@ -376,6 +376,18 @@ class StandaloneRestSubmitSuite extends SparkFunSuite with BeforeAndAfterEach {
     assert(filteredVariables == Map("SPARK_VAR" -> "1"))
   }
 
+  test("client does not send 'SPARK_HOME' env var by default") {
+    val environmentVariables = Map("SPARK_VAR" -> "1", "SPARK_HOME" -> "1")
+    val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
+    assert(filteredVariables == Map("SPARK_VAR" -> "1"))
+  }
+
+  test("client does not send 'SPARK_CONF_DIR' env var by default") {
+    val environmentVariables = Map("SPARK_VAR" -> "1", "SPARK_CONF_DIR" -> "1")
+    val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
+    assert(filteredVariables == Map("SPARK_VAR" -> "1"))
+  }
+
   test("client includes mesos env vars") {
     val environmentVariables = Map("SPARK_VAR" -> "1", "MESOS_VAR" -> "1", "OTHER_VAR" -> "1")
     val filteredVariables = RestSubmissionClient.filterSystemEnvironment(environmentVariables)
