MesosClusterScheduler.scala
@@ -460,7 +460,7 @@ private[spark] class MesosClusterScheduler(
containerInfo
}

- private def getDriverCommandValue(desc: MesosDriverDescription): String = {
+ private[scheduler] def getDriverCommandValue(desc: MesosDriverDescription): String = {
val dockerDefined = desc.conf.contains("spark.mesos.executor.docker.image")
val executorUri = getDriverExecutorURI(desc)
// Gets the path to run spark-submit, and the path to the Mesos sandbox.
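The hunks that follow wrap user-controlled strings in shellEscape(...). The helper's implementation is not part of this diff; below is a minimal sketch of the behavior the tests further down rely on (hypothetical name and logic, inferred from the assertions, not the code under review): values containing shell metacharacters are wrapped in double quotes, with embedded quotes, backticks, dollar signs, and backslashes backslash-escaped, while harmless strings pass through untouched.

// Hypothetical sketch, not the diff's actual helper: quote a value so a
// POSIX shell treats it as a single literal argument.
def shellEscapeSketch(value: String): String = {
  val shellSpecial = """ '<>&|?*;!#\()"$`""".toSet
  if (value.exists(c => shellSpecial.contains(c))) {
    // Escape the characters that remain special inside double quotes, then wrap.
    "\"" + value.replaceAll("""(["`$\\])""", """\\$1""") + "\""
  } else {
    value // nothing to escape
  }
}

// shellEscapeSketch("AnApp With $pecialChars.py")
//   => "AnApp With \$pecialChars.py"   (quoted, with the $ escaped)

The real helper may well handle more cases (for example, leaving values the user already quoted alone); the sketch covers only what the tests exercise.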
@@ -508,14 +508,14 @@ private[spark] class MesosClusterScheduler(

private def generateCmdOption(desc: MesosDriverDescription, sandboxPath: String): Seq[String] = {
var options = Seq(
- "--name", desc.conf.get("spark.app.name"),
+ "--name", shellEscape(desc.conf.get("spark.app.name")),
"--master", s"mesos://${conf.get("spark.master")}",
"--driver-cores", desc.cores.toString,
"--driver-memory", s"${desc.mem}M")

// Assume empty main class means we're running python
if (!desc.command.mainClass.equals("")) {
- options ++= Seq("--class", desc.command.mainClass)
+ options ++= Seq("--class", shellEscape(desc.command.mainClass))
}

desc.conf.getOption("spark.executor.memory").foreach { v =>
@@ -542,7 +542,7 @@
.filter { case (key, _) => !replicatedOptionsBlacklist.contains(key) }
.toMap
(defaultConf ++ driverConf).foreach { case (key, value) =>
- options ++= Seq("--conf", s""""$key=${shellEscape(value)}"""".stripMargin) }
+ options ++= Seq("--conf", s"$key=${shellEscape(value)}") }

options
}
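This last hunk is the substantive escaping fix in generateCmdOption. The old interpolation wrapped every --conf pair in an extra set of literal double quotes on top of whatever quoting shellEscape already produced (the trailing .stripMargin was a no-op, since the string contains no margin characters), so values with special characters came out doubly quoted and were mis-parsed by the shell. The new code emits the pair once and lets shellEscape decide whether quoting is needed. A worked example, using the hypothetical sketch above:

// Before/after of the --conf formatting, with shellEscapeSketch standing in
// for the real shellEscape.
val key = "spark.driver.extraJavaOptions"
val value = "-Dpath=$PATH"

val before = s""""$key=${shellEscapeSketch(value)}""""
// before == "spark.driver.extraJavaOptions="-Dpath=\$PATH""
//           (the outer quote ends prematurely at the inner opening quote)

val after = s"$key=${shellEscapeSketch(value)}"
// after  == spark.driver.extraJavaOptions="-Dpath=\$PATH"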
MesosClusterSchedulerSuite.scala
@@ -58,14 +58,20 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
}

private def testDriverDescription(submissionId: String): MesosDriverDescription = {
testDriverDescription(submissionId, Map[String, String]())
}

private def testDriverDescription(
submissionId: String,
schedulerProps: Map[String, String]): MesosDriverDescription = {
new MesosDriverDescription(
"d1",
"jar",
1000,
1,
true,
command,
- Map[String, String](),
+ schedulerProps,
submissionId,
new Date())
}
@@ -199,6 +205,46 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext wi
})
}

test("escapes spark.app.name correctly") {
setScheduler()

val driverDesc = testDriverDescription("s1", Map[String, String](
"spark.app.name" -> "AnApp With $pecialChars.py",
"spark.mesos.executor.home" -> "test"
))

val cmdString = scheduler.getDriverCommandValue(driverDesc)
assert(cmdString.contains("AnApp With \\$pecialChars.py"))
}

test("escapes extraJavaOptions correctly") {
setScheduler()

val driverDesc = testDriverDescription("s1", Map[String, String](
"spark.app.name" -> "app.py",
"spark.mesos.executor.home" -> "test",
"spark.driver.extraJavaOptions" -> "-DparamA=\"val1 val2\" -Dpath=$PATH"
))

val cmdString = scheduler.getDriverCommandValue(driverDesc)
assert(cmdString.contains(
"spark.driver.extraJavaOptions=\"-DparamA=\\\"val1 val2\\\" -Dpath=\\$PATH"))
}
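The expected substring in this assertion is buried under a layer of Scala string-literal escaping. Printing it shows the raw text: the inner quotes and the dollar sign are backslash-escaped so the shell neither splits the option value nor expands $PATH on the submitting side.

// Unescaping the Scala literal in the assertion gives the raw expected text:
println("spark.driver.extraJavaOptions=\"-DparamA=\\\"val1 val2\\\" -Dpath=\\$PATH")
// prints: spark.driver.extraJavaOptions="-DparamA=\"val1 val2\" -Dpath=\$PATH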

test("does not escape $MESOS_SANDBOX for --py-files when using a docker image") {
setScheduler()

val driverDesc = testDriverDescription("s1", Map[String, String](
"spark.app.name" -> "app.py",
"spark.mesos.executor.docker.image" -> "test/spark:01",
"spark.submit.pyFiles" -> "http://site.com/extraPythonFile.py"
))

val cmdString = scheduler.getDriverCommandValue(driverDesc)
assert(!cmdString.contains("\\$MESOS_SANDBOX/extraPythonFile.py"))
assert(cmdString.contains("$MESOS_SANDBOX/extraPythonFile.py"))
}
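This test pins down a deliberate non-goal of the escaping. When a Docker image is configured, submitted Python files are fetched into the task sandbox and referenced through the $MESOS_SANDBOX environment variable, which the shell inside the container must expand at launch time; the fetch mechanics are not shown in this diff, but the assertions make the intent clear. Under the hypothetical sketch above, escaping the path would suppress that expansion:

// shellEscapeSketch("$MESOS_SANDBOX/extraPythonFile.py")
//   => "\$MESOS_SANDBOX/extraPythonFile.py"
// The escaped dollar sign would reach the container shell as literal text, so
// $MESOS_SANDBOX would never resolve to the sandbox directory. The command
// builder therefore leaves this path unescaped on purpose.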

test("supports spark.mesos.driverEnv.*") {
setScheduler()
