Skip to content
This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit a55b28e

Browse files
committed
Merge remote-tracking branch 'origin/separate-executor-pod-construction' into separate-external-shuffle-management
2 parents: 4926068 + 07cfca2 — commit a55b28e

File tree

4 files changed

+19
-19
lines changed

4 files changed

+19
-19
lines changed

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -101,5 +101,5 @@ package object constants {
101101
private[spark] val DRIVER_CONTAINER_NAME = "spark-kubernetes-driver"
102102
private[spark] val KUBERNETES_MASTER_INTERNAL_URL = "https://kubernetes.default.svc"
103103
private[spark] val MEMORY_OVERHEAD_FACTOR = 0.10
104-
private[spark] val MEMORY_OVERHEAD_MIN = 384L
104+
private[spark] val MEMORY_OVERHEAD_MIN_MIB = 384L
105105
}

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/BaseDriverConfigurationStep.scala

Lines changed: 8 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -46,13 +46,13 @@ private[spark] class BaseDriverConfigurationStep(
4646
private val driverLimitCores = submissionSparkConf.get(KUBERNETES_DRIVER_LIMIT_CORES)
4747

4848
// Memory settings
49-
private val driverMemoryMb = submissionSparkConf.get(
49+
private val driverMemoryMiB = submissionSparkConf.get(
5050
org.apache.spark.internal.config.DRIVER_MEMORY)
51-
private val memoryOverheadMb = submissionSparkConf
51+
private val memoryOverheadMiB = submissionSparkConf
5252
.get(KUBERNETES_DRIVER_MEMORY_OVERHEAD)
53-
.getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * driverMemoryMb).toInt,
54-
MEMORY_OVERHEAD_MIN))
55-
private val driverContainerMemoryWithOverhead = driverMemoryMb + memoryOverheadMb
53+
.getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * driverMemoryMiB).toInt,
54+
MEMORY_OVERHEAD_MIN_MIB))
55+
private val driverContainerMemoryWithOverheadMiB = driverMemoryMiB + memoryOverheadMiB
5656
private val driverDockerImage = submissionSparkConf.get(DRIVER_DOCKER_IMAGE)
5757

5858
override def configureDriver(
@@ -86,10 +86,10 @@ private[spark] class BaseDriverConfigurationStep(
8686
.withAmount(driverCpuCores)
8787
.build()
8888
val driverMemoryQuantity = new QuantityBuilder(false)
89-
.withAmount(s"${driverMemoryMb}M")
89+
.withAmount(s"${driverMemoryMiB}Mi")
9090
.build()
9191
val driverMemoryLimitQuantity = new QuantityBuilder(false)
92-
.withAmount(s"${driverContainerMemoryWithOverhead}M")
92+
.withAmount(s"${driverContainerMemoryWithOverheadMiB}Mi")
9393
.build()
9494
val maybeCpuLimitQuantity = driverLimitCores.map { limitCores =>
9595
("cpu", new QuantityBuilder(false).withAmount(limitCores).build())
@@ -102,7 +102,7 @@ private[spark] class BaseDriverConfigurationStep(
102102
.addToEnv(driverExtraClasspathEnv.toSeq: _*)
103103
.addNewEnv()
104104
.withName(ENV_DRIVER_MEMORY)
105-
.withValue(driverContainerMemoryWithOverhead + "m")
105+
.withValue(driverContainerMemoryWithOverheadMiB + "M") // JVM treats the "M" unit as "Mi"
106106
.endEnv()
107107
.addNewEnv()
108108
.withName(ENV_DRIVER_MAIN_CLASS)

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/ExecutorPodFactory.scala

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -92,16 +92,16 @@ private[spark] class ExecutorPodFactoryImpl(
9292

9393
private val executorPodNamePrefix = sparkConf.get(KUBERNETES_EXECUTOR_POD_NAME_PREFIX)
9494

95-
private val executorMemoryMb = sparkConf.get(org.apache.spark.internal.config.EXECUTOR_MEMORY)
95+
private val executorMemoryMiB = sparkConf.get(org.apache.spark.internal.config.EXECUTOR_MEMORY)
9696
private val executorMemoryString = sparkConf.get(
9797
org.apache.spark.internal.config.EXECUTOR_MEMORY.key,
9898
org.apache.spark.internal.config.EXECUTOR_MEMORY.defaultValueString)
9999

100-
private val memoryOverheadMb = sparkConf
100+
private val memoryOverheadMiB = sparkConf
101101
.get(KUBERNETES_EXECUTOR_MEMORY_OVERHEAD)
102-
.getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * executorMemoryMb).toInt,
103-
MEMORY_OVERHEAD_MIN))
104-
private val executorMemoryWithOverhead = executorMemoryMb + memoryOverheadMb
102+
.getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * executorMemoryMiB).toInt,
103+
MEMORY_OVERHEAD_MIN_MIB))
104+
private val executorMemoryWithOverhead = executorMemoryMiB + memoryOverheadMiB
105105

106106
private val executorCores = sparkConf.getDouble("spark.executor.cores", 1d)
107107
private val executorLimitCores = sparkConf.getOption(KUBERNETES_EXECUTOR_LIMIT_CORES.key)
@@ -125,10 +125,10 @@ private[spark] class ExecutorPodFactoryImpl(
125125
SPARK_ROLE_LABEL -> SPARK_POD_EXECUTOR_ROLE) ++
126126
executorLabels
127127
val executorMemoryQuantity = new QuantityBuilder(false)
128-
.withAmount(s"${executorMemoryMb}M")
128+
.withAmount(s"${executorMemoryMiB}Mi")
129129
.build()
130130
val executorMemoryLimitQuantity = new QuantityBuilder(false)
131-
.withAmount(s"${executorMemoryWithOverhead}M")
131+
.withAmount(s"${executorMemoryWithOverhead}Mi")
132132
.build()
133133
val executorCpuQuantity = new QuantityBuilder(false)
134134
.withAmount(executorCores.toString)

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/BaseDriverConfigurationStepSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -81,17 +81,17 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
8181
.toMap
8282
assert(envs.size === 6)
8383
assert(envs(ENV_SUBMIT_EXTRA_CLASSPATH) === "/opt/spark/spark-exmaples.jar")
84-
assert(envs(ENV_DRIVER_MEMORY) === "456m")
84+
assert(envs(ENV_DRIVER_MEMORY) === "456M")
8585
assert(envs(ENV_DRIVER_MAIN_CLASS) === MAIN_CLASS)
8686
assert(envs(ENV_DRIVER_ARGS) === "arg1 arg2")
8787
assert(envs(DRIVER_CUSTOM_ENV_KEY1) === "customDriverEnv1")
8888
assert(envs(DRIVER_CUSTOM_ENV_KEY2) === "customDriverEnv2")
8989
val resourceRequirements = preparedDriverSpec.driverContainer.getResources
9090
val requests = resourceRequirements.getRequests.asScala
9191
assert(requests("cpu").getAmount === "2")
92-
assert(requests("memory").getAmount === "256M")
92+
assert(requests("memory").getAmount === "256Mi")
9393
val limits = resourceRequirements.getLimits.asScala
94-
assert(limits("memory").getAmount === "456M")
94+
assert(limits("memory").getAmount === "456Mi")
9595
assert(limits("cpu").getAmount === "4")
9696
val driverPodMetadata = preparedDriverSpec.driverPod.getMetadata
9797
assert(driverPodMetadata.getName === "spark-driver-pod")

0 commit comments

Comments (0)