Commit 6896586

Add comments to addWebUIFilter

1 parent 3e9630b commit 6896586

2 files changed: +12 -12 lines changed


core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala

Lines changed: 12 additions & 0 deletions
@@ -31,6 +31,7 @@ import org.apache.spark.{SparkEnv, Logging, SparkException, TaskState}
 import org.apache.spark.scheduler.{SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer}
 import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
 import org.apache.spark.util.{SerializableBuffer, AkkaUtils, Utils}
+import org.apache.spark.ui.JettyUtils

 /**
  * A scheduler backend that waits for coarse grained executors to connect to it through Akka.
@@ -279,6 +280,17 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, actorSystem: A
     }
     false
   }
+
+  // Add filters to the SparkUI
+  def addWebUIFilter(filterName: String, filterParams: String, proxyBase: String) {
+    if (Seq(filterName, filterParams, proxyBase).forall(t => t != null && t.nonEmpty)) {
+      logInfo(s"Add WebUI Filter. $filterName, $filterParams, $proxyBase")
+      conf.set("spark.ui.filters", filterName)
+      conf.set(s"spark.$filterName.params", filterParams)
+      System.setProperty("spark.ui.proxyBase", proxyBase)
+      JettyUtils.addFilters(scheduler.sc.ui.getHandlers, conf)
+    }
+  }
 }

 private[spark] object CoarseGrainedSchedulerBackend {
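For context, addWebUIFilter is typically driven from the YARN side so that the Spark UI stays reachable behind the YARN web proxy. The sketch below is a minimal, hypothetical caller that is not part of this commit: the AmIpFilter class name and the PROXY_HOST/PROXY_URI_BASE parameter keys come from Hadoop's web proxy support, while the object, method, and variable names here are made up for illustration.

import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend

// Hypothetical helper (not part of this commit): registers Hadoop's AmIpFilter
// on the Spark UI so that requests are routed through the YARN web proxy.
object YarnProxyFilterExample {
  def registerYarnProxyFilter(
      backend: CoarseGrainedSchedulerBackend,
      proxyHost: String,
      proxyPort: Int,
      proxyBase: String): Unit = {
    // Filter class shipped with Hadoop YARN's web proxy module (assumed on the classpath).
    val amFilter = "org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter"
    // AmIpFilter reads the proxy host and the full URI base from its init parameters.
    val uriBase = s"http://$proxyHost:$proxyPort$proxyBase"
    val params = s"PROXY_HOST=$proxyHost,PROXY_URI_BASE=$uriBase"
    // Delegates to the method added in this commit: sets spark.ui.filters,
    // spark.<filterName>.params, and spark.ui.proxyBase, then re-applies the
    // Jetty filters to the UI handlers.
    backend.addWebUIFilter(amFilter, params, proxyBase)
  }
}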

yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClientSchedulerBackend.scala

Lines changed: 0 additions & 12 deletions
@@ -18,7 +18,6 @@
 package org.apache.spark.scheduler.cluster

 import org.apache.hadoop.yarn.api.records.{ApplicationId, YarnApplicationState}
-import org.apache.spark.ui.JettyUtils
 import org.apache.spark.{SparkException, Logging, SparkContext}
 import org.apache.spark.deploy.yarn.{Client, ClientArguments, ExecutorLauncher}
 import org.apache.spark.scheduler.TaskSchedulerImpl
@@ -117,15 +116,4 @@ private[spark] class YarnClientSchedulerBackend(
     logInfo("Stopped")
   }

-  override def addWebUIFilter(filterName: String, filterParams: String, proxyBase: String) {
-    if (filterParams != null && filterParams.nonEmpty && proxyBase != null &&
-      proxyBase.nonEmpty && filterName != null && filterName.nonEmpty) {
-      logInfo(s"Add WebUI Filter. $filterName, $filterParams, $proxyBase")
-      conf.set("spark.ui.filters", filterName)
-      conf.set(s"spark.$filterName.params", filterParams)
-      System.setProperty("spark.ui.proxyBase", proxyBase)
-      JettyUtils.addFilters(sc.ui.getHandlers, conf)
-    }
-  }
-
 }
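With the override removed, YarnClientSchedulerBackend simply inherits addWebUIFilter from CoarseGrainedSchedulerBackend. The only change in how the arguments are validated is cosmetic: the chained null/empty checks are rewritten as a single Seq(...).forall expression with the same meaning. A small standalone sketch of that equivalence follows; the object and isUsable* names are made up for illustration.

// Illustration only: the two guard styles from this commit accept exactly the same inputs.
object GuardEquivalenceSketch {
  // Old guard: chained null/empty checks (as removed from YarnClientSchedulerBackend).
  def isUsableOld(filterName: String, filterParams: String, proxyBase: String): Boolean =
    filterParams != null && filterParams.nonEmpty && proxyBase != null &&
      proxyBase.nonEmpty && filterName != null && filterName.nonEmpty

  // New guard: the same condition expressed over a Seq (as added to CoarseGrainedSchedulerBackend).
  def isUsableNew(filterName: String, filterParams: String, proxyBase: String): Boolean =
    Seq(filterName, filterParams, proxyBase).forall(t => t != null && t.nonEmpty)

  def main(args: Array[String]): Unit = {
    // Both guards agree: true only when every argument is non-null and non-empty.
    assert(isUsableOld("f", "k=v", "/proxy") && isUsableNew("f", "k=v", "/proxy"))
    assert(!isUsableOld("f", "", "/proxy") && !isUsableNew("f", "", "/proxy"))
    assert(!isUsableOld(null, "k=v", "/proxy") && !isUsableNew(null, "k=v", "/proxy"))
  }
}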
