@@ -50,15 +50,13 @@ import org.apache.spark.ui.SparkUI
 import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedWeakValueHashMap, Utils}

 /**
- * :: DeveloperApi ::
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
  * cluster, and can be used to create RDDs, accumulators and broadcast variables on that cluster.
  *
  * @param config a Spark Config object describing the application configuration. Any settings in
  *   this config overrides the default configs as well as system properties.
  */

-@DeveloperApi
 class SparkContext(config: SparkConf) extends Logging {

   // This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
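For context, the Scaladoc above describes SparkContext as the entry point for creating RDDs, accumulators and broadcast variables on a cluster. A minimal usage sketch, assuming a local master; the app name and master URL below are illustrative and not part of this change:

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical app name and local master, for illustration only.
val conf = new SparkConf().setAppName("example").setMaster("local[2]")
val sc = new SparkContext(conf)

// Create an RDD and run a simple action on it.
val rdd = sc.parallelize(1 to 100)
println(rdd.sum())

sc.stop()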
@@ -276,27 +274,26 @@ class SparkContext(config: SparkConf) extends Logging {
     .getOrElse(512)

   // Environment variables to pass to our executors.
-  // NOTE: This should only be used for test related settings.
-  private[spark] val testExecutorEnvs = HashMap[String, String]()
+  private[spark] val executorEnvs = HashMap[String, String]()

   // Convert java options to env vars as a work around
   // since we can't set env vars directly in sbt.
   for { (envKey, propKey) <- Seq(("SPARK_TESTING", "spark.testing"))
     value <- Option(System.getenv(envKey)).orElse(Option(System.getProperty(propKey)))} {
-    testExecutorEnvs(envKey) = value
+    executorEnvs(envKey) = value
   }
   // The Mesos scheduler backend relies on this environment variable to set executor memory.
   // TODO: Set this only in the Mesos scheduler.
-  testExecutorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
-  testExecutorEnvs ++= conf.getExecutorEnv
+  executorEnvs("SPARK_EXECUTOR_MEMORY") = executorMemory + "m"
+  executorEnvs ++= conf.getExecutorEnv

   // Set SPARK_USER for user who is running SparkContext.
   val sparkUser = Option {
     Option(System.getProperty("user.name")).getOrElse(System.getenv("SPARK_USER"))
   }.getOrElse {
     SparkContext.SPARK_UNKNOWN_USER
   }
-  testExecutorEnvs("SPARK_USER") = sparkUser
+  executorEnvs("SPARK_USER") = sparkUser

   // Create and start the scheduler
   private[spark] var taskScheduler = SparkContext.createTaskScheduler(this, master)
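The renamed executorEnvs map is populated from conf.getExecutorEnv, so user-supplied executor environment variables flow through it rather than being test-only. A minimal sketch of setting one, assuming the standard SparkConf API; the variable name and values below are illustrative:

import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf()
  .setAppName("env-example")              // hypothetical app name
  .setMaster("local[2]")
  // Stored as spark.executorEnv.MY_VAR; returned by conf.getExecutorEnv
  // and merged into executorEnvs by the code above.
  .setExecutorEnv("MY_VAR", "some-value") // illustrative variable
val sc = new SparkContext(conf)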
@@ -1494,8 +1491,8 @@ object SparkContext extends Logging {
       } catch {
         // TODO: Enumerate the exact reasons why it can fail
         // But irrespective of it, it means we cannot proceed !
-        case th: Throwable => {
-          throw new SparkException("YARN mode not available ?", th)
+        case e: Exception => {
+          throw new SparkException("YARN mode not available ?", e)
         }
       }
       val backend = new CoarseGrainedSchedulerBackend(scheduler, sc.env.actorSystem)
@@ -1510,8 +1507,8 @@ object SparkContext extends Logging {
         cons.newInstance(sc).asInstanceOf[TaskSchedulerImpl]

       } catch {
-        case th: Throwable => {
-          throw new SparkException("YARN mode not available ?", th)
+        case e: Exception => {
+          throw new SparkException("YARN mode not available ?", e)
         }
       }

@@ -1521,8 +1518,8 @@ object SparkContext extends Logging {
         val cons = clazz.getConstructor(classOf[TaskSchedulerImpl], classOf[SparkContext])
         cons.newInstance(scheduler, sc).asInstanceOf[CoarseGrainedSchedulerBackend]
       } catch {
-        case th: Throwable => {
-          throw new SparkException("YARN mode not available ?", th)
+        case e: Exception => {
+          throw new SparkException("YARN mode not available ?", e)
         }
       }

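The narrowed catch matters because case th: Throwable would also trap fatal JVM errors (OutOfMemoryError, linkage errors) that reflection can surface; catching Exception lets those propagate. A sketch of the same reflective-instantiation shape with the narrower handler, where the helper and its name are hypothetical, not part of this commit:

import org.apache.spark.SparkException

// Hypothetical helper mirroring the try/catch shape used above.
def instantiateByName[T](className: String): T =
  try {
    Class.forName(className).newInstance().asInstanceOf[T]
  } catch {
    // Exception, not Throwable: fatal errors such as OutOfMemoryError
    // are not wrapped here and keep propagating.
    case e: Exception =>
      throw new SparkException("YARN mode not available ?", e)
  }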