@@ -56,9 +56,9 @@ import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkContext, Spar
 import org.apache.spark.util.Utils
 
 private[spark] class Client(
-  val args: ClientArguments,
-  val hadoopConf: Configuration,
-  val sparkConf: SparkConf)
+    val args: ClientArguments,
+    val hadoopConf: Configuration,
+    val sparkConf: SparkConf)
   extends Logging {
 
   import Client._
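For orientation, the three constructor parameters re-indented above are everything a caller supplies. A hypothetical submission entry point (Client and ClientArguments are private[spark], so this sketch assumes code living under the org.apache.spark package tree):

import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf
import org.apache.spark.deploy.yarn.{Client, ClientArguments}

object SubmitSketch {
  def main(argv: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    // ClientArguments parses flags such as --jar, --class, --num-executors
    val args = new ClientArguments(argv, sparkConf)
    // A fresh Configuration picks up yarn-site.xml/core-site.xml from the classpath
    new Client(args, new Configuration(), sparkConf).run()
  }
}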
@@ -122,8 +122,8 @@ private[spark] class Client(
    * This uses the YarnClientApplication not available in the Yarn alpha API.
    */
   def createApplicationSubmissionContext(
-    newApp: YarnClientApplication,
-    containerContext: ContainerLaunchContext): ApplicationSubmissionContext = {
+      newApp: YarnClientApplication,
+      containerContext: ContainerLaunchContext): ApplicationSubmissionContext = {
     val appContext = newApp.getApplicationSubmissionContext
     appContext.setApplicationName(args.appName)
     appContext.setQueue(args.amQueue)
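The method above fills in the ApplicationSubmissionContext obtained from the stable API's YarnClientApplication. A minimal standalone sketch of that flow using only public YARN classes (app name and queue are placeholders):

import org.apache.hadoop.yarn.client.api.YarnClient
import org.apache.hadoop.yarn.conf.YarnConfiguration

val yarnClient = YarnClient.createYarnClient()
yarnClient.init(new YarnConfiguration())
yarnClient.start()
val newApp = yarnClient.createApplication()             // YarnClientApplication
val appContext = newApp.getApplicationSubmissionContext
appContext.setApplicationName("sketch")
appContext.setQueue("default")
// A real submission would also set the AM container spec and resource
// before calling yarnClient.submitApplication(appContext).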
@@ -190,9 +190,9 @@ private[spark] class Client(
    * for preparing resources for launching the ApplicationMaster container. Exposed for testing.
    */
   private[yarn] def copyFileToRemote(
-    destDir: Path,
-    srcPath: Path,
-    replication: Short): Path = {
+      destDir: Path,
+      srcPath: Path,
+      replication: Short): Path = {
     val destFs = destDir.getFileSystem(hadoopConf)
     val srcFs = srcPath.getFileSystem(hadoopConf)
     var destPath = srcPath
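copyFileToRemote copies a resource into the application's staging directory and raises its replication so many node managers can localize it cheaply. A rough standalone equivalent (the helper name is mine):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileUtil, Path}

def copyToStaging(destDir: Path, srcPath: Path, replication: Short,
    conf: Configuration): Path = {
  val destFs = destDir.getFileSystem(conf)
  val srcFs = srcPath.getFileSystem(conf)
  val destPath = new Path(destDir, srcPath.getName)
  FileUtil.copy(srcFs, srcPath, destFs, destPath, false, conf)  // keep the source file
  destFs.setReplication(destPath, replication)
  destPath
}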
@@ -462,7 +462,7 @@ private[spark] class Client(
 
     // Keep this for backwards compatibility but users should move to the config
     sys.env.get("SPARK_YARN_USER_ENV").foreach { userEnvs =>
-    // Allow users to specify some environment variables.
+      // Allow users to specify some environment variables.
       YarnSparkHadoopUtil.setEnvFromInputString(env, userEnvs)
       // Pass SPARK_YARN_USER_ENV itself to the AM so it can use it to set up executor environments.
       env("SPARK_YARN_USER_ENV") = userEnvs
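SPARK_YARN_USER_ENV carries comma-separated KEY=VALUE pairs that setEnvFromInputString merges into the container environment. A simplified sketch of that format (the real helper is more involved and also handles references to other environment variables):

import scala.collection.mutable

val env = mutable.HashMap[String, String]()
val userEnvs = "JAVA_HOME=/opt/jdk64,LD_LIBRARY_PATH=/opt/native"  // example value
userEnvs.split(",").foreach { pair =>
  val Array(k, v) = pair.split("=", 2)
  env(k) = v
}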
@@ -522,7 +522,7 @@ private[spark] class Client(
    * This sets up the launch environment, java options, and the command for launching the AM.
    */
   private def createContainerLaunchContext(newAppResponse: GetNewApplicationResponse)
-  : ContainerLaunchContext = {
+    : ContainerLaunchContext = {
     logInfo("Setting up container launch context for our AM")
     val appId = newAppResponse.getApplicationId
     val appStagingDir = getAppStagingDir(appId)
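The ContainerLaunchContext being set up here is a plain YARN record bundling the environment, local resources, and launch commands. For reference, the stable-API way to create one from scratch:

import java.util.Collections
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext
import org.apache.hadoop.yarn.util.Records

val amContainer = Records.newRecord(classOf[ContainerLaunchContext])
amContainer.setEnvironment(new java.util.HashMap[String, String]())
amContainer.setCommands(Collections.singletonList("env"))  // placeholder command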
@@ -661,14 +661,14 @@ private[spark] class Client(
     val amArgs =
       Seq(amClass) ++ userClass ++ userJar ++ primaryPyFile ++ pyFiles ++ primaryRFile ++
       userArgs ++ Seq(
-      "--executor-memory", args.executorMemory.toString + "m",
-      "--executor-cores", args.executorCores.toString,
-      "--num-executors", args.numExecutors.toString)
+        "--executor-memory", args.executorMemory.toString + "m",
+        "--executor-cores", args.executorCores.toString,
+        "--num-executors", args.numExecutors.toString)
 
     // Command for the ApplicationMaster
     val commands = prefixEnv ++ Seq(
-      YarnSparkHadoopUtil.expandEnvironment(Environment.JAVA_HOME) + "/bin/java", "-server"
-    ) ++
+        YarnSparkHadoopUtil.expandEnvironment(Environment.JAVA_HOME) + "/bin/java", "-server"
+      ) ++
       javaOpts ++ amArgs ++
       Seq(
         "1>", ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout",
@@ -728,9 +728,9 @@ private[spark] class Client(
    * @return A pair of the yarn application state and the final application state.
    */
   def monitorApplication(
-    appId: ApplicationId,
-    returnOnRunning: Boolean = false,
-    logApplicationReport: Boolean = true): (YarnApplicationState, FinalApplicationStatus) = {
+      appId: ApplicationId,
+      returnOnRunning: Boolean = false,
+      logApplicationReport: Boolean = true): (YarnApplicationState, FinalApplicationStatus) = {
     val interval = sparkConf.getLong("spark.yarn.report.interval", 1000)
     var lastState: YarnApplicationState = null
     while (true) {
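monitorApplication polls getApplicationReport every spark.yarn.report.interval milliseconds until the application terminates (or, when returnOnRunning is set, reaches RUNNING). A stripped-down version of the loop, ignoring the returnOnRunning and logging options:

import org.apache.hadoop.yarn.api.records.{ApplicationId, FinalApplicationStatus, YarnApplicationState}
import org.apache.hadoop.yarn.client.api.YarnClient

def waitForCompletion(yarnClient: YarnClient, appId: ApplicationId, intervalMs: Long)
    : (YarnApplicationState, FinalApplicationStatus) = {
  while (true) {
    Thread.sleep(intervalMs)
    val report = yarnClient.getApplicationReport(appId)
    val state = report.getYarnApplicationState
    if (state == YarnApplicationState.FINISHED ||
        state == YarnApplicationState.FAILED ||
        state == YarnApplicationState.KILLED) {
      return (state, report.getFinalApplicationStatus)
    }
  }
  throw new IllegalStateException("unreachable")  // keeps the type checker happy
}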
@@ -1085,7 +1085,7 @@ object Client extends Logging {
       val hiveConf = hiveClass.getMethod("getConf").invoke(hive)
       val hiveConfClass = mirror.classLoader.loadClass("org.apache.hadoop.hive.conf.HiveConf")
 
-      val hiveConfGet = (param:String) => Option(hiveConfClass
+      val hiveConfGet = (param: String) => Option(hiveConfClass
         .getMethod("get", classOf[java.lang.String])
         .invoke(hiveConf, param))
 
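hiveConfGet reads HiveConf reflectively so the YARN module needs no compile-time Hive dependency. The same lookup pattern in isolation (runs only with the Hive jars on the runtime classpath):

val hiveConfClass = Thread.currentThread().getContextClassLoader
  .loadClass("org.apache.hadoop.hive.conf.HiveConf")
val hiveConf = hiveConfClass.newInstance()               // HiveConf has a no-arg constructor
val get = hiveConfClass.getMethod("get", classOf[java.lang.String])
val metastoreUris = Option(get.invoke(hiveConf, "hive.metastore.uris"))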
@@ -1107,7 +1107,7 @@ object Client extends Logging {
 
       val hive2Token = new Token[DelegationTokenIdentifier]()
       hive2Token.decodeFromUrlString(tokenStr)
-      credentials.addToken(new Text("hive.server2.delegation.token"),hive2Token)
+      credentials.addToken(new Text("hive.server2.delegation.token"), hive2Token)
       logDebug("Added hive.Server2.delegation.token to conf.")
       hiveClass.getMethod("closeCurrent").invoke(null)
     } else {
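decodeFromUrlString is the inverse of Token.encodeToUrlString, so a delegation token shipped as a string (here, fetched over the metastore's Thrift interface) can be rebuilt and registered under any alias. A generic version of the three lines above (helper name is mine):

import org.apache.hadoop.io.Text
import org.apache.hadoop.security.Credentials
import org.apache.hadoop.security.token.{Token, TokenIdentifier}

def addUrlEncodedToken(creds: Credentials, alias: String, tokenStr: String): Unit = {
  val token = new Token[TokenIdentifier]()
  token.decodeFromUrlString(tokenStr)
  creds.addToken(new Text(alias), token)
}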
@@ -1152,13 +1152,13 @@ object Client extends Logging {
 
       logInfo("Added HBase security token to credentials.")
     } catch {
-      case e:java.lang.NoSuchMethodException =>
+      case e: java.lang.NoSuchMethodException =>
         logInfo("HBase Method not found: " + e)
-      case e:java.lang.ClassNotFoundException =>
+      case e: java.lang.ClassNotFoundException =>
         logDebug("HBase Class not found: " + e)
-      case e:java.lang.NoClassDefFoundError =>
+      case e: java.lang.NoClassDefFoundError =>
         logDebug("HBase Class not found: " + e)
-      case e:Exception =>
+      case e: Exception =>
         logError("Exception when obtaining HBase security token: " + e)
     }
   }