@@ -50,7 +50,7 @@ import scala.collection.mutable.ArrayBuffer
  * Spark does not currently support encryption after authentication.
  *
  * At this point spark has multiple communication protocols that need to be secured and
- * different underlying mechisms are used depending on the protocol:
+ * different underlying mechanisms are used depending on the protocol:
  *
  * - Akka -> The only option here is to use the Akka Remote secure-cookie functionality.
  *           Akka remoting allows you to specify a secure cookie that will be exchanged
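To make the secure-cookie option concrete: with classic Akka remoting the cookie is supplied through configuration, and every process that should be allowed to connect must present the same value. A minimal sketch in Scala, assuming Typesafe Config and the Akka 2.x classic-remoting settings (the cookie literal is a placeholder):

    import com.typesafe.config.ConfigFactory

    // Both the Master/Workers and the application would load a config like
    // this; remote connections without the matching cookie are refused.
    val akkaConf = ConfigFactory.parseString("""
      akka.remote.require-cookie = "on"
      akka.remote.secure-cookie = "0E0B0A0D0C01020F"
    """)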
@@ -108,7 +108,7 @@ import scala.collection.mutable.ArrayBuffer
  * SparkUI can be configured to check the logged in user against the list of users who
  * have view acls to see if that user is authorized.
  * The filters can also be used for many different purposes. For instance filters
- * could be used for logging, encypryption , or compression.
+ * could be used for logging, encryption, or compression.
  *
  * The exact mechanisms used to generate/distribute the shared secret are deployment specific.
  *
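As an illustration of the filter hook described above, a minimal javax servlet filter might look like the sketch below. ExampleAuthFilter is a hypothetical name, and the sketch assumes the container has already authenticated the request so getRemoteUser() is populated; the SparkUI would then compare that user against the view acls:

    import javax.servlet.{Filter, FilterChain, FilterConfig, ServletRequest, ServletResponse}
    import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

    // Hypothetical filter: let requests through only when a user has been
    // authenticated; otherwise answer 401.
    class ExampleAuthFilter extends Filter {
      def init(conf: FilterConfig) {}
      def destroy() {}
      def doFilter(req: ServletRequest, res: ServletResponse, chain: FilterChain) {
        val httpReq = req.asInstanceOf[HttpServletRequest]
        if (httpReq.getRemoteUser() != null) {
          chain.doFilter(req, res)
        } else {
          res.asInstanceOf[HttpServletResponse].sendError(HttpServletResponse.SC_UNAUTHORIZED)
        }
      }
    }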
@@ -122,15 +122,11 @@ import scala.collection.mutable.ArrayBuffer
  * filters to do authentication. That authentication then happens via the ResourceManager Proxy
  * and Spark will use that to do authorization against the view acls.
  *
- * For other Spark deployments, the shared secret should be specified via the SPARK_SECRET
+ * For other Spark deployments, the shared secret must be specified via the SPARK_SECRET
  * environment variable. This isn't ideal but it means only the user who starts the process
- * has access to view that variable. Note that Spark does try to generate a secret for
- * you if the SPARK_SECRET environment variable is not set, but it gets put into the java
- * system property which can be viewed by other users, so setting the SPARK_SECRET environment
- * variable is recommended.
- * All the nodes (Master and Workers) need to have the same shared secret
- * and all the applications running need to have that same shared secret. This again
- * is not ideal as one user could potentially affect another users application.
+ * has access to view that variable.
+ * All the nodes (Master and Workers) and the applications need to have the same shared secret.
+ * This again is not ideal as one user could potentially affect another user's application.
  * This should be enhanced in the future to provide better protection.
  * If the UI needs to be secured the user needs to install a javax servlet filter to do the
  * authentication. Spark will then use that user to compare against the view acls to do
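The resolution order implied above (and implemented in generateSecretKey() further down) checks the JVM system property first and then the environment variable. A condensed sketch of just that lookup:

    // Prefer the system property, fall back to the environment variable,
    // and fail if neither yields a non-empty secret.
    val secret = Option(System.getProperty("SPARK_SECRET", System.getenv("SPARK_SECRET")))
      .filter(_.nonEmpty)
      .getOrElse(sys.error("SPARK_SECRET must be set for this deployment"))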
@@ -152,7 +148,8 @@ private[spark] class SecurityManager extends Logging {
   private val viewAcls = aclUsers.map(_.trim()).filter(!_.isEmpty).toSet

   private val secretKey = generateSecretKey()
-  logDebug("is auth enabled = " + authOn + " is uiAcls enabled = " + uiAclsOn)
+  logInfo("SecurityManager, is authentication enabled: " + authOn +
+    " are ui acls enabled: " + uiAclsOn)

   // Set our own authenticator to properly negotiate user/password for HTTP connections.
   // This is needed by the HTTP client fetching from the HttpServer. Put here so its
@@ -170,7 +167,7 @@ private[spark] class SecurityManager extends Logging {
           return passAuth
         }
       }
-    );
+    )
   }

   /**
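The `return passAuth` fragment above is the tail of a java.net.Authenticator registration. A self-contained sketch of that pattern follows; the user name and secret literal are stand-ins for SecurityManager's own state:

    import java.net.{Authenticator, PasswordAuthentication}

    val secretKey = "placeholder-secret"  // stand-in for the negotiated secret
    Authenticator.setDefault(
      new Authenticator() {
        override def getPasswordAuthentication(): PasswordAuthentication = {
          // hand the shared user/secret to any HTTP connection that asks
          new PasswordAuthentication("sparkHttpUser", secretKey.toCharArray())
        }
      }
    )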
@@ -179,16 +176,12 @@ private[spark] class SecurityManager extends Logging {
    * The way the key is stored depends on the Spark deployment mode. Yarn
    * uses the Hadoop UGI.
    *
-   * For non-Yarn deployments, If the environment variable is not set already
-   * we generate a secret and since we can't set an environment variable dynamically
-   * we set the java system property SPARK_SECRET. This will allow it to automatically
-   * work in certain situations. Others this still will not work and this definitely is
-   * not ideal since other users can see it. We should switch to put it in
-   * a config once Spark supports configs.
+   * For non-Yarn deployments, if the environment variable is not set
+   * we throw an exception.
    */
   private def generateSecretKey(): String = {
     if (!isAuthenticationEnabled) return null
-    // first check to see if the secret is already set, else generate a new one
+    // first check to see if the secret is already set, else generate a new one if on yarn
     if (SparkHadoopUtil.get.isYarnMode) {
       val secretKey = SparkHadoopUtil.get.getSecretKeyFromUserCredentials(sparkSecretLookupKey)
       if (secretKey != null) {
@@ -200,17 +193,17 @@ private[spark] class SecurityManager extends Logging {
     }
     val secret = System.getProperty("SPARK_SECRET", System.getenv("SPARK_SECRET"))
     if (secret != null && !secret.isEmpty()) return secret
-    // generate one
-    val sCookie = akka.util.Crypt.generateSecureCookie
-
-    // if we generated the secret then we must be the first so lets set it so t
-    // gets used by everyone else
+    val sCookie = if (SparkHadoopUtil.get.isYarnMode) {
+      // generate one
+      akka.util.Crypt.generateSecureCookie
+    } else {
+      throw new Exception("Error: a secret key must be specified via SPARK_SECRET env variable")
+    }
     if (SparkHadoopUtil.get.isYarnMode) {
+      // if we generated the secret then we must be the first so let's set it so it
+      // gets used by everyone else
       SparkHadoopUtil.get.addSecretKeyToUserCredentials(sparkSecretLookupKey, sCookie)
-      logDebug("adding secret to credentials yarn mode")
-    } else {
-      System.setProperty("SPARK_SECRET", sCookie)
-      logDebug("adding secret to java property")
+      logInfo("adding secret to credentials in yarn mode")
     }
     sCookie
   }
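Written out as a standalone function, the resolution order generateSecretKey() now implements looks roughly like the sketch below (slightly condensed; fromUgi stands in for the YARN credential lookup):

    // Sketch of the order: stored credential, then property/env, then
    // generate on YARN, else fail.
    def resolveSecret(isYarn: Boolean, fromUgi: Option[String]): String = {
      fromUgi
        .orElse(Option(System.getProperty("SPARK_SECRET", System.getenv("SPARK_SECRET")))
          .filter(_.nonEmpty))
        .getOrElse {
          if (isYarn) akka.util.Crypt.generateSecureCookie
          else throw new Exception("Error: a secret key must be specified via SPARK_SECRET env variable")
        }
    }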
@@ -223,7 +216,9 @@ private[spark] class SecurityManager extends Logging {

   /**
    * Checks the given user against the view acl list to see if they have
-   * authorization to view the UI.
+   * authorization to view the UI. If the UI acls are disabled
+   * via spark.ui.acls.enable, all users have view access.
+   *
    * @param user the user to check for authorization
    * @return true if the user has permission, otherwise false
    */
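The method body itself falls outside this hunk; an implementation consistent with the documented behavior could be as simple as the following (assumed, not the committed code):

    // With ui acls disabled, or no authenticated user to check, allow the view.
    def checkUIViewPermissions(user: String): Boolean = {
      if (uiAclsOn && user != null) viewAcls.contains(user) else true
    }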