Skip to content

Commit 5721c5a

Browse files
committed
update AkkaUtilsSuite test for the actorSelection changes, fix typos based on review comments, and remove extra lines from AkkaUtils that I missed in the rebase
1 parent f351763 commit 5721c5a

File tree

3 files changed

+12
-39
lines changed

3 files changed

+12
-39
lines changed

core/src/main/scala/org/apache/spark/SecurityManager.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,11 @@ private[spark] class SecurityManager extends Logging {
3636
logDebug("is auth enabled = " + isAuthOn + " is uiAuth enabled = " + isUIAuthOn)
3737

3838
/**
39-
* In Yarn mode its uses Hadoop UGI to pass the secret as that
39+
* In Yarn mode it uses Hadoop UGI to pass the secret as that
4040
* will keep it protected. For a standalone SPARK cluster
4141
* use a environment variable SPARK_SECRET to specify the secret.
4242
* This probably isn't ideal but only the user who starts the process
43-
* should have access to view the variable (atleast on Linux).
43+
* should have access to view the variable (at least on Linux).
4444
* Since we can't set the environment variable we set the
4545
* java system property SPARK_SECRET so it will automatically
4646
* generate a secret is not specified. This definitely is not

core/src/main/scala/org/apache/spark/util/AkkaUtils.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,6 @@ private[spark] object AkkaUtils extends Logging {
100100
|akka.remote.log-remote-lifecycle-events = $lifecycleEvents
101101
|akka.log-dead-letters = $lifecycleEvents
102102
|akka.log-dead-letters-during-shutdown = $lifecycleEvents
103-
|akka.remote.netty.require-cookie = "$requireCookie"
104-
|akka.remote.netty.secure-cookie = "$secureCookie"
105103
""".stripMargin))
106104

107105
val actorSystem = if (indestructible) {

core/src/test/scala/org/apache/spark/AkkaUtilsSuite.scala

Lines changed: 10 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -51,29 +51,17 @@ class AkkaUtilsSuite extends FunSuite with LocalSparkContext {
5151
System.setProperty("SPARK_SECRET", "bad")
5252
val securityManagerBad= new SecurityManager();
5353

54+
assert(securityManagerBad.isAuthenticationEnabled() === true)
55+
5456
val (slaveSystem, _) = AkkaUtils.createActorSystem("spark-slave", hostname, 0,
5557
conf = conf, securityManager = securityManagerBad)
5658
val slaveTracker = new MapOutputTracker(conf)
5759
val selection = slaveSystem.actorSelection(
5860
s"akka.tcp://spark@localhost:$boundPort/user/MapOutputTracker")
5961
val timeout = AkkaUtils.lookupTimeout(conf)
60-
slaveTracker.trackerActor = Await.result(selection.resolveOne(timeout), timeout)
61-
62-
assert(securityManagerBad.isAuthenticationEnabled() === true)
63-
64-
masterTracker.registerShuffle(10, 1)
65-
masterTracker.incrementEpoch()
66-
slaveTracker.updateEpoch(masterTracker.getEpoch)
67-
68-
val compressedSize1000 = MapOutputTracker.compressSize(1000L)
69-
val size1000 = MapOutputTracker.decompressSize(compressedSize1000)
70-
masterTracker.registerMapOutput(10, 0, new MapStatus(
71-
BlockManagerId("a", "hostA", 1000, 0), Array(compressedSize1000)))
72-
masterTracker.incrementEpoch()
73-
slaveTracker.updateEpoch(masterTracker.getEpoch)
74-
75-
// this should fail since password wrong
76-
intercept[SparkException] { slaveTracker.getServerStatuses(10, 0) }
62+
intercept[akka.actor.ActorNotFound] {
63+
slaveTracker.trackerActor = Await.result(selection.resolveOne(timeout), timeout)
64+
}
7765

7866
actorSystem.shutdown()
7967
slaveSystem.shutdown()
@@ -198,30 +186,17 @@ class AkkaUtilsSuite extends FunSuite with LocalSparkContext {
198186
System.setProperty("SPARK_SECRET", "bad")
199187
val securityManagerBad = new SecurityManager();
200188

189+
assert(securityManagerBad.isAuthenticationEnabled() === false)
190+
201191
val (slaveSystem, _) = AkkaUtils.createActorSystem("spark-slave", hostname, 0,
202192
conf = conf, securityManager = securityManagerBad)
203193
val slaveTracker = new MapOutputTracker(conf)
204194
val selection = slaveSystem.actorSelection(
205195
s"akka.tcp://spark@localhost:$boundPort/user/MapOutputTracker")
206196
val timeout = AkkaUtils.lookupTimeout(conf)
207-
slaveTracker.trackerActor = Await.result(selection.resolveOne(timeout), timeout)
208-
209-
210-
assert(securityManagerBad.isAuthenticationEnabled() === false)
211-
212-
masterTracker.registerShuffle(10, 1)
213-
masterTracker.incrementEpoch()
214-
slaveTracker.updateEpoch(masterTracker.getEpoch)
215-
216-
val compressedSize1000 = MapOutputTracker.compressSize(1000L)
217-
val size1000 = MapOutputTracker.decompressSize(compressedSize1000)
218-
masterTracker.registerMapOutput(10, 0, new MapStatus(
219-
BlockManagerId("a", "hostA", 1000, 0), Array(compressedSize1000)))
220-
masterTracker.incrementEpoch()
221-
slaveTracker.updateEpoch(masterTracker.getEpoch)
222-
223-
// this should fail since security on in server and off in client
224-
intercept[SparkException] { slaveTracker.getServerStatuses(10, 0) }
197+
intercept[akka.actor.ActorNotFound] {
198+
slaveTracker.trackerActor = Await.result(selection.resolveOne(timeout), timeout)
199+
}
225200

226201
actorSystem.shutdown()
227202
slaveSystem.shutdown()

0 commit comments

Comments (0)