Skip to content

Commit 398859c

Browse files
committed
Don't call SparkUncaughtExceptionHandler in AppClient as it's in driver
1 parent 0eea12a commit 398859c

File tree

1 file changed

+8
-10
lines changed

1 file changed

+8
-10
lines changed

core/src/main/scala/org/apache/spark/deploy/client/AppClient.scala

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -125,16 +125,14 @@ private[spark] class AppClient(
125125
registerMasterFutures.set(tryRegisterAllMasters())
126126
registrationRetryTimer.set(registrationRetryThread.schedule(new Runnable {
127127
override def run(): Unit = {
128-
Utils.tryOrExit {
129-
if (registered.get) {
130-
registerMasterFutures.get.foreach(_.cancel(true))
131-
registerMasterThreadPool.shutdownNow()
132-
} else if (nthRetry >= REGISTRATION_RETRIES) {
133-
markDead("All masters are unresponsive! Giving up.")
134-
} else {
135-
registerMasterFutures.get.foreach(_.cancel(true))
136-
registerWithMaster(nthRetry + 1)
137-
}
128+
if (registered.get) {
129+
registerMasterFutures.get.foreach(_.cancel(true))
130+
registerMasterThreadPool.shutdownNow()
131+
} else if (nthRetry >= REGISTRATION_RETRIES) {
132+
markDead("All masters are unresponsive! Giving up.")
133+
} else {
134+
registerMasterFutures.get.foreach(_.cancel(true))
135+
registerWithMaster(nthRetry + 1)
138136
}
139137
}
140138
}, REGISTRATION_TIMEOUT_SECONDS, TimeUnit.SECONDS))

0 commit comments

Comments (0)