2 files changed, +7 −0 lines, under core/src/main/scala/org/apache/spark/deploy

@@ -75,6 +75,8 @@ class LocalSparkCluster(
     // Stop the workers before the master so they don't get upset that it disconnected
     workerRpcEnvs.foreach(_.shutdown())
     masterRpcEnvs.foreach(_.shutdown())
+    workerRpcEnvs.foreach(_.awaitTermination())
+    masterRpcEnvs.foreach(_.awaitTermination())
     masterRpcEnvs.clear()
     workerRpcEnvs.clear()
   }
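Note: in Spark, RpcEnv.shutdown() only requests an asynchronous shutdown, so the two added awaitTermination() calls make stop() block until the worker and master RPC environments have actually terminated before the collections are cleared. Below is a minimal, self-contained sketch of that shutdown/await pattern; FakeRpcEnv and ShutdownDemo are made-up names for illustration, not Spark's RpcEnv API.

import java.util.concurrent.CountDownLatch

// Stand-in for an RPC environment whose shutdown() is asynchronous.
class FakeRpcEnv(name: String) {
  private val terminated = new CountDownLatch(1)

  // Returns immediately; the real teardown happens on a background thread.
  def shutdown(): Unit = {
    new Thread(() => {
      Thread.sleep(100) // pretend cleanup work
      println(s"$name terminated")
      terminated.countDown()
    }).start()
  }

  // Blocks the caller until the background teardown has completed.
  def awaitTermination(): Unit = terminated.await()
}

object ShutdownDemo extends App {
  val workerEnvs = Seq(new FakeRpcEnv("worker-1"), new FakeRpcEnv("worker-2"))
  val masterEnvs = Seq(new FakeRpcEnv("master-1"))

  workerEnvs.foreach(_.shutdown())          // request shutdown of the workers first...
  masterEnvs.foreach(_.shutdown())          // ...then the masters
  workerEnvs.foreach(_.awaitTermination())  // now wait for each env to actually finish
  masterEnvs.foreach(_.awaitTermination())
  println("all RPC environments fully stopped; safe to clear the collections")
}

Without the awaitTermination() step, stop() can return while the environments are still tearing down, which appears to be the situation the two added lines guard against.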
@@ -71,6 +71,11 @@ private[deploy] class ExecutorRunner(
     workerThread.start()
     // Shutdown hook that kills actors on shutdown.
     shutdownHook = ShutdownHookManager.addShutdownHook { () =>
+      // It's possible that we arrive here before calling `fetchAndRunExecutor`, then `state` will
+      // be `ExecutorState.RUNNING`. In this case, we should set `state` to `FAILED`.
+      if (state == ExecutorState.RUNNING) {
+        state = ExecutorState.FAILED
+      }
       killProcess(Some("Worker shutting down")) }
   }
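Note: the added guard handles a race between the shutdown hook and fetchAndRunExecutor. If the JVM begins shutting down before the executor launch logic has run, state still holds its initial RUNNING value, so the hook sets it to FAILED before calling killProcess. A rough, self-contained sketch of that pattern follows; the names below (ShutdownHookStateDemo, reportToWorker) are hypothetical and not Spark's ExecutorRunner API.

object ShutdownHookStateDemo {
  object ExecutorState extends Enumeration {
    val RUNNING, FAILED, EXITED = Value
  }

  // The runner is handed an initial state before anything has actually run.
  @volatile var state: ExecutorState.Value = ExecutorState.RUNNING

  // Hypothetical stand-in for telling the worker how the executor ended up.
  def reportToWorker(s: ExecutorState.Value): Unit =
    println(s"executor finished with state: $s")

  def main(args: Array[String]): Unit = {
    sys.addShutdownHook {
      // The JVM is going down before the executor was ever launched, so `state`
      // is still the stale initial RUNNING value; report FAILED instead.
      if (state == ExecutorState.RUNNING) {
        state = ExecutorState.FAILED
      }
      reportToWorker(state)
    }
    // main returns here; the hook runs and prints "executor finished with state: FAILED".
  }
}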