From 446b0a4206f07525b726413444b6354dca55fea3 Mon Sep 17 00:00:00 2001
From: Joshi
Date: Mon, 29 Jun 2015 20:22:51 -0700
Subject: [PATCH] Fix for SparkContext stop behavior

---
 .../scala/org/apache/spark/SparkEnv.scala | 28 ++++++++-----------
 1 file changed, 12 insertions(+), 16 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index c1ffe7840d9eb..e49779b3b094d 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -95,22 +95,18 @@ class SparkEnv (
     if (!isStopped) {
       isStopped = true
-      try {
-        pythonWorkers.foreach { case (key, worker) => worker.stop()}
-        Option(httpFileServer).foreach(_.stop())
-        mapOutputTracker.stop()
-        shuffleManager.stop()
-        broadcastManager.stop()
-        blockManager.stop()
-        blockManager.master.stop()
-        metricsSystem.stop()
-        outputCommitCoordinator.stop()
-        rpcEnv.shutdown()
-      } catch {
-        case NonFatal(e) =>
-          logInfo("Exception while SparkEnv stop", e)
-          throw e;
-      }
+
+      pythonWorkers.foreach { case (key, worker) => worker.stop()}
+      Option(httpFileServer).foreach(_.stop())
+      mapOutputTracker.stop()
+      shuffleManager.stop()
+      broadcastManager.stop()
+      blockManager.stop()
+      blockManager.master.stop()
+      metricsSystem.stop()
+      outputCommitCoordinator.stop()
+      rpcEnv.shutdown()
+
       // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
       // down, but let's call it anyway in case it gets fixed in a later release