Skip to content

Commit

Permalink
SPARK-2645: Fix for SparkContext stop behavior
Browse files Browse the repository at this point in the history
  • Loading branch information
rekhajoshm committed Jun 25, 2015
1 parent 380c5b0 commit 58dba70
Show file tree
Hide file tree
Showing 2 changed files with 22 additions and 1 deletion.
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkEnv.scala
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ class SparkEnv (

private[spark] def stop() {

if(!isStopped) {
if (!isStopped) {
isStopped = true
try {
pythonWorkers.foreach { case (key, worker) => worker.stop()}
Expand Down
21 changes: 21 additions & 0 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ import org.apache.spark.util.Utils

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.control.NonFatal

class SparkContextSuite extends SparkFunSuite with LocalSparkContext {

Expand Down Expand Up @@ -272,4 +273,24 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
sc.stop()
}
}

test("calling multiple sc.stop() must not throw uncaught exception(50) from sparkenv") {
  // Regression test: stopping a SparkContext twice must be idempotent.
  // Before the fix, the second stop() reached SparkEnv.stop() on an
  // already-stopped env and surfaced a ServerStateException
  // ("Server is already stopped").
  var threwServerStateException = false
  try {
    sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
    sc.parallelize(1 to 4).count()
    sc.cancelAllJobs()
    sc.stop()
    // The second stop() call must be a no-op, not throw.
    sc.stop()
  } catch {
    case e: ServerStateException =>
      // This is exactly the failure mode this test guards against.
      threwServerStateException = true
    case NonFatal(_) =>
      // Any other non-fatal exception is outside the scope of this
      // regression test and is deliberately ignored.
  }
  assert(!threwServerStateException)
}
}

0 comments on commit 58dba70

Please sign in to comment.