Commit

Remove TestUtils.withSystemProperty, since it is subsumed by the ResetSystemProperties trait.
JoshRosen committed Dec 24, 2014
1 parent 633a84a commit 8783ab0
Showing 3 changed files with 33 additions and 51 deletions.
15 changes: 0 additions & 15 deletions core/src/main/scala/org/apache/spark/TestUtils.scala

@@ -107,19 +107,4 @@ private[spark] object TestUtils {
     assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath())
     out
   }
-
-  /** Allows system properties to be changed in tests */
-  def withSystemProperty[T](property: String, value: String)(block: => T): T = {
-    val originalValue = System.getProperty(property)
-    try {
-      System.setProperty(property, value)
-      block
-    } finally {
-      if (originalValue == null) {
-        System.clearProperty(property)
-      } else {
-        System.setProperty(property, originalValue)
-      }
-    }
-  }
 }
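
The removed helper implements the save-restore pattern for a single property: read the old value, set the new one, run the block, then restore it (or clear it, if it was previously unset) in a finally. The ResetSystemProperties trait named in the commit message generalizes this by snapshotting the entire system-property table before each test and restoring it afterwards. The trait's body is not part of this diff; the following is a minimal sketch of how such a trait can be written with ScalaTest's BeforeAndAfterEach, an illustration of the idea rather than the actual Spark source.

import java.util.Properties

import org.scalatest.{BeforeAndAfterEach, Suite}

trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  private var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // System.getProperties returns the live, mutable table, so take a copy
    // before the test runs. A shallow clone is enough for String entries.
    oldProperties = System.getProperties.clone().asInstanceOf[Properties]
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      // Restore the snapshot even if the test or other afterEach logic failed.
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
}

A snapshot-and-restore trait covers every property a test touches, not only the ones a developer remembered to wrap, which is what makes the per-property helper redundant.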
19 changes: 9 additions & 10 deletions core/src/test/scala/org/apache/spark/ShuffleSuite.scala

@@ -35,16 +35,15 @@ abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContext
   conf.set("spark.test.noStageRetry", "true")

   test("groupByKey without compression") {
-    TestUtils.withSystemProperty("spark.shuffle.compress", "false") {
-      sc = new SparkContext("local", "test", conf)
-      val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1)), 4)
-      val groups = pairs.groupByKey(4).collect()
-      assert(groups.size === 2)
-      val valuesFor1 = groups.find(_._1 == 1).get._2
-      assert(valuesFor1.toList.sorted === List(1, 2, 3))
-      val valuesFor2 = groups.find(_._1 == 2).get._2
-      assert(valuesFor2.toList.sorted === List(1))
-    }
+    val myConf = conf.clone().set("spark.shuffle.compress", "false")
+    sc = new SparkContext("local", "test", myConf)
+    val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1)), 4)
+    val groups = pairs.groupByKey(4).collect()
+    assert(groups.size === 2)
+    val valuesFor1 = groups.find(_._1 == 1).get._2
+    assert(valuesFor1.toList.sorted === List(1, 2, 3))
+    val valuesFor2 = groups.find(_._1 == 2).get._2
+    assert(valuesFor2.toList.sorted === List(1))
   }

   test("shuffle non-zero block size") {
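
The ShuffleSuite change needs no property reset at all: instead of toggling the JVM-wide spark.shuffle.compress flag, the test clones the suite-level SparkConf and sets the flag on its private copy. A self-contained sketch of the idiom follows; the configuration key is a real Spark setting, while the wrapping object exists only for illustration.

import org.apache.spark.SparkConf

object ConfCloneExample {
  def main(args: Array[String]): Unit = {
    // Derive a per-test configuration without touching global JVM state.
    val base = new SparkConf(loadDefaults = false).setMaster("local").setAppName("test")
    val noCompress = base.clone().set("spark.shuffle.compress", "false")

    assert(base.getOption("spark.shuffle.compress").isEmpty) // the original is unchanged
    assert(noCompress.get("spark.shuffle.compress") == "false") // the copy carries the override
  }
}

Because the SparkContext reads its settings from the SparkConf it is handed, nothing outlives the test and nothing has to be restored afterwards.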
50 changes: 24 additions & 26 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala

@@ -17,46 +17,44 @@

 package org.apache.spark

+import org.apache.spark.util.ResetSystemProperties
 import org.scalatest.FunSuite

 import org.apache.hadoop.io.BytesWritable

-class SparkContextSuite extends FunSuite with LocalSparkContext {
+class SparkContextSuite extends FunSuite with ResetSystemProperties with LocalSparkContext {

   test("Only one SparkContext may be active at a time") {
     // Regression test for SPARK-4180
-    TestUtils.withSystemProperty("spark.driver.allowMultipleContexts", "false") {
-      val conf = new SparkConf().setAppName("test").setMaster("local")
-      sc = new SparkContext(conf)
-      // A SparkContext is already running, so we shouldn't be able to create a second one
-      intercept[SparkException] { new SparkContext(conf) }
-      // After stopping the running context, we should be able to create a new one
-      resetSparkContext()
-      sc = new SparkContext(conf)
-    }
+    System.setProperty("spark.driver.allowMultipleContexts", "false")
+    val conf = new SparkConf().setAppName("test").setMaster("local")
+    sc = new SparkContext(conf)
+    // A SparkContext is already running, so we shouldn't be able to create a second one
+    intercept[SparkException] { new SparkContext(conf) }
+    // After stopping the running context, we should be able to create a new one
+    resetSparkContext()
+    sc = new SparkContext(conf)
   }

   test("Can still construct a new SparkContext after failing to construct a previous one") {
-    TestUtils.withSystemProperty("spark.driver.allowMultipleContexts", "false") {
-      // This is an invalid configuration (no app name or master URL)
-      intercept[SparkException] {
-        new SparkContext(new SparkConf())
-      }
-      // Even though those earlier calls failed, we should still be able to create a new context
-      sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
-    }
+    System.setProperty("spark.driver.allowMultipleContexts", "false")
+    // This is an invalid configuration (no app name or master URL)
+    intercept[SparkException] {
+      new SparkContext(new SparkConf())
+    }
+    // Even though those earlier calls failed, we should still be able to create a new context
+    sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
   }

   test("Check for multiple SparkContexts can be disabled via undocumented debug option") {
-    TestUtils.withSystemProperty("spark.driver.allowMultipleContexts", "true") {
-      var secondSparkContext: SparkContext = null
-      try {
-        val conf = new SparkConf().setAppName("test").setMaster("local")
-        sc = new SparkContext(conf)
-        secondSparkContext = new SparkContext(conf)
-      } finally {
-        Option(secondSparkContext).foreach(_.stop())
-      }
-    }
+    System.setProperty("spark.driver.allowMultipleContexts", "true")
+    var secondSparkContext: SparkContext = null
+    try {
+      val conf = new SparkConf().setAppName("test").setMaster("local")
+      sc = new SparkContext(conf)
+      secondSparkContext = new SparkContext(conf)
+    } finally {
+      Option(secondSparkContext).foreach(_.stop())
+    }
   }
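
These tests keep mutating system properties, but the bare System.setProperty calls are now safe: the ResetSystemProperties mixin rolls the whole property table back after each test, even when the test fails. Below is a hypothetical two-test suite demonstrating the guarantee; ExampleSuite is not part of Spark, and it assumes the trait is visible from the test's location.

import org.scalatest.FunSuite

import org.apache.spark.util.ResetSystemProperties

class ExampleSuite extends FunSuite with ResetSystemProperties {

  test("first test mutates a property") {
    System.setProperty("spark.driver.allowMultipleContexts", "true")
    assert(System.getProperty("spark.driver.allowMultipleContexts") == "true")
  }

  test("second test sees the original value again") {
    // The trait restored its snapshot after the previous test, so the
    // mutation is gone (assuming the property was unset at JVM startup).
    assert(System.getProperty("spark.driver.allowMultipleContexts") == null)
  }
}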
