[SPARK-12311][CORE] Restore previous value of "os.arch" property in test suites after forcing to set specific value to "os.arch" property

Restore the original value of the "os.arch" property after each test.

Since some tests force a specific value onto the "os.arch" property, we need to restore the original value afterwards.

Author: Kazuaki Ishizaki <[email protected]>

Closes apache#10289 from kiszk/SPARK-12311.
kiszk authored and srowen committed Dec 24, 2015
1 parent 9e85bb7 commit 3920466
Showing 49 changed files with 338 additions and 142 deletions.
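The mechanism behind the fix is the ResetSystemProperties mixin that the affected suites pull in (see the SecurityManagerSuite and HistoryServerSuite diffs below): it snapshots the JVM-wide properties before each test and puts them back afterwards. Below is a minimal sketch of that idea, assuming a ScalaTest BeforeAndAfterEach base; the clone-based snapshot and the trait name are illustrative, not Spark's exact implementation.

import java.util.Properties

import org.scalatest.{BeforeAndAfterEach, Suite}

// Illustrative sketch only: Spark's real trait is
// org.apache.spark.util.ResetSystemProperties and may differ in detail.
trait ResetSystemPropertiesSketch extends BeforeAndAfterEach { this: Suite =>
  private var saved: Properties = _

  override def beforeEach(): Unit = {
    // Snapshot the JVM-wide properties (e.g. "os.arch") before the test runs.
    saved = System.getProperties.clone().asInstanceOf[Properties]
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      // Put the snapshot back even if a test or its teardown threw.
      System.setProperties(saved)
      saved = null
    }
  }
}

The pattern only works if every suite in the chain actually calls super.beforeEach()/super.afterEach(), which is what the bulk of this commit threads through the test suites below.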
@@ -42,6 +42,7 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
   private val conf: SparkConf = new SparkConf(loadDefaults = false)
 
   override def beforeEach(): Unit = {
+    super.beforeEach()
     tempDir = Utils.createTempDir()
     MockitoAnnotations.initMocks(this)
 
@@ -55,7 +56,11 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
   }
 
   override def afterEach(): Unit = {
-    Utils.deleteRecursively(tempDir)
+    try {
+      Utils.deleteRecursively(tempDir)
+    } finally {
+      super.afterEach()
+    }
   }
 
   test("commit shuffle files multiple times") {
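The shape of the afterEach change above recurs throughout the commit: the suite's own cleanup moves into a try block and the super call into finally, so an exception while deleting temp files cannot skip the restore logic of stacked traits. A self-contained sketch of the pattern, with a hypothetical cleanupSuiteState standing in for the suite-specific teardown:

import org.scalatest.{BeforeAndAfterEach, Suite}

// Hypothetical fixture; cleanupSuiteState stands in for calls like
// Utils.deleteRecursively(tempDir) in the suites below.
trait SafeTeardown extends BeforeAndAfterEach { this: Suite =>
  protected def cleanupSuiteState(): Unit = ()

  override def afterEach(): Unit = {
    try {
      cleanupSuiteState() // suite-specific teardown; may throw
    } finally {
      super.afterEach() // always runs, so stacked traits can still restore state
    }
  }
}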
7 changes: 5 additions & 2 deletions core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -256,8 +256,11 @@ class CheckpointSuite extends SparkFunSuite with RDDCheckpointTester with LocalS
   }
 
   override def afterEach(): Unit = {
-    super.afterEach()
-    Utils.deleteRecursively(checkpointDir)
+    try {
+      Utils.deleteRecursively(checkpointDir)
+    } finally {
+      super.afterEach()
+    }
   }
 
   override def sparkContext: SparkContext = sc
@@ -35,6 +35,7 @@ class ExternalShuffleServiceSuite extends ShuffleSuite with BeforeAndAfterAll {
   var rpcHandler: ExternalShuffleBlockHandler = _
 
   override def beforeAll() {
+    super.beforeAll()
     val transportConf = SparkTransportConf.fromSparkConf(conf, "shuffle", numUsableCores = 2)
     rpcHandler = new ExternalShuffleBlockHandler(transportConf, null)
     val transportContext = new TransportContext(transportConf, rpcHandler)
@@ -46,7 +47,11 @@ class ExternalShuffleServiceSuite extends ShuffleSuite with BeforeAndAfterAll {
   }
 
   override def afterAll() {
-    server.close()
+    try {
+      server.close()
+    } finally {
+      super.afterAll()
+    }
   }
 
   // This test ensures that the external shuffle service is actually in use for the other tests.
7 changes: 5 additions & 2 deletions core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -75,8 +75,11 @@ class FileServerSuite extends SparkFunSuite with LocalSparkContext {
   }
 
   override def afterAll() {
-    super.afterAll()
-    Utils.deleteRecursively(tmpDir)
+    try {
+      Utils.deleteRecursively(tmpDir)
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("Distributing files locally") {
7 changes: 5 additions & 2 deletions core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -44,8 +44,11 @@ class FileSuite extends SparkFunSuite with LocalSparkContext {
   }
 
   override def afterEach() {
-    super.afterEach()
-    Utils.deleteRecursively(tempDir)
+    try {
+      Utils.deleteRecursively(tempDir)
+    } finally {
+      super.afterEach()
+    }
   }
 
   test("text files") {
@@ -24,6 +24,7 @@ class HashShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {
   // This test suite should run all tests in ShuffleSuite with hash-based shuffle.
 
   override def beforeAll() {
+    super.beforeAll()
     conf.set("spark.shuffle.manager", "hash")
   }
 }
@@ -66,6 +66,7 @@ class HeartbeatReceiverSuite
    * that uses a manual clock.
    */
  override def beforeEach(): Unit = {
+    super.beforeEach()
    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("test")
@@ -38,8 +38,11 @@ class JobCancellationSuite extends SparkFunSuite with Matchers with BeforeAndAft
   with LocalSparkContext {
 
   override def afterEach() {
-    super.afterEach()
-    resetSparkContext()
+    try {
+      resetSparkContext()
+    } finally {
+      super.afterEach()
+    }
   }
 
   test("local mode, FIFO scheduler") {
9 changes: 6 additions & 3 deletions core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -28,13 +28,16 @@ trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self
   @transient var sc: SparkContext = _
 
   override def beforeAll() {
-    InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory())
+    super.beforeAll()
+    InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory())
   }
 
   override def afterEach() {
-    resetSparkContext()
-    super.afterEach()
+    try {
+      resetSparkContext()
+    } finally {
+      super.afterEach()
+    }
   }
 
   def resetSparkContext(): Unit = {
@@ -19,9 +19,9 @@ package org.apache.spark
 
 import java.io.File
 
-import org.apache.spark.util.{SparkConfWithEnv, Utils}
+import org.apache.spark.util.{ResetSystemProperties, SparkConfWithEnv, Utils}
 
-class SecurityManagerSuite extends SparkFunSuite {
+class SecurityManagerSuite extends SparkFunSuite with ResetSystemProperties {
 
   test("set security with conf") {
     val conf = new SparkConf
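Mixing ResetSystemProperties into a suite, as above, relies on ScalaTest's stackable-trait pattern: Scala's class linearization resolves each super.afterEach() call one trait to the left in the with chain. A hedged sketch with hypothetical fixture traits, assuming the 2015-era org.scalatest.FunSuite:

import org.scalatest.{BeforeAndAfterEach, FunSuite, Suite}

// Two hypothetical stackable fixtures; the names are illustrative only.
trait TempDirFixture extends BeforeAndAfterEach { this: Suite =>
  override def afterEach(): Unit =
    try println("delete temp dir") finally super.afterEach()
}

trait PropertiesFixture extends BeforeAndAfterEach { this: Suite =>
  override def afterEach(): Unit =
    try println("restore system properties") finally super.afterEach()
}

class ExampleSuite extends FunSuite with PropertiesFixture with TempDirFixture {
  test("linearization order") { assert(true) }
}

// afterEach() resolves right-to-left through the mixin chain: TempDirFixture
// runs first, then PropertiesFixture, then the base trait, so properties are
// restored last, the reverse of setup order.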
11 changes: 7 additions & 4 deletions core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -30,13 +30,16 @@ trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>
   var conf = new SparkConf(false)
 
   override def beforeAll() {
-    _sc = new SparkContext("local[4]", "test", conf)
+    super.beforeAll()
+    _sc = new SparkContext("local[4]", "test", conf)
   }
 
   override def afterAll() {
-    LocalSparkContext.stop(_sc)
-    _sc = null
-    super.afterAll()
+    try {
+      LocalSparkContext.stop(_sc)
+      _sc = null
+    } finally {
+      super.afterAll()
+    }
   }
 }
@@ -24,6 +24,7 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
 
   override def beforeAll() {
+    super.beforeAll()
     conf.set("spark.shuffle.blockTransferService", "netty")
   }
 }
2 changes: 2 additions & 0 deletions core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
@@ -37,10 +37,12 @@ class SortShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {
   private var tempDir: File = _
 
   override def beforeAll() {
+    super.beforeAll()
     conf.set("spark.shuffle.manager", "sort")
   }
 
   override def beforeEach(): Unit = {
+    super.beforeEach()
     tempDir = Utils.createTempDir()
     conf.set("spark.local.dir", tempDir.getAbsolutePath)
   }
@@ -71,15 +71,18 @@ class StandaloneDynamicAllocationSuite
   }
 
   override def afterAll(): Unit = {
-    masterRpcEnv.shutdown()
-    workerRpcEnvs.foreach(_.shutdown())
-    master.stop()
-    workers.foreach(_.stop())
-    masterRpcEnv = null
-    workerRpcEnvs = null
-    master = null
-    workers = null
-    super.afterAll()
+    try {
+      masterRpcEnv.shutdown()
+      workerRpcEnvs.foreach(_.shutdown())
+      master.stop()
+      workers.foreach(_.stop())
+      masterRpcEnv = null
+      workerRpcEnvs = null
+      master = null
+      workers = null
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("dynamic allocation default behavior") {
@@ -63,15 +63,18 @@ class AppClientSuite extends SparkFunSuite with LocalSparkContext with BeforeAnd
   }
 
   override def afterAll(): Unit = {
-    workerRpcEnvs.foreach(_.shutdown())
-    masterRpcEnv.shutdown()
-    workers.foreach(_.stop())
-    master.stop()
-    workerRpcEnvs = null
-    masterRpcEnv = null
-    workers = null
-    master = null
-    super.afterAll()
+    try {
+      workerRpcEnvs.foreach(_.shutdown())
+      masterRpcEnv.shutdown()
+      workers.foreach(_.stop())
+      master.stop()
+      workerRpcEnvs = null
+      masterRpcEnv = null
+      workers = null
+      master = null
+    } finally {
+      super.afterAll()
+    }
   }
 
   test("interface methods of AppClient using local Master") {
@@ -30,6 +30,7 @@ import org.scalatest.mock.MockitoSugar
 
 import org.apache.spark.{JsonTestUtils, SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.ui.{SparkUI, UIUtils}
+import org.apache.spark.util.ResetSystemProperties
 
 /**
  * A collection of tests against the historyserver, including comparing responses from the json
@@ -43,7 +44,7 @@ import org.apache.spark.ui.{SparkUI, UIUtils}
  * are considered part of Spark's public api.
  */
 class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers with MockitoSugar
-  with JsonTestUtils {
+  with JsonTestUtils with ResetSystemProperties {
 
   private val logDir = new File("src/test/resources/spark-events")
   private val expRoot = new File("src/test/resources/HistoryServerExpectations/")
@@ -43,8 +43,12 @@ class StandaloneRestSubmitSuite extends SparkFunSuite with BeforeAndAfterEach {
   private var server: Option[RestSubmissionServer] = None
 
   override def afterEach() {
-    rpcEnv.foreach(_.shutdown())
-    server.foreach(_.stop())
+    try {
+      rpcEnv.foreach(_.shutdown())
+      server.foreach(_.stop())
+    } finally {
+      super.afterEach()
+    }
   }
 
   test("construct submit request") {
@@ -47,6 +47,7 @@ class WholeTextFileRecordReaderSuite extends SparkFunSuite with BeforeAndAfterAl
     // hard-to-reproduce test failures, since any suites that were run after this one would inherit
     // the new value of "fs.local.block.size" (see SPARK-5227 and SPARK-5679). To work around this,
     // we disable FileSystem caching in this suite.
+    super.beforeAll()
     val conf = new SparkConf().set("spark.hadoop.fs.file.impl.disable.cache", "true")
 
     sc = new SparkContext("local", "test", conf)
@@ -59,7 +60,11 @@
   }
 
   override def afterAll() {
-    sc.stop()
+    try {
+      sc.stop()
+    } finally {
+      super.afterAll()
+    }
   }
 
   private def createNativeFile(inputDir: File, fileName: String, contents: Array[Byte],
@@ -31,14 +31,18 @@ class NettyBlockTransferServiceSuite
   private var service1: NettyBlockTransferService = _
 
   override def afterEach() {
-    if (service0 != null) {
-      service0.close()
-      service0 = null
-    }
+    try {
+      if (service0 != null) {
+        service0.close()
+        service0 = null
+      }
 
-    if (service1 != null) {
-      service1.close()
-      service1 = null
+      if (service1 != null) {
+        service1.close()
+        service1 = null
+      }
+    } finally {
+      super.afterEach()
     }
   }
 
@@ -34,12 +34,17 @@ class AsyncRDDActionsSuite extends SparkFunSuite with BeforeAndAfterAll with Tim
   @transient private var sc: SparkContext = _
 
   override def beforeAll() {
+    super.beforeAll()
     sc = new SparkContext("local[2]", "test")
   }
 
   override def afterAll() {
-    LocalSparkContext.stop(sc)
-    sc = null
+    try {
+      LocalSparkContext.stop(sc)
+      sc = null
+    } finally {
+      super.afterAll()
+    }
   }
 
   lazy val zeroPartRdd = new EmptyRDD[Int](sc)
@@ -29,6 +29,7 @@ import org.apache.spark.storage.{RDDBlockId, StorageLevel}
 class LocalCheckpointSuite extends SparkFunSuite with LocalSparkContext {
 
   override def beforeEach(): Unit = {
+    super.beforeEach()
     sc = new SparkContext("local[2]", "test")
   }
 
11 changes: 8 additions & 3 deletions core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -44,6 +44,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
   var env: RpcEnv = _
 
   override def beforeAll(): Unit = {
+    super.beforeAll()
     val conf = new SparkConf()
     env = createRpcEnv(conf, "local", 0)
 
@@ -53,10 +54,14 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
   }
 
   override def afterAll(): Unit = {
-    if (env != null) {
-      env.shutdown()
+    try {
+      if (env != null) {
+        env.shutdown()
+      }
+      SparkEnv.set(null)
+    } finally {
+      super.afterAll()
     }
-    SparkEnv.set(null)
   }
 
   def createRpcEnv(conf: SparkConf, name: String, port: Int, clientMode: Boolean = false): RpcEnv
@@ -29,6 +29,7 @@ class SerializationDebuggerSuite extends SparkFunSuite with BeforeAndAfterEach {
   import SerializationDebugger.find
 
   override def beforeEach(): Unit = {
+    super.beforeEach()
     SerializationDebugger.enableDebugging = true
  }
 
[Diff truncated; the remaining changed files of the 49 are not shown.]
