diff --git a/core/src/main/scala/org/apache/spark/Dependency.scala b/core/src/main/scala/org/apache/spark/Dependency.scala
index 905328ba6b5a4..1cd629c15bd46 100644
--- a/core/src/main/scala/org/apache/spark/Dependency.scala
+++ b/core/src/main/scala/org/apache/spark/Dependency.scala
@@ -56,7 +56,7 @@ class ShuffleDependency[K, V](
 
   val shuffleId: Int = rdd.context.newShuffleId()
 
-  rdd.sparkContext.cleaner.registerShuffleForCleanup(this)
+  rdd.sparkContext.cleaner.foreach(_.registerShuffleForCleanup(this))
 }
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index ea22ad29bc885..50dbbe35f3745 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -139,7 +139,7 @@ abstract class RDD[T: ClassTag](
     }
     sc.persistRDD(this)
     // Register the RDD with the ContextCleaner for automatic GC-based cleanup
-    sc.cleaner.registerRDDForCleanup(this)
+    sc.cleaner.foreach(_.registerRDDForCleanup(this))
     storageLevel = newLevel
     this
   }
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 3d95547b20fc1..9eb434ed0ac0e 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -197,7 +197,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
     }
   }
 
-  def cleaner = sc.cleaner
+  def cleaner = sc.cleaner.get
 }
 
@@ -235,7 +235,7 @@ class CleanerTester(
     logInfo("Attempting to validate before cleanup:\n" + uncleanedResourcesToString)
     preCleanupValidate()
-    sc.cleaner.attachListener(cleanerListener)
+    sc.cleaner.get.attachListener(cleanerListener)
 
   /** Assert that all the stuff has been cleaned up */
   def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
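
For reference, the pattern in these hunks treats sc.cleaner as an Option[ContextCleaner]: production call sites register through foreach, which is a no-op when no cleaner is configured, while the test suite unwraps the value with .get because it assumes a cleaner is present. The following is a minimal, self-contained Scala sketch of that Option-based pattern; Cleaner and Context here are hypothetical stand-ins, not Spark classes.

// Sketch only: illustrates registering against an optional cleaner.
object OptionalCleanerSketch {

  // Hypothetical stand-in for ContextCleaner.
  class Cleaner {
    def registerRDDForCleanup(id: Int): Unit = println(s"registered RDD $id")
  }

  // Hypothetical stand-in for SparkContext: the cleaner may be absent.
  class Context(val cleaner: Option[Cleaner])

  def persist(sc: Context, rddId: Int): Unit = {
    // Safe either way: does nothing when cleaner is None.
    sc.cleaner.foreach(_.registerRDDForCleanup(rddId))
  }

  def main(args: Array[String]): Unit = {
    persist(new Context(Some(new Cleaner())), 1) // prints "registered RDD 1"
    persist(new Context(None), 2)                // silently skips registration

    // In test-style code that assumes a cleaner exists, .get makes that
    // assumption explicit and fails fast (NoSuchElementException) if it is violated.
    val testSc = new Context(Some(new Cleaner()))
    testSc.cleaner.get.registerRDDForCleanup(3)
  }
}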