Skip to content

Commit

Permalink
Treat checkpoint as an RDD operation
Browse files Browse the repository at this point in the history
  • Loading branch information
Andrew Or committed May 8, 2015
1 parent f496bf3 commit 19bc07b
Showing 1 changed file with 11 additions and 1 deletion.
12 changes: 11 additions & 1 deletion core/src/main/scala/org/apache/spark/rdd/RDD.scala
Original file line number · Diff line number · Diff line change
Expand Up @@ -285,6 +285,16 @@ abstract class RDD[T: ClassTag](
*/
private[spark] def withScope[U](body: => U): U = {
  // Delegate to the shared scoping utility so that all RDDs instantiated
  // inside `body` are grouped under one operation scope for this context.
  RDDOperationScope.withScope[U](sc)(body)
}

/**
 * Execute a block of code in a scope such that all new RDDs created in this body will
 * be part of the same scope, identified by the given name. For more detail, see
 * [[org.apache.spark.rdd.RDDOperationScope]].
 *
 * Note: Return statements are NOT allowed in the given body.
 *
 * @param scopeName display name of the scope the new RDDs are grouped under
 * @param body the block to execute; its result is returned unchanged
 */
private[spark] def withNamedScope[U](scopeName: String)(body: => U): U = {
  // Forward the caller-supplied name — previously `scopeName` was ignored and this
  // method behaved identically to the unnamed `withScope`, silently dropping the name.
  // NOTE(review): assumes RDDOperationScope exposes the (sc, name, allowNesting,
  // ignoreParent) overload — confirm against RDDOperationScope's declared signatures.
  RDDOperationScope.withScope[U](sc, scopeName, allowNesting = false, ignoreParent = false)(body)
}

// Transformations (return a new RDD)

/**
Expand Down Expand Up @@ -1512,7 +1522,7 @@ abstract class RDD[T: ClassTag](
* has completed (therefore the RDD has been materialized and potentially stored in memory).
* doCheckpoint() is called recursively on the parent RDDs.
*/
private[spark] def doCheckpoint() {
private[spark] def doCheckpoint(): Unit = withNamedScope("checkpoint") {
if (!doCheckpointCalled) {
doCheckpointCalled = true
if (checkpointData.isDefined) {
Expand Down

0 comments on commit 19bc07b

Please sign in to comment.