Commit

Use OpenHashSet

pwendell committed Aug 14, 2014
1 parent 8ce2ff0 commit 33a3473
Showing 2 changed files with 4 additions and 3 deletions.
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala

@@ -153,7 +153,7 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
       val (errorMessage, metrics): (Option[String], Option[TaskMetrics]) =
         taskEnd.reason match {
           case org.apache.spark.Success =>
-            stageData.completedIndices += info.index
+            stageData.completedIndices.add(info.index)
             stageData.numCompleteTasks += 1
             (None, Option(taskEnd.taskMetrics))
           case e: ExceptionFailure => // Handle ExceptionFailure because we might have metrics
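Note on the call site: the only change here swaps the Scala collection operator (completedIndices += info.index) for a plain method call (completedIndices.add(info.index)), because the field's type changes below in UIData.scala from scala.collection.mutable.HashSet[Int] to Spark's own OpenHashSet, which is not a Scala collection and exposes an add method rather than the += operator.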
5 changes: 3 additions & 2 deletions core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala
@@ -19,8 +19,9 @@ package org.apache.spark.ui.jobs
 
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo}
+import org.apache.spark.util.collection.OpenHashSet
 
-import scala.collection.mutable.{HashMap, HashSet}
+import scala.collection.mutable.HashMap
 
 private[jobs] object UIData {
 
@@ -38,7 +39,7 @@ private[jobs] object UIData {
   class StageUIData {
     var numActiveTasks: Int = _
     var numCompleteTasks: Int = _
-    var completedIndices = HashSet[Int]()
+    var completedIndices = new OpenHashSet[Int]()
     var numFailedTasks: Int = _
 
     var executorRunTime: Long = _
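For context, a minimal Scala sketch of how the replacement field behaves. OpenHashSet is Spark's open-addressing hash set, specialized for Int and Long so elements are stored in primitive arrays without boxing. It is a private[spark] utility, so the sketch assumes it runs from a package under org.apache.spark; the package, object name, and sample values are hypothetical, and add, contains, and size are the OpenHashSet members exercised here.

package org.apache.spark.example

// Hypothetical sketch, not part of this commit. OpenHashSet is a
// private[spark] utility, so callers must live under org.apache.spark.
import org.apache.spark.util.collection.OpenHashSet

object CompletedIndicesSketch {
  def main(args: Array[String]): Unit = {
    // Specialized for Int: indices live in a primitive array with open
    // addressing, avoiding the per-element boxing and node overhead of
    // scala.collection.mutable.HashSet[Int].
    val completedIndices = new OpenHashSet[Int]()

    // Record completed task indices; re-adding an index is a no-op.
    Seq(0, 1, 1, 2, 5).foreach(completedIndices.add)

    println(completedIndices.size)        // 4
    println(completedIndices.contains(3)) // false
  }
}

The trade-off: OpenHashSet gives up the rich Scala collection API (hence the add call in JobProgressListener) in exchange for a noticeably smaller memory footprint per stored index, which is presumably the motivation for this change, since StageUIData is retained by the UI for as long as the stage is kept in history.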
