Skip to content

Commit

Permalink
SHS-NG M4.0: Initial UI hook up.
Browse files Browse the repository at this point in the history
This change adds some building blocks for hooking up the new data store
to the UI. This is achieved by returning a new SparkUI implementation when
using the new KVStoreProvider; this new UI does not currently contain any
data for the old UI / API endpoints; that will be implemented in M4.

The interaction between the UI and the underlying store was isolated
in a new AppStateStore class. The M4 code will call into this class to
retrieve data to populate the UI and API.

Some new indexed fields had to be added to the stored types so that the
code could efficiently process the API requests.

On the history server side, some changes were made in how the UI is used.
Because there's state kept on disk, the code needs to be more careful about
closing those resources when the UIs are unloaded; and because of that, some
locking is needed to ensure files can be moved around safely. The app
cache was also simplified a bit: it now just checks a flag in the UI instance
to determine whether the UI should be used, and tries to re-load the UI when
the FS listing code invalidates it.
  • Loading branch information
Marcelo Vanzin committed May 26, 2017
1 parent 22af29f commit b3e02d3
Show file tree
Hide file tree
Showing 16 changed files with 760 additions and 728 deletions.
13 changes: 11 additions & 2 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, StandaloneSchedulerBackend}
import org.apache.spark.scheduler.local.LocalSchedulerBackend
import org.apache.spark.status.AppStateStore
import org.apache.spark.storage._
import org.apache.spark.storage.BlockManagerMessages.TriggerThreadDump
import org.apache.spark.ui.{ConsoleProgressBar, SparkUI}
Expand Down Expand Up @@ -215,6 +216,7 @@ class SparkContext(config: SparkConf) extends Logging {
private var _jars: Seq[String] = _
private var _files: Seq[String] = _
private var _shutdownHookRef: AnyRef = _
private var _stateStore: AppStateStore = _

/* ------------------------------------------------------------------------------------- *
| Accessors and public fields. These provide access to the internal state of the |
Expand Down Expand Up @@ -428,6 +430,10 @@ class SparkContext(config: SparkConf) extends Logging {
_jobProgressListener = new JobProgressListener(_conf)
listenerBus.addListener(jobProgressListener)

// Initialize the app state store and listener before SparkEnv is created so that it gets
// all events.
_stateStore = AppStateStore.createTempStore(conf, listenerBus)

// Create the Spark execution environment (cache, map output tracker, etc)
_env = createSparkEnv(_conf, isLocal, listenerBus)
SparkEnv.set(_env)
Expand All @@ -449,8 +455,8 @@ class SparkContext(config: SparkConf) extends Logging {

_ui =
if (conf.getBoolean("spark.ui.enabled", true)) {
Some(SparkUI.createLiveUI(this, _conf, listenerBus, _jobProgressListener,
_env.securityManager, appName, startTime = startTime))
Some(SparkUI.create(Some(this), _stateStore, _conf, listenerBus, _env.securityManager,
appName, "", startTime))
} else {
// For tests, do not enable the UI
None
Expand Down Expand Up @@ -1939,6 +1945,9 @@ class SparkContext(config: SparkConf) extends Logging {
}
SparkEnv.set(null)
}
if (_stateStore != null) {
_stateStore.close()
}
// Clear this `InheritableThreadLocal`, or it will still be inherited in child threads even this
// `SparkContext` is stopped.
localProperties.remove()
Expand Down
Loading

0 comments on commit b3e02d3

Please sign in to comment.