
Commit 090b3a3
Merge branch 'master' of github.com:apache/spark into ml
Davies Liu committed Jan 28, 2015
2 parents a4f4dbf + 119f45d commit 090b3a3
Showing 155 changed files with 6,392 additions and 2,104 deletions.
2 changes: 2 additions & 0 deletions assembly/pom.xml
@@ -142,8 +142,10 @@
</includes>
<excludes>
<exclude>com/google/common/base/Absent*</exclude>
+ <exclude>com/google/common/base/Function</exclude>
<exclude>com/google/common/base/Optional*</exclude>
<exclude>com/google/common/base/Present*</exclude>
+ <exclude>com/google/common/base/Supplier</exclude>
</excludes>
</relocation>
</relocations>
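Why Function and Supplier join the exclusion list: Guava's Optional exposes both types in its public signatures (Optional.transform takes a com.google.common.base.Function and Optional.or has an overload taking a Supplier), so relocating them while Optional stays behind would break linkage for user code compiled against stock Guava. The sketch below is illustrative only; the object name and values are mine, not part of this commit. It shows the kind of caller that keeps working because all three classes remain un-relocated, both in the assembly relocation excludes above and in the matching core/pom.xml shade filter later in this diff.

import com.google.common.base.{Function, Optional}

object GuavaOptionalExample {
  def main(args: Array[String]): Unit = {
    val maybeHost: Optional[String] = Optional.of("localhost")
    // Optional.transform's signature refers to com.google.common.base.Function,
    // which is why Function must stay un-relocated alongside Optional.
    val upper: Optional[String] = maybeHost.transform(new Function[String, String] {
      override def apply(s: String): String = s.toUpperCase
    })
    // Optional.or also has an overload taking a com.google.common.base.Supplier.
    println(upper.or("unknown"))
  }
}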
5 changes: 3 additions & 2 deletions bin/spark-class
@@ -29,6 +29,7 @@ FWDIR="$(cd "`dirname "$0"`"/..; pwd)"

# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
+ export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"$SPARK_HOME/conf"}"

. "$FWDIR"/bin/load-spark-env.sh

@@ -120,8 +121,8 @@ fi
JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"

# Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e "$FWDIR/conf/java-opts" ] ; then
JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
if [ -e "$SPARK_CONF_DIR/java-opts" ] ; then
JAVA_OPTS="$JAVA_OPTS `cat "$SPARK_CONF_DIR"/java-opts`"
fi

# Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
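Net effect of the bin/spark-class hunks: the extra java-opts file is now resolved against SPARK_CONF_DIR, which defaults to $SPARK_HOME/conf but can point at a separate configuration directory. A rough Scala rendering of that fallback, purely illustrative (the object name is mine and the real logic lives in the shell script):

object ConfDirResolution {
  def main(args: Array[String]): Unit = {
    // Same precedence the script applies: an explicit SPARK_CONF_DIR wins,
    // otherwise fall back to $SPARK_HOME/conf.
    val sparkHome = sys.env.getOrElse("SPARK_HOME", ".")
    val confDir = sys.env.getOrElse("SPARK_CONF_DIR", s"$sparkHome/conf")
    println(s"extra JAVA_OPTS would be read from: $confDir/java-opts")
  }
}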
8 changes: 4 additions & 4 deletions build/mvn
@@ -68,10 +68,10 @@ install_app() {
# Install maven under the build/ folder
install_mvn() {
install_app \
"http://apache.claz.org/maven/maven-3/3.2.3/binaries" \
"apache-maven-3.2.3-bin.tar.gz" \
"apache-maven-3.2.3/bin/mvn"
MVN_BIN="${_DIR}/apache-maven-3.2.3/bin/mvn"
"http://archive.apache.org/dist/maven/maven-3/3.2.5/binaries" \
"apache-maven-3.2.5-bin.tar.gz" \
"apache-maven-3.2.5/bin/mvn"
MVN_BIN="${_DIR}/apache-maven-3.2.5/bin/mvn"
}

# Install zinc under the build/ folder
2 changes: 2 additions & 0 deletions core/pom.xml
@@ -372,8 +372,10 @@
<artifact>com.google.guava:guava</artifact>
<includes>
<include>com/google/common/base/Absent*</include>
+ <include>com/google/common/base/Function</include>
<include>com/google/common/base/Optional*</include>
<include>com/google/common/base/Present*</include>
+ <include>com/google/common/base/Supplier</include>
</includes>
</filter>
</filters>
3 changes: 2 additions & 1 deletion core/src/main/resources/org/apache/spark/ui/static/webui.css
@@ -190,6 +190,7 @@ span.additional-metric-title {

/* Hide all additional metrics by default. This is done here rather than using JavaScript to
* avoid slow page loads for stage pages with large numbers (e.g., thousands) of tasks. */
- .scheduler_delay, .deserialization_time, .serialization_time, .getting_result_time {
+ .scheduler_delay, .deserialization_time, .fetch_wait_time, .serialization_time,
+ .getting_result_time {
display: none;
}
20 changes: 11 additions & 9 deletions core/src/main/scala/org/apache/spark/Logging.scala
@@ -118,15 +118,17 @@ trait Logging {
// org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
// org.apache.logging.slf4j.Log4jLoggerFactory
val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
- val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
- if (!log4j12Initialized && usingLog4j12) {
-   val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
-   Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
-     case Some(url) =>
-       PropertyConfigurator.configure(url)
-       System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
-     case None =>
-       System.err.println(s"Spark was unable to load $defaultLogProps")
+ if (usingLog4j12) {
+   val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+   if (!log4j12Initialized) {
+     val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
+     Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
+       case Some(url) =>
+         PropertyConfigurator.configure(url)
+         System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
+       case None =>
+         System.err.println(s"Spark was unable to load $defaultLogProps")
+     }
}
}
Logging.initialized = true
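The Logging change above is a reordering rather than new behaviour: LogManager.getRootLogger, a log4j 1.2 API, is now consulted only after the slf4j binding has been confirmed to be the log4j 1.2 one, so applications that bind slf4j to another backend are not touched. A standalone sketch of that binder check, assuming slf4j 1.x on the classpath (illustrative, not code from the commit):

import org.slf4j.impl.StaticLoggerBinder

object BinderCheck {
  def main(args: Array[String]): Unit = {
    // Ask slf4j which logger factory is bound; the log4j 1.2 binding reports
    // org.slf4j.impl.Log4jLoggerFactory, while the log4j 2.0 binding reports
    // org.apache.logging.slf4j.Log4jLoggerFactory.
    val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
    val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
    println(s"slf4j binder: $binderClass, log4j 1.2 in use: $usingLog4j12")
  }
}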
37 changes: 20 additions & 17 deletions core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -17,8 +17,11 @@

package org.apache.spark

+ import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConverters._
- import scala.collection.mutable.{HashMap, LinkedHashSet}
+ import scala.collection.mutable.LinkedHashSet

import org.apache.spark.serializer.KryoSerializer

/**
@@ -46,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
/** Create a SparkConf that loads defaults from system properties and the classpath */
def this() = this(true)

- private[spark] val settings = new HashMap[String, String]()
+ private val settings = new ConcurrentHashMap[String, String]()

if (loadDefaults) {
// Load any spark.* system properties
for ((k, v) <- System.getProperties.asScala if k.startsWith("spark.")) {
- settings(k) = v
+ set(k, v)
}
}
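Switching the backing map from scala.collection.mutable.HashMap to java.util.concurrent.ConcurrentHashMap makes SparkConf safe to read and mutate from several threads without external locking, and routing the spark.* system-property defaults through set(k, v) reuses its null checks. A small self-contained illustration of the concurrency property; the example is mine, not part of the patch:

import java.util.concurrent.ConcurrentHashMap

object ConcurrentSettingsDemo {
  def main(args: Array[String]): Unit = {
    val settings = new ConcurrentHashMap[String, String]()
    val writer = new Thread(new Runnable {
      override def run(): Unit =
        for (i <- 1 to 1000) settings.put(s"spark.test.key$i", i.toString)
    })
    writer.start()
    // Reading while the writer is still inserting is safe on a ConcurrentHashMap;
    // a plain mutable.HashMap gives no such guarantee.
    val midWrite = settings.size()
    writer.join()
    println(s"saw $midWrite entries mid-write, ${settings.size()} after the join")
  }
}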

@@ -63,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
if (value == null) {
throw new NullPointerException("null value for " + key)
}
- settings(key) = value
+ settings.put(key, value)
this
}

@@ -129,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {

/** Set multiple parameters together */
def setAll(settings: Traversable[(String, String)]) = {
- this.settings ++= settings
+ this.settings.putAll(settings.toMap.asJava)
this
}

/** Set a parameter if it isn't already configured */
def setIfMissing(key: String, value: String): SparkConf = {
- if (!settings.contains(key)) {
-   settings(key) = value
- }
+ settings.putIfAbsent(key, value)
this
}
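With a concurrent map, the old check-then-act in setIfMissing collapses into a single atomic putIfAbsent, and setAll hands the whole batch to putAll after converting the Scala collection with asJava. A minimal sketch of the atomicity point; the key names and values are mine:

import java.util.concurrent.ConcurrentHashMap

object PutIfAbsentDemo {
  def main(args: Array[String]): Unit = {
    val settings = new ConcurrentHashMap[String, String]()
    settings.putIfAbsent("spark.app.name", "first")
    // The second call is a no-op because the key is already present; two racing
    // threads cannot both install their value, unlike a separate contains-then-update.
    settings.putIfAbsent("spark.app.name", "second")
    println(settings.get("spark.app.name")) // prints "first"
  }
}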

@@ -163,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {

/** Get a parameter; throws a NoSuchElementException if it's not set */
def get(key: String): String = {
- settings.getOrElse(key, throw new NoSuchElementException(key))
+ getOption(key).getOrElse(throw new NoSuchElementException(key))
}

/** Get a parameter, falling back to a default if not set */
def get(key: String, defaultValue: String): String = {
- settings.getOrElse(key, defaultValue)
+ getOption(key).getOrElse(defaultValue)
}

/** Get a parameter as an Option */
def getOption(key: String): Option[String] = {
- settings.get(key)
+ Option(settings.get(key))
}

/** Get all parameters as a list of pairs */
- def getAll: Array[(String, String)] = settings.clone().toArray
+ def getAll: Array[(String, String)] = {
+   settings.entrySet().asScala.map(x => (x.getKey, x.getValue)).toArray
+ }

/** Get a parameter as an integer, falling back to a default if not set */
def getInt(key: String, defaultValue: Int): Int = {
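The getters change because ConcurrentHashMap.get returns null for a missing key instead of an Option; wrapping the result in Option(...) inside getOption restores the previous behaviour, and get and getAll are rebuilt on top of it. A short illustration using only the JDK map and JavaConverters; the object and key names are mine:

import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._

object GetOptionDemo {
  def main(args: Array[String]): Unit = {
    val settings = new ConcurrentHashMap[String, String]()
    settings.put("spark.master", "local[2]")
    // ConcurrentHashMap.get yields null for an absent key; Option(...) turns that into None.
    val missing = Option(settings.get("spark.executor.memory")) // None
    val present = Option(settings.get("spark.master"))          // Some("local[2]")
    // getAll-style snapshot: copy the entry set into a Scala Array of pairs.
    val all = settings.entrySet().asScala.map(e => (e.getKey, e.getValue)).toArray
    println((missing, present, all.toList))
  }
}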
@@ -224,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
def getAppId: String = get("spark.app.id")

/** Does the configuration contain a given parameter? */
- def contains(key: String): Boolean = settings.contains(key)
+ def contains(key: String): Boolean = settings.containsKey(key)

/** Copy this object */
override def clone: SparkConf = {
- new SparkConf(false).setAll(settings)
+ new SparkConf(false).setAll(getAll)
}

/**
@@ -240,7 +243,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
/** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
* idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
private[spark] def validateSettings() {
if (settings.contains("spark.local.dir")) {
if (contains("spark.local.dir")) {
val msg = "In Spark 1.0 and later spark.local.dir will be overridden by the value set by " +
"the cluster manager (via SPARK_LOCAL_DIRS in mesos/standalone and LOCAL_DIRS in YARN)."
logWarning(msg)
@@ -265,7 +268,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
}

// Validate spark.executor.extraJavaOptions
- settings.get(executorOptsKey).map { javaOpts =>
+ getOption(executorOptsKey).map { javaOpts =>
if (javaOpts.contains("-Dspark")) {
val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " +
"Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit."
@@ -345,7 +348,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
* configuration out for debugging.
*/
def toDebugString: String = {
- settings.toArray.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
+ getAll.sorted.map{case (k, v) => k + "=" + v}.mkString("\n")
}
}

