Commit 196cd7a: Merge remote-tracking branch 'upstream/master'

Augustin Borsu committed Feb 16, 2015
2 parents: 11ca50f + 1115e8e

Showing 186 changed files with 4,704 additions and 2,426 deletions.
1 change: 1 addition & 0 deletions .rat-excludes
@@ -1,4 +1,5 @@
target
cache
.gitignore
.gitattributes
.project
139 changes: 10 additions & 129 deletions assembly/pom.xml
@@ -36,10 +36,6 @@
<spark.jar.dir>scala-${scala.binary.version}</spark.jar.dir>
<spark.jar.basename>spark-assembly-${project.version}-hadoop${hadoop.version}.jar</spark.jar.basename>
<spark.jar>${project.build.directory}/${spark.jar.dir}/${spark.jar.basename}</spark.jar>
<deb.pkg.name>spark</deb.pkg.name>
<deb.install.path>/usr/share/spark</deb.install.path>
<deb.user>root</deb.user>
<deb.bin.filemode>755</deb.bin.filemode>
</properties>

<dependencies>
@@ -118,6 +114,16 @@
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
<filter>
<!-- Exclude libgfortran, libgcc for license issues -->
<artifact>org.jblas:jblas</artifact>
<excludes>
<!-- Linux amd64 is OK; not statically linked -->
<exclude>lib/Linux/i386/**</exclude>
<exclude>lib/Mac OS X/**</exclude>
<exclude>lib/Windows/**</exclude>
</excludes>
</filter>
</filters>
</configuration>
<executions>
@@ -217,131 +223,6 @@
</plugins>
</build>
</profile>
<profile>
<id>deb</id>
<build>
<plugins>
<plugin>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<echo>
NOTE: Debian packaging is deprecated and is scheduled to be removed in Spark 1.4.
</echo>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId>
<version>1.2</version>
<executions>
<execution>
<phase>validate</phase>
<goals>
<goal>create</goal>
</goals>
<configuration>
<shortRevisionLength>8</shortRevisionLength>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.vafer</groupId>
<artifactId>jdeb</artifactId>
<version>0.11</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>jdeb</goal>
</goals>
<configuration>
<deb>${project.build.directory}/${deb.pkg.name}_${project.version}-${buildNumber}_all.deb</deb>
<attach>false</attach>
<compression>gzip</compression>
<dataSet>
<data>
<src>${spark.jar}</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/jars</prefix>
</mapper>
</data>
<data>
<src>${basedir}/src/deb/RELEASE</src>
<type>file</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}</prefix>
</mapper>
</data>
<data>
<src>${basedir}/../conf</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/conf</prefix>
<filemode>${deb.bin.filemode}</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../bin</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/bin</prefix>
<filemode>${deb.bin.filemode}</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../sbin</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/sbin</prefix>
<filemode>${deb.bin.filemode}</filemode>
</mapper>
</data>
<data>
<src>${basedir}/../python</src>
<type>directory</type>
<mapper>
<type>perm</type>
<user>${deb.user}</user>
<group>${deb.user}</group>
<prefix>${deb.install.path}/python</prefix>
<filemode>${deb.bin.filemode}</filemode>
</mapper>
</data>
</dataSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>kinesis-asl</id>
<dependencies>
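For context on the removed `deb` profile: before this commit it could be activated at build time to produce a Debian package via the jdeb plugin (its sources under assembly/src/deb are deleted just below). A minimal sketch of how it was typically invoked, assuming standard Maven conventions; the output name follows the <deb> pattern in the removed configuration:

    # Hypothetical invocation of the now-removed 'deb' profile (deprecated in
    # Spark 1.3, removed here ahead of Spark 1.4); jdeb ran in the 'package' phase.
    mvn -Pdeb -DskipTests package
    # Output landed under assembly/target/ as spark_<version>-<buildNumber>_all.deb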
2 changes: 0 additions & 2 deletions assembly/src/deb/RELEASE
This file was deleted.

8 changes: 0 additions & 8 deletions assembly/src/deb/control/control
This file was deleted.
4 changes: 2 additions & 2 deletions bin/compute-classpath.sh
@@ -76,7 +76,7 @@ fi

num_jars=0

for f in ${assembly_folder}/spark-assembly*hadoop*.jar; do
for f in "${assembly_folder}"/spark-assembly*hadoop*.jar; do
if [[ ! -e "$f" ]]; then
echo "Failed to find Spark assembly in $assembly_folder" 1>&2
echo "You need to build Spark before running this program." 1>&2
@@ -88,7 +88,7 @@ done

if [ "$num_jars" -gt "1" ]; then
echo "Found multiple Spark assembly jars in $assembly_folder:" 1>&2
ls ${assembly_folder}/spark-assembly*hadoop*.jar 1>&2
ls "${assembly_folder}"/spark-assembly*hadoop*.jar 1>&2
echo "Please remove all but one jar." 1>&2
exit 1
fi
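The quoting change above (applied identically to bin/run-example below) matters when the Spark directory contains spaces: quoting the variable prevents word-splitting, while leaving the glob outside the quotes still lets it expand. A minimal sketch, using a hypothetical path:

    # Hypothetical install path containing a space.
    assembly_folder='/opt/my spark/lib'

    # Unquoted: the shell word-splits on the space before globbing, so the
    # pattern falls apart into '/opt/my' and 'spark/lib/spark-assembly*hadoop*.jar'.
    ls ${assembly_folder}/spark-assembly*hadoop*.jar    # breaks

    # Quoted variable, unquoted glob: the path stays intact and the
    # wildcard still expands against the real directory.
    ls "${assembly_folder}"/spark-assembly*hadoop*.jar  # works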
4 changes: 2 additions & 2 deletions bin/run-example
@@ -42,7 +42,7 @@ fi

JAR_COUNT=0

for f in ${JAR_PATH}/spark-examples-*hadoop*.jar; do
for f in "${JAR_PATH}"/spark-examples-*hadoop*.jar; do
if [[ ! -e "$f" ]]; then
echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" 1>&2
echo "You need to build Spark before running this program" 1>&2
@@ -54,7 +54,7 @@ done

if [ "$JAR_COUNT" -gt "1" ]; then
echo "Found multiple Spark examples assembly jars in ${JAR_PATH}" 1>&2
ls ${JAR_PATH}/spark-examples-*hadoop*.jar 1>&2
ls "${JAR_PATH}"/spark-examples-*hadoop*.jar 1>&2
echo "Please remove all but one jar." 1>&2
exit 1
fi
10 changes: 0 additions & 10 deletions core/pom.xml
@@ -329,16 +329,6 @@
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymockclassextension</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>asm</groupId>
<artifactId>asm</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
4 changes: 2 additions & 2 deletions core/src/main/resources/org/apache/spark/ui/static/webui.css
@@ -196,7 +196,7 @@ span.additional-metric-title {

/* Hide all additional metrics by default. This is done here rather than using JavaScript to
* avoid slow page loads for stage pages with large numbers (e.g., thousands) of tasks. */
.scheduler_delay, .deserialization_time, .fetch_wait_time, .serialization_time,
.getting_result_time {
.scheduler_delay, .deserialization_time, .fetch_wait_time, .shuffle_read_remote,
.serialization_time, .getting_result_time {
display: none;
}
18 changes: 15 additions & 3 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -37,6 +37,7 @@ import org.apache.ivy.core.settings.IvySettings
import org.apache.ivy.plugins.matcher.GlobPatternMatcher
import org.apache.ivy.plugins.resolver.{ChainResolver, IBiblioResolver}

import org.apache.spark.SPARK_VERSION
import org.apache.spark.deploy.rest._
import org.apache.spark.executor._
import org.apache.spark.util.{ChildFirstURLClassLoader, MutableURLClassLoader, Utils}
@@ -83,14 +84,25 @@ object SparkSubmit {
// Exposed for testing
private[spark] var exitFn: () => Unit = () => System.exit(1)
private[spark] var printStream: PrintStream = System.err
private[spark] def printWarning(str: String) = printStream.println("Warning: " + str)
private[spark] def printErrorAndExit(str: String) = {
private[spark] def printWarning(str: String): Unit = printStream.println("Warning: " + str)
private[spark] def printErrorAndExit(str: String): Unit = {
printStream.println("Error: " + str)
printStream.println("Run with --help for usage help or --verbose for debug output")
exitFn()
}
private[spark] def printVersionAndExit(): Unit = {
printStream.println("""Welcome to
____ __
/ __/__ ___ _____/ /__
_\ \/ _ \/ _ `/ __/ '_/
/___/ .__/\_,_/_/ /_/\_\ version %s
/_/
""".format(SPARK_VERSION))
printStream.println("Type --help for more information.")
exitFn()
}

def main(args: Array[String]) {
def main(args: Array[String]): Unit = {
val appArgs = new SparkSubmitArguments(args)
if (appArgs.verbose) {
printStream.println(appArgs)
core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -417,6 +417,9 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
verbose = true
parse(tail)

case ("--version") :: tail =>
SparkSubmit.printVersionAndExit()

case EQ_SEPARATED_OPT(opt, value) :: tail =>
parse(opt :: value :: tail)

@@ -485,6 +488,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
|
| --help, -h Show this help message and exit
| --verbose, -v Print additional debug output
| --version, Print the version of current Spark
|
| Spark standalone with cluster deploy mode only:
| --driver-cores NUM Cores for driver (Default: 1).
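A quick usage sketch of the new flag; the banner comes from printVersionAndExit in SparkSubmit.scala above, with the actual version substituted from SPARK_VERSION at build time:

    $ ./bin/spark-submit --version
    Welcome to
          ____              __
         / __/__  ___ _____/ /__
        _\ \/ _ \/ _ `/ __/ '_/
       /___/ .__/\_,_/_/ /_/\_\   version <SPARK_VERSION>
          /_/

    Type --help for more information.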
core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -247,6 +247,7 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis
*/
private def replay(eventLog: FileStatus, bus: ReplayListenerBus): FsApplicationHistoryInfo = {
val logPath = eventLog.getPath()
logInfo(s"Replaying log path: $logPath")
val (logInput, sparkVersion) =
if (isLegacyLogDirectory(eventLog)) {
openLegacyEventLog(logPath)
@@ -256,7 +257,7 @@
try {
val appListener = new ApplicationEventListener
bus.addListener(appListener)
bus.replay(logInput, sparkVersion)
bus.replay(logInput, sparkVersion, logPath.toString)
new FsApplicationHistoryInfo(
logPath.getName(),
appListener.appId.getOrElse(logPath.getName()),
core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -761,7 +761,7 @@ private[spark] class Master(
val ui = SparkUI.createHistoryUI(new SparkConf, replayBus, new SecurityManager(conf),
appName + " (completed)", HistoryServer.UI_PATH_PREFIX + s"/${app.id}")
try {
replayBus.replay(logInput, sparkVersion)
replayBus.replay(logInput, sparkVersion, eventLogFile)
} finally {
logInput.close()
}