Add package-info.java, clean up javadocs.
Mostly adjusts visibility modifiers so that most of the internal APIs drop out
of the generated javadoc. The remaining leakage is due to CommandUtils.scala,
which lives in a different package and therefore needs some methods to be
protected instead of package-private (see the sketch after the changed-file summary below).
Marcelo Vanzin committed Jan 15, 2015
1 parent f7cacff commit 44cd5f7
Showing 8 changed files with 103 additions and 61 deletions.
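
The visibility pattern at the heart of this change, as a minimal sketch (simplified names, not the actual Spark sources): the command-building hook becomes package-private with a throwing default body instead of protected abstract, so it no longer appears in the public javadoc, while the helpers that the cross-package Scala subclass in CommandUtils.scala needs remain protected.

import java.util.ArrayList;
import java.util.List;

// Minimal sketch of the visibility trick used in this commit. "Base" stands in
// for AbstractLauncher; this is illustrative, not the real Spark code.
public abstract class Base {

  // Package-private with a throwing default body: invisible in the public
  // javadoc, yet same-package subclasses are still expected to override it.
  List<String> buildLauncherCommand() {
    throw new UnsupportedOperationException("Subclasses must implement this method.");
  }

  // Stays protected rather than package-private: a subclass in a *different*
  // package (the Scala CommandLauncher in CommandUtils.scala) must be able
  // to call it.
  protected List<String> buildJavaCommand(String extraClassPath) {
    List<String> cmd = new ArrayList<String>();
    cmd.add("java");
    cmd.add("-cp");
    // Stand-in for the real classpath assembly done by buildClassPath().
    cmd.add(extraClassPath != null ? extraClassPath : ".");
    return cmd;
  }
}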
core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala
@@ -59,8 +59,7 @@ object CommandUtils extends Logging {
private def buildCommandSeq(command: Command, memory: Int, sparkHome: String): Seq[String] = {
// SPARK-698: do not call the run.cmd script, as process.destroy()
// fails to kill a process tree on Windows
- val cmd = new CommandLauncher(sparkHome, memory, command.environment)
-   .buildLauncherCommand(command.environment)
+ val cmd = new CommandLauncher(sparkHome, memory, command.environment).buildCommand()
cmd.toSeq ++ Seq(command.mainClass) ++ command.arguments
}

@@ -117,10 +116,8 @@ private class CommandLauncher(sparkHome: String, memoryMb: Int, env: Map[String,

setSparkHome(sparkHome)

- override def buildLauncherCommand(env: JMap[String, String]): JList[String] = {
-   val cmd = buildJavaCommand()
-   cmd.add("-cp")
-   cmd.add(buildClassPath(null).mkString(File.pathSeparator))
+ def buildCommand(): JList[String] = {
+   val cmd = buildJavaCommand(null)
cmd.add(s"-Xms${memoryMb}M")
cmd.add(s"-Xmx${memoryMb}M")
addOptionString(cmd, getenv("SPARK_JAVA_OPTS"))
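
Why the Scala side now defines buildCommand() instead of overriding: a package-private Java method cannot be overridden from another package, so the worker's launcher exposes its own entry point and delegates to the protected buildJavaCommand(). A hypothetical Java transliteration, reusing the Base sketch above (the real code is the private Scala CommandLauncher class in CommandUtils.scala):

import java.util.List;

// Hypothetical transliteration of CommandUtils.scala's CommandLauncher,
// reusing the "Base" sketch above; names and details are simplified.
class WorkerCommandLauncher extends Base {

  private final int memoryMb;

  WorkerCommandLauncher(int memoryMb) {
    this.memoryMb = memoryMb;
  }

  // Not an @Override: buildLauncherCommand() is package-private in Base and
  // this class (conceptually) lives in another package, so it gets its own
  // entry point instead.
  List<String> buildCommand() {
    // buildJavaCommand() now appends "-cp" and the classpath itself.
    List<String> cmd = buildJavaCommand(null);
    cmd.add("-Xms" + memoryMb + "M");
    cmd.add("-Xmx" + memoryMb + "M");
    return cmd;
  }
}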
launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
@@ -42,15 +42,15 @@ public abstract class AbstractLauncher<T extends AbstractLauncher> extends Launc

private static final String ENV_SPARK_HOME = "SPARK_HOME";
private static final String DEFAULT_PROPERTIES_FILE = "spark-defaults.conf";
- protected static final String DEFAULT_MEM = "512m";
+ static final String DEFAULT_MEM = "512m";

- protected String javaHome;
- protected String sparkHome;
- protected String propertiesFile;
- protected final Map<String, String> conf;
- protected final Map<String, String> launcherEnv;
+ String javaHome;
+ String sparkHome;
+ String propertiesFile;
+ final Map<String, String> conf;
+ final Map<String, String> launcherEnv;

- protected AbstractLauncher() {
+ AbstractLauncher() {
this(Collections.<String, String>emptyMap());
}

@@ -95,10 +95,16 @@ public T setConf(String key, String value) {
/**
* Launchers should implement this to create the command to be executed. This method should
* also update the environment map with any environment variables needed by the child process.
+ * <p/>
+ * Note that this method is a no-op in the base class, even though subclasses in this package
+ * really must implement it. This approach was taken to allow this method to be package private
+ * while still allowing CommandUtils.scala to extend this class for its use.
*
* @param env Map containing environment variables to set for the Spark job.
*/
- protected abstract List<String> buildLauncherCommand(Map<String, String> env) throws IOException;
+ List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
+   throw new UnsupportedOperationException("Subclasses must implement this method.");
+ }

/**
* Prepares the launcher command for execution from a shell script. This is used by the `Main`
@@ -115,7 +121,7 @@ List<String> buildShellCommand() throws IOException {
* user-specified properties file, or the spark-defaults.conf file under the Spark configuration
* directory.
*/
- protected Properties loadPropertiesFile() throws IOException {
+ Properties loadPropertiesFile() throws IOException {
Properties props = new Properties();
File propsFile;
if (propertiesFile != null) {
@@ -144,14 +150,14 @@ protected Properties loadPropertiesFile() throws IOException {
return props;
}

- protected String getSparkHome() {
+ String getSparkHome() {
String path = getenv(ENV_SPARK_HOME);
checkState(path != null,
"Spark home not found; set it explicitly or use the SPARK_HOME environment variable.");
return path;
}

- protected List<String> buildJavaCommand() throws IOException {
+ protected List<String> buildJavaCommand(String extraClassPath) throws IOException {
List<String> cmd = new ArrayList<String>();
if (javaHome == null) {
cmd.add(join(File.separator, System.getProperty("java.home"), "bin", "java"));
@@ -180,6 +186,8 @@ protected List<String> buildJavaCommand() throws IOException {
}
}

cmd.add("-cp");
cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
return cmd;
}

@@ -196,7 +204,7 @@ protected void addOptionString(List<String> cmd, String options) {
* each entry is formatted in the way expected by <i>java.net.URLClassLoader</i> (more
* specifically, with trailing slashes for directories).
*/
- protected List<String> buildClassPath(String appClassPath) throws IOException {
+ List<String> buildClassPath(String appClassPath) throws IOException {
String sparkHome = getSparkHome();
String scala = getScalaVersion();

@@ -313,7 +321,7 @@ private void addToClassPath(List<String> cp, String entries) {
}
}

- protected String getScalaVersion() {
+ String getScalaVersion() {
String scala = getenv("SPARK_SCALA_VERSION");
if (scala != null) {
return scala;
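
For reference, the lookup order that loadPropertiesFile() documents, as a hedged sketch: an explicitly set properties file wins, otherwise spark-defaults.conf under the Spark configuration directory is tried. The conf-directory path below is an assumption for illustration; the real method's IO and error handling are not shown.

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;

// Sketch of the documented precedence only; not the actual Spark method.
class PropertiesLookupSketch {
  static Properties load(String propertiesFile, String sparkHome) throws IOException {
    Properties props = new Properties();
    File f = (propertiesFile != null)
        ? new File(propertiesFile)  // explicitly configured file wins
        : new File(sparkHome, "conf" + File.separator + "spark-defaults.conf"); // assumed location
    if (f.isFile()) {
      FileReader reader = new FileReader(f);
      try {
        props.load(reader);
      } finally {
        reader.close();
      }
    }
    return props;
  }
}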
launcher/src/main/java/org/apache/spark/launcher/LauncherCommon.java
@@ -51,12 +51,12 @@ public class LauncherCommon {
public static final String EXECUTOR_CORES = "spark.executor.cores";

/** Returns whether the given string is null or empty. */
- protected static boolean isEmpty(String s) {
+ static boolean isEmpty(String s) {
return s == null || s.isEmpty();
}

/** Joins a list of strings using the given separator. */
- protected static String join(String sep, String... elements) {
+ static String join(String sep, String... elements) {
StringBuilder sb = new StringBuilder();
for (String e : elements) {
if (e != null) {
@@ -70,7 +70,7 @@ protected static String join(String sep, String... elements) {
}

/** Joins a list of strings using the given separator. */
- protected static String join(String sep, Iterable<String> elements) {
+ static String join(String sep, Iterable<String> elements) {
StringBuilder sb = new StringBuilder();
for (String e : elements) {
if (e != null) {
@@ -84,7 +84,7 @@ protected static String join(String sep, Iterable<String> elements) {
}

/** Returns the first value mapped to the given key in the given maps. */
- protected static String find(String key, Map<?, ?>... maps) {
+ static String find(String key, Map<?, ?>... maps) {
for (Map<?, ?> map : maps) {
String value = (String) map.get(key);
if (!isEmpty(value)) {
@@ -95,7 +95,7 @@ protected static String find(String key, Map<?, ?>... maps) {
}

/** Returns the first non-empty, non-null string in the given list. */
- protected static String firstNonEmpty(String... candidates) {
+ static String firstNonEmpty(String... candidates) {
for (String s : candidates) {
if (!isEmpty(s)) {
return s;
@@ -105,7 +105,7 @@ protected static String firstNonEmpty(String... candidates) {
}

/** Returns the name of the env variable that holds the native library path. */
- protected static String getLibPathEnvName() {
+ static String getLibPathEnvName() {
if (isWindows()) {
return "PATH";
}
@@ -119,7 +119,7 @@ protected static String getLibPathEnvName() {
}

/** Returns whether the OS is Windows. */
- protected static boolean isWindows() {
+ static boolean isWindows() {
String os = System.getProperty("os.name");
return os.startsWith("Windows");
}
@@ -128,7 +128,7 @@ protected static boolean isWindows() {
* Updates the user environment to contain the merged value of "envKey" after appending
* the given path list.
*/
- protected static void mergeEnvPathList(Map<String, String> userEnv, String envKey, String pathList) {
+ static void mergeEnvPathList(Map<String, String> userEnv, String envKey, String pathList) {
if (!isEmpty(pathList)) {
String current = firstNonEmpty(userEnv.get(envKey), System.getenv(envKey));
userEnv.put(envKey, join(File.pathSeparator, current, pathList));
@@ -142,7 +142,7 @@ protected static void mergeEnvPathList(Map<String, String> userEnv, String envKe
* Input: "\"ab cd\" efgh 'i \" j'"
* Output: [ "ab cd", "efgh", "i \" j" ]
*/
- protected static List<String> parseOptionString(String s) {
+ static List<String> parseOptionString(String s) {
List<String> opts = new ArrayList<String>();
StringBuilder opt = new StringBuilder();
boolean inOpt = false;
@@ -224,21 +224,21 @@ protected static List<String> parseOptionString(String s) {
}

/** Throws IllegalArgumentException if the given object is null. */
- protected static void checkNotNull(Object o, String arg) {
+ static void checkNotNull(Object o, String arg) {
if (o == null) {
throw new IllegalArgumentException(String.format("'%s' must not be null.", arg));
}
}

/** Throws IllegalArgumentException with the given message if the check is false. */
- protected static void checkArgument(boolean check, String msg, Object... args) {
+ static void checkArgument(boolean check, String msg, Object... args) {
if (!check) {
throw new IllegalArgumentException(String.format(msg, args));
}
}

/** Throws IllegalStateException with the given message if the check is false. */
- protected static void checkState(boolean check, String msg, Object... args) {
+ static void checkState(boolean check, String msg, Object... args) {
if (!check) {
throw new IllegalStateException(String.format(msg, args));
}
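
A quick same-package usage sketch of these helpers now that they are package-private; the parseOptionString expectation is taken verbatim from its javadoc above, and the rest follows from the visible implementations.

import java.io.File;
import java.util.List;

// Illustrative only; assumes same-package access to the helpers above.
class LauncherCommonDemo {
  static void demo() {
    // From the parseOptionString javadoc:
    //   Input:  "\"ab cd\" efgh 'i \" j'"
    //   Output: [ "ab cd", "efgh", "i \" j" ]
    List<String> opts = LauncherCommon.parseOptionString("\"ab cd\" efgh 'i \" j'");

    // join() skips null entries, so this yields "a.jar:b.jar" on Unix.
    String cp = LauncherCommon.join(File.pathSeparator, "a.jar", null, "b.jar");

    // firstNonEmpty() skips null and empty strings, so this yields "512m".
    String mem = LauncherCommon.firstNonEmpty(null, "", "512m");
  }
}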
4 changes: 2 additions & 2 deletions launcher/src/main/java/org/apache/spark/launcher/Main.java
@@ -25,7 +25,7 @@
/**
* Command line interface for the Spark launcher. Used internally by Spark scripts.
*/
- public class Main extends LauncherCommon {
+ class Main extends LauncherCommon {

/**
* Usage: Main [class] [class args]
@@ -97,7 +97,7 @@ public static void main(String[] argsArray) throws Exception {
private static class UsageLauncher extends AbstractLauncher<UsageLauncher> {

@Override
- protected List<String> buildLauncherCommand(Map<String, String> env) {
+ List<String> buildLauncherCommand(Map<String, String> env) {
if (isWindows()) {
return Arrays.asList("set SPARK_LAUNCHER_USAGE_ERROR=1");
} else {
launcher/src/main/java/org/apache/spark/launcher/SparkClassLauncher.java
@@ -41,7 +41,7 @@ class SparkClassLauncher extends AbstractLauncher<SparkClassLauncher> {
}

@Override
- protected List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
+ List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
List<String> javaOptsKeys = new ArrayList<String>();
String memKey = null;
String extraClassPath = null;
@@ -91,16 +91,14 @@ protected List<String> buildLauncherCommand(Map<String, String> env) throws IOEx
return buildSparkSubmitCommand(env);
}

- List<String> cmd = buildJavaCommand();
+ List<String> cmd = buildJavaCommand(extraClassPath);
for (String key : javaOptsKeys) {
addOptionString(cmd, System.getenv(key));
}

String mem = firstNonEmpty(memKey != null ? System.getenv(memKey) : null, DEFAULT_MEM);
cmd.add("-Xms" + mem);
cmd.add("-Xmx" + mem);
cmd.add("-cp");
cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));
cmd.add(className);
cmd.addAll(classArgs);
return cmd;
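
The memory value above resolves through the same firstNonEmpty() pattern: the class-specific environment variable chosen for memKey (in the elided part of the method) wins when set, otherwise DEFAULT_MEM applies. A hypothetical walk-through, assuming memKey were "SPARK_DAEMON_MEMORY":

// Hypothetical values for illustration; the real memKey assignments live in
// the elided portion of this method.
String memKey = "SPARK_DAEMON_MEMORY";                          // assumption, not from the diff
String fromEnv = memKey != null ? System.getenv(memKey) : null; // e.g. "1g", or null if unset
String mem = firstNonEmpty(fromEnv, DEFAULT_MEM);               // null/empty falls back to "512m"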
38 changes: 16 additions & 22 deletions launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -31,25 +31,22 @@
* Use this class to start Spark applications programmatically. The class uses a builder pattern
* to allow clients to configure the Spark application and launch it as a child process.
* <p/>
- * There's also support for running the application on a separate thread, although that is to
- * be considered experimental and avoided in production environments.
- * <p/>
* Note that launching Spark applications using this class will not automatically load environment
* variables from the "spark-env.sh" or "spark-env.cmd" scripts in the configuration directory.
*/
public class SparkLauncher extends AbstractLauncher<SparkLauncher> {

- protected boolean verbose;
- protected String appName;
- protected String master;
- protected String deployMode;
- protected String mainClass;
- protected String appResource;
- protected final List<String> sparkArgs;
- protected final List<String> appArgs;
- protected final List<String> jars;
- protected final List<String> files;
- protected final List<String> pyFiles;
+ boolean verbose;
+ String appName;
+ String master;
+ String deployMode;
+ String mainClass;
+ String appResource;
+ final List<String> sparkArgs;
+ final List<String> appArgs;
+ final List<String> jars;
+ final List<String> files;
+ final List<String> pyFiles;

public SparkLauncher() {
this.sparkArgs = new ArrayList<String>();
@@ -218,20 +215,17 @@ List<String> buildSparkSubmitArgs() {
}

@Override
- protected List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
-   List<String> cmd = buildJavaCommand();
-   addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
-   addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));
-
+ List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
// Load the properties file and check whether spark-submit will be running the app's driver
// or just launching a cluster app. When running the driver, the JVM's argument will be
// modified to cover the driver's configuration.
Properties props = loadPropertiesFile();
boolean isClientMode = isClientMode(props);

String extraClassPath = isClientMode ? find(DRIVER_EXTRA_CLASSPATH, conf, props) : null;
cmd.add("-cp");
cmd.add(join(File.pathSeparator, buildClassPath(extraClassPath)));

List<String> cmd = buildJavaCommand(extraClassPath);
addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));

if (isClientMode) {
// Figuring out where the memory value come from is a little tricky due to precedence.
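
The reordering in this hunk is forced by the buildJavaCommand() change: since the java command now carries -cp itself, the driver's extra classpath must be resolved from the properties file before the command is built. Condensed from the diff above (not the complete method body):

// Order of operations after this change, condensed from the diff above.
Properties props = loadPropertiesFile();             // 1. read config first
boolean isClientMode = isClientMode(props);          // 2. will we run the driver in-process?
String extraClassPath = isClientMode
    ? find(DRIVER_EXTRA_CLASSPATH, conf, props)      // 3. driver classpath, client mode only
    : null;
List<String> cmd = buildJavaCommand(extraClassPath); // 4. "-cp" is baked in here
addOptionString(cmd, System.getenv("SPARK_SUBMIT_OPTS"));
addOptionString(cmd, System.getenv("SPARK_JAVA_OPTS"));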
launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCliLauncher.java
@@ -37,7 +37,7 @@
* <p/>
* This class has also some special features to aid PySparkLauncher.
*/
- public class SparkSubmitCliLauncher extends SparkLauncher {
+ class SparkSubmitCliLauncher extends SparkLauncher {

/**
* Name of the app resource used to identify the PySpark shell. The command line parser expects
@@ -83,7 +83,7 @@ public class SparkSubmitCliLauncher extends SparkLauncher {
}

@Override
- protected List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
+ List<String> buildLauncherCommand(Map<String, String> env) throws IOException {
if (PYSPARK_SHELL.equals(appResource)) {
return buildPySparkShellCommand(env);
} else {
45 changes: 45 additions & 0 deletions launcher/src/main/java/org/apache/spark/launcher/package-info.java
@@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
* Library for launching Spark applications.
* <p/>
* This library allows applications to launch Spark programmatically. There's only one entry
* point to the library - the {@link org.apache.spark.launcher.SparkLauncher} class.
* <p/>
* To launch a Spark application, just instantiate a {@link org.apache.spark.launcher.SparkLauncher}
* and configure the application to run. For example:
*
* <pre>
* {@code
* import org.apache.spark.launcher.SparkLauncher;
*
* public class MyLauncher {
* public static void main(String[] args) throws Exception {
* Process spark = new SparkLauncher()
* .setAppResource("/my/app.jar")
* .setMainClass("my.spark.app.Main")
* .setMaster("local")
* .setConf(SparkLauncher.DRIVER_MEMORY, "2g")
* .launch();
* spark.waitFor();
* }
* }
* }
* </pre>
*/
package org.apache.spark.launcher;
