From d8a5a126e244d28163c7db6a519773940fe13504 Mon Sep 17 00:00:00 2001
From: Kanwaljit Singh
Date: Wed, 30 Jul 2014 15:03:20 +0530
Subject: [PATCH] SPARK-2641: Fixing how spark arguments are loaded from
 properties file for num executors

---
 .../scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 57655aa4c32b1..04f686b2c24e6 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -105,6 +105,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
       .getOrElse(defaultProperties.get("spark.cores.max").orNull)
     name = Option(name).getOrElse(defaultProperties.get("spark.app.name").orNull)
     jars = Option(jars).getOrElse(defaultProperties.get("spark.jars").orNull)
+    numExecutors = Option(numExecutors)
+      .getOrElse(defaultProperties.get("spark.executor.instances").orNull)
     // This supports env vars in older versions of Spark
     master = Option(master).getOrElse(System.getenv("MASTER"))
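
For context, the two added lines reuse the null-fallback idiom already applied to name, jars, and cores above: a value set on the command line (non-null) wins; otherwise the value loaded from the properties file fills in; otherwise the field stays null. Below is a minimal, standalone Scala sketch of that idiom. It is not Spark source; the object name and the inline map standing in for the merged default properties are illustrative only.

    object FallbackSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical stand-in for the properties loaded from a
        // --properties-file (e.g. spark-defaults.conf).
        val defaultProperties = Map("spark.executor.instances" -> "4")

        // Simulates --num-executors not being passed on the command line.
        var numExecutors: String = null

        // Same idiom as the patch: Option(x) is None when x is null, so the
        // properties-file value (or null, if absent there too) fills the gap.
        numExecutors = Option(numExecutors)
          .getOrElse(defaultProperties.get("spark.executor.instances").orNull)

        println(s"numExecutors = $numExecutors") // prints: numExecutors = 4
      }
    }

The practical effect of the patch, as the subject line describes, is that spark.executor.instances set in a properties file now populates numExecutors when --num-executors is omitted, instead of being silently ignored.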