diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 77de0314f8f6b..3bdb088ac2253 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -286,11 +286,11 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
 
     if (master.startsWith("yarn")) {
       val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
-      val hasHadoopProp = sparkProperties.contains("spark.yarn.conf.dir")
+      val hasHadoopProp = sparkProperties.contains("spark.hadoop.conf.dir")
       if (!hasHadoopEnv && !hasHadoopProp && !Utils.isTesting) {
         error(s"When running with master '$master' " +
           "either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment, +" +
-          "or spark.yarn.conf.dir in the spark properties.")
+          "or spark.hadoop.conf.dir in the spark properties.")
       }
     }
 
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
index ead48d3df1c2c..d1af09c5bfa2d 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java
@@ -200,7 +200,7 @@ List buildClassPath(String appClassPath) throws IOException {
 
     addToClassPath(cp, getenv("HADOOP_CONF_DIR"));
     addToClassPath(cp, getenv("YARN_CONF_DIR"));
-    addToClassPath(cp, getEffectiveConfig().get("spark.yarn.conf.dir"));
+    addToClassPath(cp, getEffectiveConfig().get("spark.hadoop.conf.dir"));
    addToClassPath(cp, getenv("SPARK_DIST_CLASSPATH"));
     return new ArrayList<>(cp);
   }
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 8e5e7f563b836..2ac2a62f6bc74 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -695,7 +695,7 @@ private[spark] class Client(
     }
 
     val confDirsEnvKeys = Seq("HADOOP_CONF_DIR", "YARN_CONF_DIR")
-    val configDirProp = sparkConf.getOption("spark.yarn.conf.dir")
+    val configDirProp = sparkConf.getOption("spark.hadoop.conf.dir")
     val confDirPaths = (confDirsEnvKeys.map(sys.env.get) :+ configDirProp).flatMap(_.toList)
 
     confDirPaths.foreach { path =>
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 816e8a2138a28..8dd1e97ef4aed 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -133,7 +133,7 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
       appArgs = Seq("key=value", "spark.test.key=testvalue", result.getAbsolutePath()),
       extraConf = Map(
         "spark.hadoop.key" -> "value",
-        "spark.yarn.conf.dir" -> customConf.getAbsolutePath))
+        "spark.hadoop.conf.dir" -> customConf.getAbsolutePath))
     checkResult(finalState, result)
   }
 