diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 15565e19b3d24..096c35079b22e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -125,7 +125,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
    * When this is called, `sparkProperties` is already filled with configs from the latter.
    */
   private def mergeDefaultSparkProperties(): Unit = {
-    // Use common defaults file, if not specified by user
     // Honor --conf before the defaults file
     defaultSparkProperties.foreach { case (k, v) =>
       if (!sparkProperties.contains(k)) {
@@ -134,7 +133,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     }
 
     // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
-    Option(Utils.getDefaultPropertiesFile(env)).foreach { filename =>
+    val defaultSparkConf = Utils.getDefaultPropertiesFile(env)
+    Option(defaultSparkConf).foreach { filename =>
       val properties = Utils.getPropertiesFromFile(filename)
       properties.foreach { case (k, v) =>
         if (!sparkProperties.contains(k)) {
@@ -142,6 +142,10 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         }
       }
     }
+
+    if (propertiesFile == null) {
+      propertiesFile = defaultSparkConf
+    }
   }
 
   /**
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index f55c00d7d61a5..42373fae649be 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -1113,6 +1113,23 @@ class SparkSubmitSuite
     }
   }
 
+  test("SPARK-48392: Allow both spark-defaults.conf and properties file") {
+    forConfDir(Map("spark.executor.memory" -> "3g")) { path =>
+      withPropertyFile("spark-conf.properties", Map("spark.executor.cores" -> "16")) { propsFile =>
+        val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
+        val args = Seq(
+          "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
+          "--name", "testApp",
+          "--master", "local",
+          "--properties-file", propsFile,
+          unusedJar.toString)
+        val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> path))
+        appArgs.executorMemory should be("3g")
+        appArgs.executorCores should be("16")
+      }
+    }
+  }
+
   test("support glob path") {
     withTempDir { tmpJarDir =>
       withTempDir { tmpFileDir =>
@@ -1623,6 +1640,22 @@ class SparkSubmitSuite
     }
   }
 
+  private def withPropertyFile(fileName: String, conf: Map[String, String])(f: String => Unit) = {
+    withTempDir { tmpDir =>
+      val props = new java.util.Properties()
+      val propsFile = File.createTempFile(fileName, "", tmpDir)
+      val propsOutputStream = new FileOutputStream(propsFile)
+      try {
+        conf.foreach { case (k, v) => props.put(k, v) }
+        props.store(propsOutputStream, "")
+      } finally {
+        propsOutputStream.close()
+      }
+
+      f(propsFile.getPath)
+    }
+  }
+
   private def updateConfWithFakeS3Fs(conf: Configuration): Unit = {
     conf.set("fs.s3a.impl", classOf[TestFileSystem].getCanonicalName)
     conf.set("fs.s3a.impl.disable.cache", "true")
@@ -1694,40 +1727,31 @@ class SparkSubmitSuite
     val infixDelimFromFile = s"${delimKey}infixDelimFromFile" -> s"${CR}blah${LF}"
     val nonDelimSpaceFromFile = s"${delimKey}nonDelimSpaceFromFile" -> " blah\f"
 
-    val testProps = Seq(leadingDelimKeyFromFile, trailingDelimKeyFromFile, infixDelimFromFile,
+    val testProps = Map(leadingDelimKeyFromFile, trailingDelimKeyFromFile, infixDelimFromFile,
       nonDelimSpaceFromFile)
 
-    val props = new java.util.Properties()
-    val propsFile = File.createTempFile("test-spark-conf", ".properties",
-      Utils.createTempDir())
-    val propsOutputStream = new FileOutputStream(propsFile)
-    try {
-      testProps.foreach { case (k, v) => props.put(k, v) }
-      props.store(propsOutputStream, "test whitespace")
-    } finally {
-      propsOutputStream.close()
-    }
+    withPropertyFile("test-spark-conf.properties", testProps) { propsFile =>
+      val clArgs = Seq(
+        "--class", "org.SomeClass",
+        "--conf", s"${lineFeedFromCommandLine._1}=${lineFeedFromCommandLine._2}",
+        "--conf", "spark.master=yarn",
+        "--properties-file", propsFile,
+        "thejar.jar")
 
-    val clArgs = Seq(
-      "--class", "org.SomeClass",
-      "--conf", s"${lineFeedFromCommandLine._1}=${lineFeedFromCommandLine._2}",
-      "--conf", "spark.master=yarn",
-      "--properties-file", propsFile.getPath,
-      "thejar.jar")
+      val appArgs = new SparkSubmitArguments(clArgs)
+      val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)
 
-    val appArgs = new SparkSubmitArguments(clArgs)
-    val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)
+      Seq(
+        lineFeedFromCommandLine,
+        leadingDelimKeyFromFile,
+        trailingDelimKeyFromFile,
+        infixDelimFromFile
+      ).foreach { case (k, v) =>
+        conf.get(k) should be (v)
+      }
 
-    Seq(
-      lineFeedFromCommandLine,
-      leadingDelimKeyFromFile,
-      trailingDelimKeyFromFile,
-      infixDelimFromFile
-    ).foreach { case (k, v) =>
-      conf.get(k) should be (v)
+      conf.get(nonDelimSpaceFromFile._1) should be ("blah")
     }
-
-    conf.get(nonDelimSpaceFromFile._1) should be ("blah")
   }
 
   test("get a Spark configuration from arguments") {
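
Note (not part of the patch): with this change, `--properties-file` no longer replaces
`spark-defaults.conf`. Both are consulted, with explicit `--conf` values winning over the
properties file, which in turn wins over `$SPARK_CONF_DIR/spark-defaults.conf` (keys are
only filled in when absent, per the hunks above). A minimal Scala sketch of the resulting
behavior, mirroring the new test; the main class, jar, paths, and config values are
hypothetical:

    package org.apache.spark.deploy  // SparkSubmitArguments is private[deploy]

    object PropertiesMergeExample {
      def main(argv: Array[String]): Unit = {
        // Assume $SPARK_CONF_DIR/spark-defaults.conf contains: spark.executor.memory=3g
        // and /tmp/spark-conf.properties contains:             spark.executor.cores=16
        val args = Seq(
          "--class", "org.example.MyApp",                    // hypothetical main class
          "--master", "local",
          "--properties-file", "/tmp/spark-conf.properties", // hypothetical path
          "app.jar")                                         // hypothetical primary resource
        val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> "/tmp/conf"))
        // Both sources now contribute a setting instead of one shadowing the other:
        assert(appArgs.executorMemory == "3g")  // from spark-defaults.conf
        assert(appArgs.executorCores == "16")   // from the --properties-file
      }
    }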