Skip to content

Commit

Permalink
fix test failure and add a new test
Browse files Browse the repository at this point in the history
  • Loading branch information
sunchao committed May 28, 2024
1 parent 6c5861b commit efec497
Show file tree
Hide file tree
Showing 2 changed files with 58 additions and 30 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
* When this is called, `sparkProperties` is already filled with configs from the latter.
*/
private def mergeDefaultSparkProperties(): Unit = {
// Use common defaults file, if not specified by user
// Honor --conf before the defaults file
defaultSparkProperties.foreach { case (k, v) =>
if (!sparkProperties.contains(k)) {
Expand All @@ -134,14 +133,19 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
}

// Also load properties from `spark-defaults.conf` if they do not exist in the properties file
Option(Utils.getDefaultPropertiesFile(env)).foreach { filename =>
val defaultSparkConf = Utils.getDefaultPropertiesFile(env)
Option(defaultSparkConf).foreach { filename =>
val properties = Utils.getPropertiesFromFile(filename)
properties.foreach { case (k, v) =>
if (!sparkProperties.contains(k)) {
sparkProperties(k) = v
}
}
}

if (propertiesFile == null) {
propertiesFile = defaultSparkConf
}
}

/**
Expand Down
80 changes: 52 additions & 28 deletions core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1113,6 +1113,23 @@ class SparkSubmitSuite
}
}

test("SPARK-48392: Allow both spark-defaults.conf and properties file") {
  // Settings coming from SPARK_CONF_DIR's spark-defaults.conf and from an
  // explicit --properties-file should both end up in the parsed arguments.
  forConfDir(Map("spark.executor.memory" -> "3g")) { confDir =>
    withPropertyFile("spark-conf.properties", Map("spark.executor.cores" -> "16")) { propertiesFile =>
      val emptyJar = TestUtils.createJarWithClasses(Seq.empty)
      val submitArgs = Seq(
        "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
        "--name", "testApp",
        "--master", "local",
        "--properties-file", propertiesFile,
        emptyJar.toString)
      val parsed = new SparkSubmitArguments(submitArgs, env = Map("SPARK_CONF_DIR" -> confDir))
      // Memory comes from spark-defaults.conf; cores from the properties file.
      parsed.executorMemory should be ("3g")
      parsed.executorCores should be ("16")
    }
  }
}

test("support glob path") {
withTempDir { tmpJarDir =>
withTempDir { tmpFileDir =>
Expand Down Expand Up @@ -1623,6 +1640,22 @@ class SparkSubmitSuite
}
}

/**
 * Writes `conf` as a Java properties file (named after `fileName`) inside a
 * fresh temporary directory, passes the file's path to `f`, and lets the
 * temp-dir helper clean everything up afterwards.
 */
private def withPropertyFile(fileName: String, conf: Map[String, String])(f: String => Unit) = {
  withTempDir { dir =>
    val file = File.createTempFile(fileName, "", dir)
    val javaProps = new java.util.Properties()
    conf.foreach { case (key, value) => javaProps.put(key, value) }
    val out = new FileOutputStream(file)
    try {
      javaProps.store(out, "")
    } finally {
      // Always release the file handle, even if store() throws.
      out.close()
    }

    f(file.getPath)
  }
}

private def updateConfWithFakeS3Fs(conf: Configuration): Unit = {
conf.set("fs.s3a.impl", classOf[TestFileSystem].getCanonicalName)
conf.set("fs.s3a.impl.disable.cache", "true")
Expand Down Expand Up @@ -1694,40 +1727,31 @@ class SparkSubmitSuite
val infixDelimFromFile = s"${delimKey}infixDelimFromFile" -> s"${CR}blah${LF}"
val nonDelimSpaceFromFile = s"${delimKey}nonDelimSpaceFromFile" -> " blah\f"

val testProps = Seq(leadingDelimKeyFromFile, trailingDelimKeyFromFile, infixDelimFromFile,
val testProps = Map(leadingDelimKeyFromFile, trailingDelimKeyFromFile, infixDelimFromFile,
nonDelimSpaceFromFile)

val props = new java.util.Properties()
val propsFile = File.createTempFile("test-spark-conf", ".properties",
Utils.createTempDir())
val propsOutputStream = new FileOutputStream(propsFile)
try {
testProps.foreach { case (k, v) => props.put(k, v) }
props.store(propsOutputStream, "test whitespace")
} finally {
propsOutputStream.close()
}
withPropertyFile("test-spark-conf.properties", testProps) { propsFile =>
val clArgs = Seq(
"--class", "org.SomeClass",
"--conf", s"${lineFeedFromCommandLine._1}=${lineFeedFromCommandLine._2}",
"--conf", "spark.master=yarn",
"--properties-file", propsFile,
"thejar.jar")

val clArgs = Seq(
"--class", "org.SomeClass",
"--conf", s"${lineFeedFromCommandLine._1}=${lineFeedFromCommandLine._2}",
"--conf", "spark.master=yarn",
"--properties-file", propsFile.getPath,
"thejar.jar")
val appArgs = new SparkSubmitArguments(clArgs)
val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)

val appArgs = new SparkSubmitArguments(clArgs)
val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)
Seq(
lineFeedFromCommandLine,
leadingDelimKeyFromFile,
trailingDelimKeyFromFile,
infixDelimFromFile
).foreach { case (k, v) =>
conf.get(k) should be (v)
}

Seq(
lineFeedFromCommandLine,
leadingDelimKeyFromFile,
trailingDelimKeyFromFile,
infixDelimFromFile
).foreach { case (k, v) =>
conf.get(k) should be (v)
conf.get(nonDelimSpaceFromFile._1) should be ("blah")
}

conf.get(nonDelimSpaceFromFile._1) should be ("blah")
}

test("get a Spark configuration from arguments") {
Expand Down

0 comments on commit efec497

Please sign in to comment.