Skip to content

Commit f8a95e9

Browse files
committed
[SPARK-48392] Allow both spark-defaults.conf and a user-specified properties file to be loaded, falling back to spark-defaults.conf when no --properties-file is given
1 parent 6c5861b commit f8a95e9

File tree

2 files changed

+58
-30
lines changed

2 files changed

+58
-30
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
125125
* When this is called, `sparkProperties` is already filled with configs from the latter.
126126
*/
127127
private def mergeDefaultSparkProperties(): Unit = {
128-
// Use common defaults file, if not specified by user
129128
// Honor --conf before the defaults file
130129
defaultSparkProperties.foreach { case (k, v) =>
131130
if (!sparkProperties.contains(k)) {
@@ -134,14 +133,19 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
134133
}
135134

136135
// Also load properties from `spark-defaults.conf` if they do not exist in the properties file
137-
Option(Utils.getDefaultPropertiesFile(env)).foreach { filename =>
136+
val defaultSparkConf = Utils.getDefaultPropertiesFile(env)
137+
Option(defaultSparkConf).foreach { filename =>
138138
val properties = Utils.getPropertiesFromFile(filename)
139139
properties.foreach { case (k, v) =>
140140
if (!sparkProperties.contains(k)) {
141141
sparkProperties(k) = v
142142
}
143143
}
144144
}
145+
146+
if (propertiesFile == null) {
147+
propertiesFile = defaultSparkConf
148+
}
145149
}
146150

147151
/**

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 52 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -1113,6 +1113,23 @@ class SparkSubmitSuite
11131113
}
11141114
}
11151115

1116+
test("SPARK-48392: Allow both spark-defaults.conf and properties file") {
1117+
forConfDir(Map("spark.executor.memory" -> "3g")) { path =>
1118+
withPropertyFile("spark-conf.properties", Map("spark.executor.cores" -> "16")) { propsFile =>
1119+
val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
1120+
val args = Seq(
1121+
"--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
1122+
"--name", "testApp",
1123+
"--master", "local",
1124+
"--properties-file", propsFile,
1125+
unusedJar.toString)
1126+
val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> path))
1127+
appArgs.executorMemory should be("3g")
1128+
appArgs.executorCores should be("16")
1129+
}
1130+
}
1131+
}
1132+
11161133
test("support glob path") {
11171134
withTempDir { tmpJarDir =>
11181135
withTempDir { tmpFileDir =>
@@ -1623,6 +1640,22 @@ class SparkSubmitSuite
16231640
}
16241641
}
16251642

1643+
private def withPropertyFile(fileName: String, conf: Map[String, String])(f: String => Unit) = {
1644+
withTempDir { tmpDir =>
1645+
val props = new java.util.Properties()
1646+
val propsFile = File.createTempFile(fileName, "", tmpDir)
1647+
val propsOutputStream = new FileOutputStream(propsFile)
1648+
try {
1649+
conf.foreach { case (k, v) => props.put(k, v) }
1650+
props.store(propsOutputStream, "")
1651+
} finally {
1652+
propsOutputStream.close()
1653+
}
1654+
1655+
f(propsFile.getPath)
1656+
}
1657+
}
1658+
16261659
private def updateConfWithFakeS3Fs(conf: Configuration): Unit = {
16271660
conf.set("fs.s3a.impl", classOf[TestFileSystem].getCanonicalName)
16281661
conf.set("fs.s3a.impl.disable.cache", "true")
@@ -1694,40 +1727,31 @@ class SparkSubmitSuite
16941727
val infixDelimFromFile = s"${delimKey}infixDelimFromFile" -> s"${CR}blah${LF}"
16951728
val nonDelimSpaceFromFile = s"${delimKey}nonDelimSpaceFromFile" -> " blah\f"
16961729

1697-
val testProps = Seq(leadingDelimKeyFromFile, trailingDelimKeyFromFile, infixDelimFromFile,
1730+
val testProps = Map(leadingDelimKeyFromFile, trailingDelimKeyFromFile, infixDelimFromFile,
16981731
nonDelimSpaceFromFile)
16991732

1700-
val props = new java.util.Properties()
1701-
val propsFile = File.createTempFile("test-spark-conf", ".properties",
1702-
Utils.createTempDir())
1703-
val propsOutputStream = new FileOutputStream(propsFile)
1704-
try {
1705-
testProps.foreach { case (k, v) => props.put(k, v) }
1706-
props.store(propsOutputStream, "test whitespace")
1707-
} finally {
1708-
propsOutputStream.close()
1709-
}
1733+
withPropertyFile("test-spark-conf.properties", testProps) { propsFile =>
1734+
val clArgs = Seq(
1735+
"--class", "org.SomeClass",
1736+
"--conf", s"${lineFeedFromCommandLine._1}=${lineFeedFromCommandLine._2}",
1737+
"--conf", "spark.master=yarn",
1738+
"--properties-file", propsFile,
1739+
"thejar.jar")
17101740

1711-
val clArgs = Seq(
1712-
"--class", "org.SomeClass",
1713-
"--conf", s"${lineFeedFromCommandLine._1}=${lineFeedFromCommandLine._2}",
1714-
"--conf", "spark.master=yarn",
1715-
"--properties-file", propsFile.getPath,
1716-
"thejar.jar")
1741+
val appArgs = new SparkSubmitArguments(clArgs)
1742+
val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)
17171743

1718-
val appArgs = new SparkSubmitArguments(clArgs)
1719-
val (_, _, conf, _) = submit.prepareSubmitEnvironment(appArgs)
1744+
Seq(
1745+
lineFeedFromCommandLine,
1746+
leadingDelimKeyFromFile,
1747+
trailingDelimKeyFromFile,
1748+
infixDelimFromFile
1749+
).foreach { case (k, v) =>
1750+
conf.get(k) should be (v)
1751+
}
17201752

1721-
Seq(
1722-
lineFeedFromCommandLine,
1723-
leadingDelimKeyFromFile,
1724-
trailingDelimKeyFromFile,
1725-
infixDelimFromFile
1726-
).foreach { case (k, v) =>
1727-
conf.get(k) should be (v)
1753+
conf.get(nonDelimSpaceFromFile._1) should be ("blah")
17281754
}
1729-
1730-
conf.get(nonDelimSpaceFromFile._1) should be ("blah")
17311755
}
17321756

17331757
test("get a Spark configuration from arguments") {

0 commit comments

Comments
 (0)