
Commit 94fe365: initial commit
1 parent 4360ec7

File tree: 3 files changed (+61 -40 lines)

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala

Lines changed: 34 additions & 37 deletions
@@ -50,6 +50,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var executorCores: String = null
   var totalExecutorCores: String = null
   var propertiesFile: String = null
+  private var extraPropertiesFiles: Boolean = false
   var driverMemory: String = null
   var driverExtraClassPath: String = null
   var driverExtraLibraryPath: String = null
@@ -87,27 +88,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S

   override protected def logName: String = classOf[SparkSubmitArguments].getName

-  /** Default properties present in the currently defined defaults file. */
-  lazy val defaultSparkProperties: HashMap[String, String] = {
-    val defaultProperties = new HashMap[String, String]()
-    if (verbose) {
-      logInfo(log"Using properties file: ${MDC(PATH, propertiesFile)}")
-    }
-    Option(propertiesFile).foreach { filename =>
-      val properties = Utils.getPropertiesFromFile(filename)
-      properties.foreach { case (k, v) =>
-        defaultProperties(k) = v
-      }
-      // Property files may contain sensitive information, so redact before printing
-      if (verbose) {
-        Utils.redact(properties).foreach { case (k, v) =>
-          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
-        }
-      }
-    }
-    defaultProperties
-  }
-
   // Set parameters from command line arguments
   parse(args.asJava)

@@ -123,31 +103,43 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   validateArguments()

   /**
-   * Merge values from the default properties file with those specified through --conf.
-   * When this is called, `sparkProperties` is already filled with configs from the latter.
+   * Load properties from the file with the given path into `sparkProperties`.
+   * No-op if the file path is null
    */
-  private def mergeDefaultSparkProperties(): Unit = {
-    // Honor --conf before the specified properties file and defaults file
-    defaultSparkProperties.foreach { case (k, v) =>
-      if (!sparkProperties.contains(k)) {
-        sparkProperties(k) = v
+  private def loadPropertiesFromFile(filePath: String): Unit = {
+    if (filePath != null) {
+      if (verbose) {
+        logInfo(log"Using properties file: ${MDC(PATH, filePath)}")
       }
-    }
-
-    // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
-    // and --conf list
-    val defaultSparkConf = Utils.getDefaultPropertiesFile(env)
-    Option(defaultSparkConf).foreach { filename =>
-      val properties = Utils.getPropertiesFromFile(filename)
+      val properties = Utils.getPropertiesFromFile(filePath)
       properties.foreach { case (k, v) =>
         if (!sparkProperties.contains(k)) {
           sparkProperties(k) = v
         }
       }
+      // Property files may contain sensitive information, so redact before printing
+      if (verbose) {
+        Utils.redact(properties).foreach { case (k, v) =>
+          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
+        }
+      }
     }
+  }

-    if (propertiesFile == null) {
-      propertiesFile = defaultSparkConf
+  /**
+   * Merge values from the default properties file with those specified through --conf.
+   * When this is called, `sparkProperties` is already filled with configs from the latter.
+   */
+  private def mergeDefaultSparkProperties(): Unit = {
+    // Honor --conf before the specified properties file and defaults file
+    loadPropertiesFromFile(propertiesFile)
+
+    // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
+    // and --conf list when:
+    //   - no input properties file is specified
+    //   - input properties file is specified, but `--extra-properties-files` flag is set
+    if (propertiesFile == null || extraPropertiesFiles) {
+      loadPropertiesFromFile(Utils.getDefaultPropertiesFile(env))
     }
   }

@@ -405,6 +397,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case PROPERTIES_FILE =>
        propertiesFile = value

+      case EXTRA_PROPERTIES_FILES =>
+        extraPropertiesFiles = true
+
       case KILL_SUBMISSION =>
        submissionToKill = value
        if (action != null) {
@@ -548,6 +543,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
        |  --conf, -c PROP=VALUE       Arbitrary Spark configuration property.
        |  --properties-file FILE      Path to a file from which to load extra properties. If not
        |                              specified, this will look for conf/spark-defaults.conf.
+       |  --extra-properties-files    Whether to load properties from conf/spark-defaults.conf,
+       |                              even if --properties-file is specified.
        |
        |  --driver-memory MEM         Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
        |  --driver-java-options       Extra Java options to pass to the driver.
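
Taken together, the changes above fix a single precedence order: --conf entries are honored first, then the file passed via --properties-file, and finally conf/spark-defaults.conf, which is now consulted even when a properties file is given, provided --extra-properties-files is set. Below is a minimal, self-contained Scala sketch of that merge order; the object name and the sample keys are illustrative stand-ins, not part of the patch:

import scala.collection.mutable

object MergeOrderSketch {
  def main(args: Array[String]): Unit = {
    val sparkProperties = mutable.HashMap[String, String]()

    // Same rule as loadPropertiesFromFile above: a later source only
    // fills keys that earlier sources left unset.
    def loadIfAbsent(source: Map[String, String]): Unit =
      source.foreach { case (k, v) =>
        if (!sparkProperties.contains(k)) sparkProperties(k) = v
      }

    val confArgs      = Map("spark.driver.memory" -> "2g")     // --conf
    val propsFile     = Map("spark.driver.memory" -> "4g",
                            "spark.executor.cores" -> "16")    // --properties-file
    val sparkDefaults = Map("spark.executor.memory" -> "3g")   // spark-defaults.conf

    val extraPropertiesFiles = true  // stands in for the new CLI flag

    loadIfAbsent(confArgs)    // parsed first, so --conf always wins
    loadIfAbsent(propsFile)   // fills whatever --conf did not set
    if (extraPropertiesFiles) {
      // Without the flag, this step is skipped whenever a properties
      // file was supplied.
      loadIfAbsent(sparkDefaults)
    }

    // Prints: driver.memory=2g, executor.cores=16, executor.memory=3g
    sparkProperties.toSeq.sorted.foreach { case (k, v) => println(s"$k=$v") }
  }
}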

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 25 additions & 3 deletions
@@ -1113,19 +1113,41 @@ class SparkSubmitSuite
     }
   }

-  test("SPARK-48392: Allow both spark-defaults.conf and properties file") {
-    forConfDir(Map("spark.executor.memory" -> "3g")) { path =>
-      withPropertyFile("spark-conf.properties", Map("spark.executor.cores" -> "16")) { propsFile =>
+  test("SPARK-48392: load spark-defaults.conf when --extra-properties-files is set") {
+    forConfDir(Map("spark.executor.memory" -> "3g", "spark.driver.memory" -> "3g")) { path =>
+      withPropertyFile("spark-conf.properties",
+          Map("spark.executor.cores" -> "16", "spark.driver.memory" -> "4g")) { propsFile =>
         val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
         val args = Seq(
           "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
           "--name", "testApp",
           "--master", "local",
           "--properties-file", propsFile,
+          "--extra-properties-files",
           unusedJar.toString)
         val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> path))
+        appArgs.executorCores should be("16")
         appArgs.executorMemory should be("3g")
+        appArgs.driverMemory should be("4g")
+      }
+    }
+  }
+
+  test("SPARK-48392: should skip spark-defaults.conf when --extra-properties-files is not set") {
+    forConfDir(Map("spark.executor.memory" -> "3g", "spark.driver.memory" -> "3g")) { path =>
+      withPropertyFile("spark-conf.properties",
+          Map("spark.executor.cores" -> "16", "spark.driver.memory" -> "4g")) { propsFile =>
+        val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
+        val args = Seq(
+          "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
+          "--name", "testApp",
+          "--master", "local",
+          "--properties-file", propsFile,
+          unusedJar.toString)
+        val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> path))
         appArgs.executorCores should be("16")
+        appArgs.driverMemory should be("4g")
+        appArgs.executorMemory should be(null)
       }
     }
   }
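
Both tests lean on two suite helpers, forConfDir and withPropertyFile, which materialize the two property sources on disk. A rough sketch of what a helper like forConfDir has to do, under the assumption (not verified against the suite's actual code) that it writes a spark-defaults.conf into a temporary directory and hands back the path the test then wires in as SPARK_CONF_DIR:

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.Files

object ConfDirSketch {
  // Hypothetical stand-in for the suite's forConfDir helper.
  def forConfDir(defaults: Map[String, String])(body: String => Unit): Unit = {
    val confDir = Files.createTempDirectory("spark-conf-").toFile
    val confFile = new File(confDir, "spark-defaults.conf")
    try {
      // spark-defaults.conf entries are whitespace-separated key/value lines.
      val contents = defaults.map { case (k, v) => s"$k $v" }.mkString("\n")
      Files.write(confFile.toPath, contents.getBytes(StandardCharsets.UTF_8))
      body(confDir.getAbsolutePath)
    } finally {
      confFile.delete()
      confDir.delete()
    }
  }
}

The telling assertion is the last one: without --extra-properties-files, spark.executor.memory stays null because spark-defaults.conf is never read once --properties-file is given.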

launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java

Lines changed: 2 additions & 0 deletions
@@ -55,6 +55,7 @@ class SparkSubmitOptionParser {
   protected final String PACKAGES = "--packages";
   protected final String PACKAGES_EXCLUDE = "--exclude-packages";
   protected final String PROPERTIES_FILE = "--properties-file";
+  protected final String EXTRA_PROPERTIES_FILES = "--extra-properties-files";
   protected final String PROXY_USER = "--proxy-user";
   protected final String PY_FILES = "--py-files";
   protected final String REPOSITORIES = "--repositories";
@@ -130,6 +131,7 @@ class SparkSubmitOptionParser {
     { USAGE_ERROR },
     { VERBOSE, "-v" },
     { VERSION },
+    { EXTRA_PROPERTIES_FILES },
   };

   /**
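
Note that the new constant goes into the parser's switches table (options that take no value) rather than the opts table (options that expect one). A hedged usage sketch through the programmatic launcher API; the jar path, main class, and properties path below are placeholders, and addSparkArg is used because the flag carries no value:

import org.apache.spark.launcher.SparkLauncher

object LaunchSketch {
  def main(args: Array[String]): Unit = {
    val proc = new SparkLauncher()
      .setMaster("local")
      .setAppResource("/path/to/app.jar")            // placeholder
      .setMainClass("com.example.Main")              // placeholder
      .setPropertiesFile("/path/to/app.properties")  // placeholder; maps to --properties-file
      .addSparkArg("--extra-properties-files")       // the no-value switch added by this commit
      .launch()
    proc.waitFor()
  }
}

On the command line, the equivalent is passing --extra-properties-files alongside --properties-file to spark-submit.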
