Commit ee05df4
initial commit
1 parent eac413a

3 files changed: +61 -40 lines

core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
34 additions & 37 deletions

@@ -50,6 +50,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var executorCores: String = null
   var totalExecutorCores: String = null
   var propertiesFile: String = null
+  private var extraPropertiesFile: Boolean = false
   var driverMemory: String = null
   var driverExtraClassPath: String = null
   var driverExtraLibraryPath: String = null
@@ -85,27 +86,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var submissionToRequestStatusFor: String = null
   var useRest: Boolean = false // used internally

-  /** Default properties present in the currently defined defaults file. */
-  lazy val defaultSparkProperties: HashMap[String, String] = {
-    val defaultProperties = new HashMap[String, String]()
-    if (verbose) {
-      logInfo(log"Using properties file: ${MDC(PATH, propertiesFile)}")
-    }
-    Option(propertiesFile).foreach { filename =>
-      val properties = Utils.getPropertiesFromFile(filename)
-      properties.foreach { case (k, v) =>
-        defaultProperties(k) = v
-      }
-      // Property files may contain sensitive information, so redact before printing
-      if (verbose) {
-        Utils.redact(properties).foreach { case (k, v) =>
-          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
-        }
-      }
-    }
-    defaultProperties
-  }
-
   // Set parameters from command line arguments
   parse(args.asJava)

@@ -121,31 +101,43 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   validateArguments()

   /**
-   * Merge values from the default properties file with those specified through --conf.
-   * When this is called, `sparkProperties` is already filled with configs from the latter.
+   * Load properties from the file with the given path into `sparkProperties`.
+   * No-op if the file path is null
    */
-  private def mergeDefaultSparkProperties(): Unit = {
-    // Honor --conf before the specified properties file and defaults file
-    defaultSparkProperties.foreach { case (k, v) =>
-      if (!sparkProperties.contains(k)) {
-        sparkProperties(k) = v
+  private def loadPropertiesFromFile(filePath: String): Unit = {
+    if (filePath != null) {
+      if (verbose) {
+        logInfo(log"Using properties file: ${MDC(PATH, filePath)}")
       }
-    }
-
-    // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
-    // and --conf list
-    val defaultSparkConf = Utils.getDefaultPropertiesFile(env)
-    Option(defaultSparkConf).foreach { filename =>
-      val properties = Utils.getPropertiesFromFile(filename)
+      val properties = Utils.getPropertiesFromFile(filePath)
       properties.foreach { case (k, v) =>
         if (!sparkProperties.contains(k)) {
           sparkProperties(k) = v
         }
       }
+      // Property files may contain sensitive information, so redact before printing
+      if (verbose) {
+        Utils.redact(properties).foreach { case (k, v) =>
+          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
+        }
+      }
     }
+  }

-    if (propertiesFile == null) {
-      propertiesFile = defaultSparkConf
+  /**
+   * Merge values from the default properties file with those specified through --conf.
+   * When this is called, `sparkProperties` is already filled with configs from the latter.
+   */
+  private def mergeDefaultSparkProperties(): Unit = {
+    // Honor --conf before the specified properties file and defaults file
+    loadPropertiesFromFile(propertiesFile)
+
+    // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
+    // and --conf list when:
+    // - no input properties file is specified
+    // - input properties file is specified, but `--extra-properties-files` flag is set
+    if (propertiesFile == null || extraPropertiesFile) {
+      loadPropertiesFromFile(Utils.getDefaultPropertiesFile(env))
     }
   }

@@ -403,6 +395,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case PROPERTIES_FILE =>
         propertiesFile = value

+      case EXTRA_PROPERTIES_FILES =>
+        extraPropertiesFile = true
+
       case KILL_SUBMISSION =>
         submissionToKill = value
         if (action != null) {
@@ -546,6 +541,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         |  --conf, -c PROP=VALUE       Arbitrary Spark configuration property.
         |  --properties-file FILE      Path to a file from which to load extra properties. If not
         |                              specified, this will look for conf/spark-defaults.conf.
+        |  --extra-properties-files    Whether to load properties from conf/spark-defaults.conf,
+        |                              even if --properties-file is specified.
         |
         |  --driver-memory MEM         Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
         |  --driver-java-options       Extra Java options to pass to the driver.
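
The net effect of the hunks above is a fixed precedence: entries from --conf always win, the --properties-file fills in what --conf did not set, and conf/spark-defaults.conf is consulted only when no properties file was given or when the new --extra-properties-files flag is set. Below is a minimal, standalone Scala sketch of that merge order; the object name and map-valued arguments are illustrative stand-ins for the three sources, not Spark's actual API.

// Standalone sketch of the merge precedence (assumed names, not Spark code).
import scala.collection.mutable

object PropertyPrecedenceSketch {
  def merge(
      confArgs: Map[String, String],                // from repeated --conf flags
      propertiesFile: Option[Map[String, String]],  // from --properties-file, if given
      sparkDefaults: Map[String, String],           // from conf/spark-defaults.conf
      extraPropertiesFiles: Boolean): Map[String, String] = {
    val props = mutable.HashMap[String, String]()
    props ++= confArgs  // --conf always wins
    // --properties-file supplies only the keys --conf did not set
    propertiesFile.foreach(_.foreach { case (k, v) => props.getOrElseUpdate(k, v) })
    // spark-defaults.conf participates when no file was given, or when the flag is set
    if (propertiesFile.isEmpty || extraPropertiesFiles) {
      sparkDefaults.foreach { case (k, v) => props.getOrElseUpdate(k, v) }
    }
    props.toMap
  }

  def main(args: Array[String]): Unit = {
    // Mirrors the first test in the suite below: the defaults contribute executor
    // memory, the file contributes executor cores, and the file's driver memory
    // shadows the defaults' value.
    println(merge(
      confArgs = Map.empty,
      propertiesFile = Some(Map("spark.executor.cores" -> "16", "spark.driver.memory" -> "4g")),
      sparkDefaults = Map("spark.executor.memory" -> "3g", "spark.driver.memory" -> "3g"),
      extraPropertiesFiles = true))
    // prints (entry order may vary):
    // HashMap(spark.driver.memory -> 4g, spark.executor.cores -> 16, spark.executor.memory -> 3g)
  }
}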

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
25 additions & 3 deletions

@@ -1113,19 +1113,41 @@ class SparkSubmitSuite
     }
   }

-  test("SPARK-48392: Allow both spark-defaults.conf and properties file") {
-    forConfDir(Map("spark.executor.memory" -> "3g")) { path =>
-      withPropertyFile("spark-conf.properties", Map("spark.executor.cores" -> "16")) { propsFile =>
+  test("SPARK-48392: load spark-defaults.conf when --extra-properties-files is set") {
+    forConfDir(Map("spark.executor.memory" -> "3g", "spark.driver.memory" -> "3g")) { path =>
+      withPropertyFile("spark-conf.properties",
+          Map("spark.executor.cores" -> "16", "spark.driver.memory" -> "4g")) { propsFile =>
         val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
         val args = Seq(
           "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
           "--name", "testApp",
           "--master", "local",
           "--properties-file", propsFile,
+          "--extra-properties-files",
           unusedJar.toString)
         val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> path))
+        appArgs.executorCores should be("16")
         appArgs.executorMemory should be("3g")
+        appArgs.driverMemory should be("4g")
+      }
+    }
+  }
+
+  test("SPARK-48392: should skip spark-defaults.conf when --extra-properties-files is not set") {
+    forConfDir(Map("spark.executor.memory" -> "3g", "spark.driver.memory" -> "3g")) { path =>
+      withPropertyFile("spark-conf.properties",
+          Map("spark.executor.cores" -> "16", "spark.driver.memory" -> "4g")) { propsFile =>
+        val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
+        val args = Seq(
+          "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
+          "--name", "testApp",
+          "--master", "local",
+          "--properties-file", propsFile,
+          unusedJar.toString)
+        val appArgs = new SparkSubmitArguments(args, env = Map("SPARK_CONF_DIR" -> path))
         appArgs.executorCores should be("16")
+        appArgs.driverMemory should be("4g")
+        appArgs.executorMemory should be(null)
       }
     }
   }
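
For reference, a command-line invocation equivalent to the first test above might look like the following; the class name, properties-file path, and application jar are placeholders:

spark-submit \
  --class com.example.MyApp \
  --master local \
  --properties-file /path/to/spark-conf.properties \
  --extra-properties-files \
  app.jar

With the flag present, settings from the given properties file take precedence, and conf/spark-defaults.conf under SPARK_CONF_DIR still fills in any keys the file leaves unset; without it, spark-defaults.conf is skipped entirely.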

launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
2 additions & 0 deletions

@@ -55,6 +55,7 @@ class SparkSubmitOptionParser {
   protected final String PACKAGES = "--packages";
   protected final String PACKAGES_EXCLUDE = "--exclude-packages";
   protected final String PROPERTIES_FILE = "--properties-file";
+  protected final String EXTRA_PROPERTIES_FILES = "--extra-properties-files";
   protected final String PROXY_USER = "--proxy-user";
   protected final String PY_FILES = "--py-files";
   protected final String REPOSITORIES = "--repositories";
@@ -130,6 +131,7 @@ class SparkSubmitOptionParser {
     { USAGE_ERROR },
     { VERBOSE, "-v" },
     { VERSION },
+    { EXTRA_PROPERTIES_FILES },
   };

   /**
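
The second hunk registers the flag in the parser's table of no-value switches, alongside --verbose and --version, so it consumes no following token. A toy Scala sketch of the valued-option-versus-switch distinction (assumed structure, not the launcher's actual parsing logic):

// Toy sketch: valued options consume the next token, switches do not.
object SwitchVsValuedSketch {
  val valued = Set("--properties-file", "--class", "--master", "--name")
  val switches = Set("--extra-properties-files", "--verbose", "--version")

  def parse(args: List[String], acc: Map[String, String] = Map.empty): Map[String, String] =
    args match {
      case opt :: value :: rest if valued(opt) => parse(rest, acc + (opt -> value))
      case opt :: rest if switches(opt) => parse(rest, acc + (opt -> "true"))
      case _ => acc  // the first unrecognized token ends option parsing here
    }

  def main(args: Array[String]): Unit = {
    println(parse(List(
      "--properties-file", "conf.properties", "--extra-properties-files", "app.jar")))
    // Map(--properties-file -> conf.properties, --extra-properties-files -> true)
  }
}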
