@@ -50,6 +50,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
50
50
// Number of cores per executor (string form of the --executor-cores argument).
var executorCores: String = null
// Total cores across all executors, standalone/Mesos coarse-grained mode (--total-executor-cores).
var totalExecutorCores: String = null
// Path passed via --properties-file; null means fall back to conf/spark-defaults.conf.
var propertiesFile: String = null
// Set by the --extra-properties-files flag: when true, spark-defaults.conf is
// loaded in addition to the file given via --properties-file.
private var extraPropertiesFile: Boolean = false
// Amount of memory for the driver process (--driver-memory, e.g. "1g").
var driverMemory: String = null
// Extra classpath entries for the driver (--driver-class-path).
var driverExtraClassPath: String = null
// Extra native library path entries for the driver (--driver-library-path).
var driverExtraLibraryPath: String = null
@@ -85,27 +86,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
85
86
// Submission ID whose status was requested (--status); null when not requesting status.
var submissionToRequestStatusFor: String = null
// Whether to use the REST submission gateway; not user-facing. // used internally
var useRest: Boolean = false
87
88
88
- /** Default properties present in the currently defined defaults file. */
89
- lazy val defaultSparkProperties : HashMap [String , String ] = {
90
- val defaultProperties = new HashMap [String , String ]()
91
- if (verbose) {
92
- logInfo(log " Using properties file: ${MDC (PATH , propertiesFile)}" )
93
- }
94
- Option (propertiesFile).foreach { filename =>
95
- val properties = Utils .getPropertiesFromFile(filename)
96
- properties.foreach { case (k, v) =>
97
- defaultProperties(k) = v
98
- }
99
- // Property files may contain sensitive information, so redact before printing
100
- if (verbose) {
101
- Utils .redact(properties).foreach { case (k, v) =>
102
- logInfo(log " Adding default property: ${MDC (KEY , k)}= ${MDC (VALUE , v)}" )
103
- }
104
- }
105
- }
106
- defaultProperties
107
- }
108
-
109
89
// Set parameters from command line arguments
110
90
parse(args.asJava)
111
91
@@ -121,31 +101,43 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
121
101
validateArguments()
122
102
123
103
/**
 * Load properties from the file at the given path into `sparkProperties`,
 * never overwriting keys that are already present (i.e. those set via --conf).
 * No-op if the file path is null.
 */
private def loadPropertiesFromFile(filePath: String): Unit = {
  // Option(null) is None, so a missing path makes the whole method a no-op.
  Option(filePath).foreach { path =>
    if (verbose) {
      logInfo(log"Using properties file: ${MDC(PATH, path)}")
    }
    val loaded = Utils.getPropertiesFromFile(path)
    loaded.foreach { case (key, value) =>
      // Existing entries win: --conf (and earlier files) take precedence.
      if (!sparkProperties.contains(key)) {
        sparkProperties(key) = value
      }
    }
    // Property files may contain sensitive information, so redact before printing
    if (verbose) {
      Utils.redact(loaded).foreach { case (key, value) =>
        logInfo(log"Adding default property: ${MDC(KEY, key)}=${MDC(VALUE, value)}")
      }
    }
  }
}
146
126
147
- if (propertiesFile == null ) {
148
- propertiesFile = defaultSparkConf
127
/**
 * Merge values from the default properties file with those specified through --conf.
 * When this is called, `sparkProperties` is already filled with configs from the
 * latter, and `loadPropertiesFromFile` never overwrites existing keys, so the
 * precedence order is: --conf, then --properties-file, then spark-defaults.conf.
 */
private def mergeDefaultSparkProperties(): Unit = {
  // Honor --conf before the specified properties file and defaults file.
  loadPropertiesFromFile(propertiesFile)

  // Fall back to `spark-defaults.conf` for still-unset keys when either no
  // input properties file was specified, or one was specified but the
  // `--extra-properties-files` flag asked for the defaults as well.
  val loadDefaults = propertiesFile == null || extraPropertiesFile
  if (loadDefaults) {
    loadPropertiesFromFile(Utils.getDefaultPropertiesFile(env))
  }
}
151
143
@@ -403,6 +395,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
403
395
case PROPERTIES_FILE =>
404
396
propertiesFile = value
405
397
398
+ case EXTRA_PROPERTIES_FILES =>
399
+ extraPropertiesFile = true
400
+
406
401
case KILL_SUBMISSION =>
407
402
submissionToKill = value
408
403
if (action != null ) {
@@ -546,6 +541,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
546
541
| --conf, -c PROP=VALUE Arbitrary Spark configuration property.
547
542
| --properties-file FILE Path to a file from which to load extra properties. If not
548
543
| specified, this will look for conf/spark-defaults.conf.
544
+ | --extra-properties-files Whether to load properties from conf/spark-defaults.conf,
545
+ | even if --properties-file is specified.
549
546
|
550
547
| --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
551
548
| --driver-java-options Extra Java options to pass to the driver.
0 commit comments