@@ -50,6 +50,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var executorCores: String = null
   var totalExecutorCores: String = null
   var propertiesFile: String = null
+  private var extraPropertiesFiles: Boolean = false
   var driverMemory: String = null
   var driverExtraClassPath: String = null
   var driverExtraLibraryPath: String = null
@@ -87,27 +88,6 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
 
   override protected def logName: String = classOf[SparkSubmitArguments].getName
 
-  /** Default properties present in the currently defined defaults file. */
-  lazy val defaultSparkProperties: HashMap[String, String] = {
-    val defaultProperties = new HashMap[String, String]()
-    if (verbose) {
-      logInfo(log"Using properties file: ${MDC(PATH, propertiesFile)}")
-    }
-    Option(propertiesFile).foreach { filename =>
-      val properties = Utils.getPropertiesFromFile(filename)
-      properties.foreach { case (k, v) =>
-        defaultProperties(k) = v
-      }
-      // Property files may contain sensitive information, so redact before printing
-      if (verbose) {
-        Utils.redact(properties).foreach { case (k, v) =>
-          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
-        }
-      }
-    }
-    defaultProperties
-  }
-
   // Set parameters from command line arguments
   parse(args.asJava)
 
@@ -123,31 +103,43 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   validateArguments()
 
   /**
-   * Merge values from the default properties file with those specified through --conf.
-   * When this is called, `sparkProperties` is already filled with configs from the latter.
+   * Load properties from the file with the given path into `sparkProperties`.
+   * No-op if the file path is null
    */
-  private def mergeDefaultSparkProperties(): Unit = {
-    // Honor --conf before the specified properties file and defaults file
-    defaultSparkProperties.foreach { case (k, v) =>
-      if (!sparkProperties.contains(k)) {
-        sparkProperties(k) = v
+  private def loadPropertiesFromFile(filePath: String): Unit = {
+    if (filePath != null) {
+      if (verbose) {
+        logInfo(log"Using properties file: ${MDC(PATH, filePath)}")
       }
-    }
-
-    // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
-    // and --conf list
-    val defaultSparkConf = Utils.getDefaultPropertiesFile(env)
-    Option(defaultSparkConf).foreach { filename =>
-      val properties = Utils.getPropertiesFromFile(filename)
+      val properties = Utils.getPropertiesFromFile(filePath)
       properties.foreach { case (k, v) =>
         if (!sparkProperties.contains(k)) {
           sparkProperties(k) = v
         }
       }
+      // Property files may contain sensitive information, so redact before printing
+      if (verbose) {
+        Utils.redact(properties).foreach { case (k, v) =>
+          logInfo(log"Adding default property: ${MDC(KEY, k)}=${MDC(VALUE, v)}")
+        }
+      }
     }
+  }
 
-    if (propertiesFile == null) {
-      propertiesFile = defaultSparkConf
+  /**
+   * Merge values from the default properties file with those specified through --conf.
+   * When this is called, `sparkProperties` is already filled with configs from the latter.
+   */
+  private def mergeDefaultSparkProperties(): Unit = {
+    // Honor --conf before the specified properties file and defaults file
+    loadPropertiesFromFile(propertiesFile)
+
+    // Also load properties from `spark-defaults.conf` if they do not exist in the properties file
+    // and --conf list when:
+    //   - no input properties file is specified
+    //   - input properties file is specified, but `--extra-properties-files` flag is set
+    if (propertiesFile == null || extraPropertiesFiles) {
+      loadPropertiesFromFile(Utils.getDefaultPropertiesFile(env))
     }
   }
 
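The net precedence after this hunk is: `--conf` entries win over the user-supplied properties file, which in turn wins over `conf/spark-defaults.conf`; the defaults file is consulted only when no properties file is given or the new flag is set. Below is a minimal, self-contained Scala sketch of that first-wins merge; it is not Spark code, and the config keys and values are made up for illustration:

    import scala.collection.mutable

    object PrecedenceSketch {
      // Seeded from --conf before any file is read, as in SparkSubmitArguments.
      val sparkProperties = mutable.HashMap[String, String]("spark.master" -> "local[2]")

      // First-wins merge, mirroring loadPropertiesFromFile: only absent keys are added.
      def load(props: Map[String, String]): Unit =
        props.foreach { case (k, v) =>
          if (!sparkProperties.contains(k)) sparkProperties(k) = v
        }

      def main(args: Array[String]): Unit = {
        val userFile      = Map("spark.master" -> "yarn", "spark.executor.memory" -> "4g")
        val sparkDefaults = Map("spark.executor.memory" -> "2g", "spark.eventLog.enabled" -> "true")
        val extraPropertiesFiles = true  // as if --extra-properties-files were passed

        load(userFile)                                 // --conf still wins: spark.master stays local[2]
        if (extraPropertiesFiles) load(sparkDefaults)  // fills only keys still missing

        // Result: spark.master=local[2], spark.executor.memory=4g, spark.eventLog.enabled=true
        sparkProperties.toSeq.sorted.foreach { case (k, v) => println(s"$k=$v") }
      }
    }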
@@ -405,6 +397,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
       case PROPERTIES_FILE =>
         propertiesFile = value
 
+      case EXTRA_PROPERTIES_FILES =>
+        extraPropertiesFiles = true
+
       case KILL_SUBMISSION =>
        submissionToKill = value
        if (action != null) {
@@ -548,6 +543,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
        |  --conf, -c PROP=VALUE       Arbitrary Spark configuration property.
        |  --properties-file FILE      Path to a file from which to load extra properties. If not
        |                              specified, this will look for conf/spark-defaults.conf.
+        |  --extra-properties-files    Whether to load properties from conf/spark-defaults.conf,
+        |                              even if --properties-file is specified.
        |
        |  --driver-memory MEM         Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
        |  --driver-java-options       Extra Java options to pass to the driver.
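For context, a hypothetical invocation that layers all three property sources (the class, jar, and file names below are placeholders):

    spark-submit \
      --class com.example.MyApp \
      --conf spark.executor.memory=8g \
      --properties-file my-app.conf \
      --extra-properties-files \
      my-app.jar

Here `--conf` takes highest precedence, then `my-app.conf`, and finally `conf/spark-defaults.conf`, which is loaded despite `--properties-file` being present because `--extra-properties-files` is set.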