apply from: "$rootDir/gradle/utils.gradle"

apply plugin: 'java-library'

configurations {
    sparklingWaterAssemblyJar
}

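// The assembly (fat) jar is consumed through the dedicated sparklingWaterAssemblyJar
// configuration so its resolved path can be injected into the Terraform templates below.
// Spark artifacts are compileOnly since Spark is expected to be provided by the cluster at runtime.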
dependencies {
    sparklingWaterAssemblyJar(project(path: ':sparkling-water-assembly', configuration: 'shadow'))

    api(project(":sparkling-water-ml"))
    api(project(":sparkling-water-core"))

    compileOnly("org.apache.spark:spark-sql_${scalaBaseVersion}:${sparkVersion}")
    compileOnly("org.apache.spark:spark-mllib_${scalaBaseVersion}:${sparkVersion}")
}

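// Terraform staging pipeline: cleanTerraform wipes build/terraform, copyTerraform copies
// the *.tf templates there, and substituteTerraform replaces the SUBST_* placeholders
// with concrete build values.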
task cleanTerraform(type: Delete) {
    delete "build/terraform"
}

task copyTerraform(dependsOn: cleanTerraform) {
    doLast {
        copy {
            from 'src/main/terraform'
            include "**/*.tf"
            into "build/terraform"
        }
        copy {
            from '../templates/src/terraform/aws/modules'
            include "emr_security/*.tf"
            into "build/terraform/aws/modules"
        }
    }
}

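// Rewrites the staged templates in place, replacing the SUBST_* tokens with the
// assembly jar path, the benchmarks jar path, and the H2O/Sparkling Water/Scala/EMR
// versions taken from the Gradle build.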
task substituteTerraform(dependsOn: copyTerraform) {
    doLast {
        def tfBaseDir = "${project.buildDir.toString()}/terraform/aws/"
        def tfScripts = [
            "${tfBaseDir}/variables.tf",
            "${tfBaseDir}/modules/emr_benchmarks_deployment/variables.tf",
            "${tfBaseDir}/modules/emr_benchmarks_deployment/main.tf"
        ]
        tfScripts.each { path ->
            def contents = file(path).getText('UTF-8')
            contents = contents
                .replaceAll("SUBST_PACKAGE_FILE", "${configurations.sparklingWaterAssemblyJar.singleFile}")
                .replaceAll("SUBST_BENCHMARKS_FILE", "$buildDir/libs/sparkling-water-benchmarks_$scalaBaseVersion-${version}.jar")
                .replaceAll("SUBST_H2O_VERSION_NAME", h2oMajorName)
                .replaceAll("SUBST_H2O_VERSION", h2oVersion)
                .replaceAll("SUBST_H2O_BUILD", h2oBuild)
                .replaceAll("SUBST_SW_VERSION", version.toString())
                .replaceAll("SUBST_SCALA_VERSION", scalaBaseVersion)
                .replaceAll("SUBST_EMR_VERSION", supportedEmrVersion)

            file(path).write(contents, 'UTF-8')
        }
    }
}

task cleanOutput(type: Delete) {
    delete "output"
}

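// Runs the default benchmark suite via ./run_benchmarks.sh. AWS credentials are passed
// as Gradle project properties; an illustrative invocation (values are placeholders):
//   ./gradlew runBenchmarks -Paws_access_key=... -Paws_secret_key=... -Paws_ssh_public_key=...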
task runBenchmarks(dependsOn: [":sparkling-water-assembly:shadowJar", substituteTerraform, cleanOutput]) {
    doLast {
        exec {
            def accessKey = project.property("aws_access_key")
            def secretKey = project.property("aws_secret_key")
            def publicKey = project.property("aws_ssh_public_key")

            environment("aws_access_key", accessKey)
            environment("aws_secret_key", secretKey)
            environment("aws_ssh_public_key", publicKey)
            environment("datasets", "datasets.json")

            commandLine "./run_benchmarks.sh"
        }
    }
}

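// Large-scale conversion benchmark: measures DataFrame -> H2OFrame conversion on
// bigDatasets.json, using m5.4xlarge instances, 10 core nodes, and only the
// YARN external-backend configuration.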
task runBigDataSparkToH2OConversionBenchmarks(dependsOn: [":sparkling-water-assembly:shadowJar", substituteTerraform, cleanOutput]) {
    doLast {
        exec {
            def accessKey = project.property("aws_access_key")
            def secretKey = project.property("aws_secret_key")
            def publicKey = project.property("aws_ssh_public_key")

            environment("aws_access_key", accessKey)
            environment("aws_secret_key", secretKey)
            environment("aws_ssh_public_key", publicKey)
            environment("aws_instance_type", "m5.4xlarge")
            environment("aws_core_instance_count", "10")
            environment("datasets", "bigDatasets.json")
            environment("other_arguments", "-b DataFrameToH2OFrameConversionBenchmark")
            environment("driver_memory_gb", "8")
            environment("executor_memory_gb", "32")
            environment("run_yarn_internal", "false")
            environment("run_yarn_external", "true")
            environment("run_local_internal", "false")

            commandLine "./run_benchmarks.sh"
        }
    }
}

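// Same cluster setup as above, but measures the opposite direction:
// H2OFrame -> DataFrame conversion.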
task runBigDataH2OtoSparkConversionBenchmarks(dependsOn: [":sparkling-water-assembly:shadowJar", substituteTerraform, cleanOutput]) {
    doLast {
        exec {
            def accessKey = project.property("aws_access_key")
            def secretKey = project.property("aws_secret_key")
            def publicKey = project.property("aws_ssh_public_key")

            environment("aws_access_key", accessKey)
            environment("aws_secret_key", secretKey)
            environment("aws_ssh_public_key", publicKey)
            environment("aws_instance_type", "m5.4xlarge")
            environment("aws_core_instance_count", "10")
            environment("datasets", "bigDatasets.json")
            environment("other_arguments", "-b H2OFrameToDataFrameConversionBenchmark")
            environment("driver_memory_gb", "8")
            environment("executor_memory_gb", "32")
            environment("run_yarn_internal", "false")
            environment("run_yarn_external", "true")
            environment("run_local_internal", "false")

            commandLine "./run_benchmarks.sh"
        }
    }
}

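// The benchmarks jar referenced by SUBST_BENCHMARKS_FILE is produced by the regular
// build, so make sure it exists before the substitution runs.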
substituteTerraform.dependsOn build