Commit 48fa13a

[SW-2636] A workaround for Scala compiler api breaking change (#2664)
* [SW-2636] A workaround for Scala compiler api breaking change
1 parent 14dcf8c commit 48fa13a

6 files changed: +126, -43 lines

gradle/scala.gradle

Lines changed: 12 additions & 2 deletions
@@ -15,11 +15,21 @@ configurations {
   scalaCompilerPlugin
 }
 
+//forcing the scala version but not messing with the incremental compiler because it will not accept it
+configurations
+  .findAll { conf -> !conf.name.contains("scalaCompilerPlugin") && conf.name != "zinc" }
+  .each { conf ->
+    conf.resolutionStrategy {
+      force "org.scala-lang:scala-compiler:${scalaVersion}"
+      force "org.scala-lang:scala-library:${scalaVersion}"
+      force "org.scala-lang:scala-reflect:${scalaVersion}"
+    }
+  }
+
 dependencies {
   scalaCompilerPlugin "org.scalamacros:paradise_${scalaVersion}:2.1.1"
 }
 
-
 // Activate Zinc compiler and configure scalac
 tasks.withType(ScalaCompile) {
   scalaCompileOptions.additionalParameters = [

@@ -66,7 +76,7 @@ task scaladocJar(type: Jar, dependsOn: scaladoc) {
   from scaladoc
 }
 
-task javadocJar (type: Jar, dependsOn: scaladoc) {
+task javadocJar(type: Jar, dependsOn: scaladoc) {
   archiveClassifier = 'javadoc'
   from scaladoc
 }
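
A note on the first hunk above: the resolution strategy pins scala-compiler, scala-library, and scala-reflect to ${scalaVersion} on every configuration except the scalaCompilerPlugin ones and zinc, leaving the incremental (Zinc) compiler's own Scala dependencies alone, as the in-line comment explains. Assuming the standard Gradle wrapper setup, the effect can be checked by running `./gradlew dependencies` in an affected subproject and confirming that all three org.scala-lang artifacts resolve to the forced version.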

repl/build.gradle

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,8 @@ dependencies {
   testImplementation("org.apache.spark:spark-repl_${scalaBaseVersion}:${sparkVersion}")
   testImplementation("org.scalatest:scalatest_${scalaBaseVersion}:${scalaTestVersion}")
   testImplementation("junit:junit:4.11")
+  testImplementation(project(":sparkling-water-core"))
+  testImplementation(project(path: ':sparkling-water-core', configuration: 'testArchives'))
 }
 
 defineStandardPublication().call()

repl/src/main/scala/ai/h2o/sparkling/repl/BaseH2OInterpreter.scala

Lines changed: 3 additions & 2 deletions
@@ -29,6 +29,7 @@ import scala.Predef.{println => _}
 import scala.annotation.tailrec
 import scala.collection.mutable
 import scala.language.{existentials, implicitConversions, postfixOps}
+import scala.reflect.classTag
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter.{Results => IR, _}

@@ -110,7 +111,7 @@ private[repl] abstract class BaseH2OInterpreter(val sparkContext: SparkContext,
   }
 
   private def initializeInterpreter(): Unit = {
-    settings = createSettings()
+    settings = createSettings(classTag[BaseH2OInterpreter].runtimeClass.getClassLoader)
     intp = createInterpreter()
     val spark = SparkSessionUtils.active
     addThunk(intp.beQuietDuring {

@@ -142,7 +143,7 @@ private[repl] abstract class BaseH2OInterpreter(val sparkContext: SparkContext,
   /**
    * Initialize the compiler settings
    */
-  protected def createSettings(): Settings
+  protected def createSettings(classLoader: ClassLoader): Settings
 
   /**
    * Run all thunks after the interpreter has been initialized and throw exception if anything went wrong
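
The change above is the heart of the workaround: the class loader is now resolved at the call site and passed into createSettings explicitly. A minimal, self-contained sketch of the classTag lookup used here, with `Example` as a hypothetical stand-in for BaseH2OInterpreter:

    import scala.reflect.classTag

    class Example

    object ClassTagSketch extends App {
      // classTag[T].runtimeClass yields the erased Class[_] for T; the loader
      // that defined that class is a sensible default to hand to the compiler
      // settings.
      val loader: ClassLoader = classTag[Example].runtimeClass.getClassLoader
      println(loader)
    }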

repl/src/main/scala/ai/h2o/sparkling/repl/H2OInterpreter.scala

Lines changed: 30 additions & 35 deletions
@@ -21,20 +21,19 @@
   */
 package ai.h2o.sparkling.repl
 
-import java.io.File
-import java.net.URI
-
 import org.apache.spark.{SparkConf, SparkContext}
 
+import java.io.File
+import java.net.URI
+import scala.io.Source
 import scala.language.{existentials, implicitConversions, postfixOps}
-import scala.reflect._
 import scala.tools.nsc._
 
 /**
   * H2O Interpreter which is use to interpret scala code
   *
   * @param sparkContext spark context
-  * @param hc H2OContext
+  * @param hc           H2OContext
   * @param sessionId session ID for interpreter
   */
 class H2OInterpreter(sparkContext: SparkContext, hc: Any, sessionId: Int)

@@ -44,51 +43,47 @@ class H2OInterpreter(sparkContext: SparkContext, hc: Any, sessionId: Int)
     H2OIMain.createInterpreter(sparkContext, settings, responseWriter, sessionId)
   }
 
-  override def createSettings(): Settings = {
-    val settings = new Settings(echo)
-    // prevent each repl line from being run in a new thread
-    settings.Yreplsync.value = true
+  override def createSettings(classLoader: ClassLoader): Settings = {
+    val result = new Settings(echo)
 
     // Check if app.class.path resource on given classloader is set. In case it exists, set it as classpath
     // ( instead of using java class path right away)
     // This solves problem explained here: https://gist.github.com/harrah/404272
-    settings.usejavacp.value = true
-    val loader = classTag[H2OInterpreter].runtimeClass.getClassLoader
-    val method =
-      settings.getClass.getSuperclass.getDeclaredMethod("getClasspath", classOf[String], classOf[ClassLoader])
-    method.setAccessible(true)
-    if (method.invoke(settings, "app", loader).asInstanceOf[Option[String]].isDefined) {
-      settings.usejavacp.value = false
-      settings.embeddedDefaults(loader)
-    }
-
-    val conf = sparkContext.getConf
-    val jars = getJarsForShell(conf)
+    val appClassPath = getClassLoaderAppClassPath(classLoader)
+    val useJavaCpArg = if (appClassPath.isEmpty) Some("-usejavacp") else None
+    val classPathArg =
+      appClassPath
+        .orElse(getReplLocalJars(sparkContext.getConf))
+        .map(classPathValue => List[String]("-classpath", classPathValue))
 
     val interpArguments = List(
+      "-Yrepl-sync", // prevent each repl line from being run in a new thread
       "-Yrepl-class-based", // ensure that lines in REPL are wrapped in the classes instead of objects
       "-Yrepl-outdir",
-      s"${H2OInterpreter.classOutputDirectory.getAbsolutePath}",
-      "-classpath",
-      jars)
+      s"${H2OInterpreter.classOutputDirectory.getAbsolutePath}") ++ useJavaCpArg ++ classPathArg.getOrElse(List.empty)
 
-    settings.processArguments(interpArguments, processAll = true)
+    result.processArguments(interpArguments, processAll = true)
 
-    settings
+    result
   }
 
-  private def getJarsForShell(conf: SparkConf): String = {
+  private def getClassLoaderAppClassPath(runtimeClassLoader: ClassLoader): Option[String] =
+    for {
+      classLoader <- Option(runtimeClassLoader)
+      appClassPathResource <- Option(classLoader.getResource("app.class.path"))
+      appClassPath = Source.fromURL(appClassPathResource)
+    } yield appClassPath.mkString
+
+  private def getReplLocalJars(conf: SparkConf): Option[String] = {
     val localJars = conf.getOption("spark.repl.local.jars")
-    val jarPaths = localJars.getOrElse("").split(",")
-    jarPaths
-      .map { path =>
-        // Remove file:///, file:// or file:/ scheme if exists for each jar
-        if (path.startsWith("file:")) new File(new URI(path)).getPath else path
-      }
-      .mkString(File.pathSeparator)
+    val jarPaths = localJars.map(_.split(",").toSeq)
+    jarPaths.map(_.map { path =>
+      // Remove file:///, file:// or file:/ scheme if exists for each jar
+      if (path.startsWith("file:")) new File(new URI(path)).getPath else path
+    }.mkString(File.pathSeparator))
   }
 }
 
 object H2OInterpreter {
-  def classOutputDirectory = H2OIMain.classOutputDirectory
+  def classOutputDirectory: File = H2OIMain.classOutputDirectory
 }
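
The rewrite above drops the reflective getDeclaredMethod("getClasspath", ...) hack, presumably the code hit by the compiler API breaking change named in the commit title, and instead reads the app.class.path resource directly, passing everything to Settings as ordinary command-line arguments. A minimal runnable sketch of the Option-based lookup pattern behind getClassLoaderAppClassPath:

    import scala.io.Source

    object AppClassPathSketch extends App {
      // Each generator short-circuits to None when the loader or the resource
      // is absent, so the happy path needs no null checks or try/catch.
      def appClassPath(loader: ClassLoader): Option[String] =
        for {
          cl <- Option(loader)
          res <- Option(cl.getResource("app.class.path"))
        } yield Source.fromURL(res).mkString

      // Prints None unless an app.class.path resource is on the classpath.
      println(appClassPath(getClass.getClassLoader))
    }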
repl/src/test/scala/ai/h2o/sparkling/repl/H2OInterpreterTestSuite.scala

Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package ai.h2o.sparkling.repl
+
+import ai.h2o.sparkling.SharedH2OTestContext
+import org.apache.spark.sql.SparkSession
+import org.junit.runner.RunWith
+import org.scalatest._
+import org.scalatest.junit.JUnitRunner
+
+import java.net.URL
+import java.nio.file.Files
+
+@RunWith(classOf[JUnitRunner])
+class H2OInterpreterTestSuite extends FunSuite with SharedH2OTestContext with Matchers {
+
+  private val replLocalJarsPrefix = "spark-repl-local-jars"
+
+  override def createSparkSession(): SparkSession = {
+    val tempReplJarsDir = tempDirNotEmpty(replLocalJarsPrefix).toUri.toString
+    val conf = defaultSparkConf.set("spark.repl.local.jars", tempReplJarsDir)
+    sparkSession("local[*]", conf)
+  }
+
+  private var objectUnderTest: H2OInterpreter = _
+
+  override def beforeAll(): Unit = {
+    super.beforeAll()
+    objectUnderTest = new H2OInterpreter(sc, hc, 1)
+  }
+
+  test("should set correct repl related settings") {
+    val result = objectUnderTest.createSettings(classLoaderStub(hasAppClassPath = false))
+    result.Yreplsync.value shouldBe true
+    result.Yreplclassbased.value shouldBe true
+    result.Yreploutdir.value shouldBe H2OInterpreter.classOutputDirectory.getAbsolutePath
+  }
+
+  test("should use spark.repl.local.jars if available and app.class.path is not set") {
+    val result = objectUnderTest.createSettings(classLoaderStub(hasAppClassPath = false))
+    result.usejavacp.value shouldBe true
+    result.classpath.value should include(replLocalJarsPrefix)
+  }
+
+  test("should not set javacp if app.class.path is set") {
+    val tmpFilePrefix = "app-class-path"
+    val result = objectUnderTest.createSettings(
+      classLoaderStub(hasAppClassPath = true, tempDirNotEmpty(tmpFilePrefix).toUri.toURL))
+    result.usejavacp.value shouldBe false
+    result.classpath.value should startWith(tmpFilePrefix)
+  }
+
+  private def classLoaderStub(hasAppClassPath: Boolean, classPathUrl: URL = null) = {
+    new ClassLoader() {
+      override def getResource(name: String): URL =
+        if (name == "app.class.path" && hasAppClassPath) classPathUrl else null
+    }
+  }
+
+  private def tempDirNotEmpty(filePrefix: String) = {
+    val tempDir = Files.createTempDirectory(filePrefix)
+    Files.createTempFile(tempDir, filePrefix, "-file")
+    tempDir
+  }
+}
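
A note on the test design above: the ClassLoader stub overrides only getResource and answers solely for the app.class.path key, so the suite can switch between the -usejavacp and -classpath strategies without depending on the real runtime classpath. The temporary directories are deliberately created non-empty, so both spark.repl.local.jars and the stubbed resource URL point at something that exists.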

repl/src/test/scala/ai/h2o/sparkling/repl/PatchUtilsTestSuite.scala

Lines changed: 0 additions & 4 deletions
@@ -47,10 +47,6 @@ class PatchUtilsTestSuite extends FunSuite with BeforeAndAfterAll {
   }
 
   test("[SW-386] Test patched OuterScopes") {
-    val regexBeforePatch = getRegexp
-    // Default regexp fails for our class names with intp_id prefix
-    assertMatch(regexBeforePatch, EXAMPLE_CLASS_NAME, FAILED_MATCH)
-
     PatchUtils.PatchManager.patch("SW-386", Thread.currentThread().getContextClassLoader)
 
     val regexAfterPatch = getRegexp
