@@ -24,7 +24,6 @@ import scala.Predef.{println => _, _}
 // scalastyle:on println
 import scala.concurrent.Future
 import scala.reflect.classTag
-import scala.reflect.internal.util.ScalaClassLoader.savingContextLoader
 import scala.reflect.io.File
 import scala.tools.nsc.{GenericRunnerSettings, Properties}
 import scala.tools.nsc.Settings
@@ -33,7 +32,7 @@ import scala.tools.nsc.interpreter.{AbstractOrMissingHandler, ILoop, IMain, JPri
 import scala.tools.nsc.interpreter.{NamedParam, SimpleReader, SplashLoop, SplashReader}
 import scala.tools.nsc.interpreter.StdReplTags.tagOfIMain
 import scala.tools.nsc.util.stringFromStream
-import scala.util.Properties.{javaVersion, javaVmName, versionString}
+import scala.util.Properties.{javaVersion, javaVmName, versionNumberString, versionString}

 /**
  * A Spark-specific interactive shell.
@@ -43,10 +42,32 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
   def this() = this(None, new JPrintWriter(Console.out, true))

+  /**
+   * TODO: Remove the following `override` when support for Scala 2.11 is ended.
+   * Scala 2.11 has a bug in finding imported types in class constructors and extends clauses,
+   * which is fixed in Scala 2.12 but was never back-ported to Scala 2.11.x.
+   * As a result, we copied the fixes into `SparkILoopInterpreter`. See SPARK-22393 for details.
+   */
   override def createInterpreter(): Unit = {
-    intp = new SparkILoopInterpreter(settings, out)
+    if (isScala2_11) {
+      if (addedClasspath != "") {
+        settings.classpath append addedClasspath
+      }
+      // scalastyle:off classforname
+      // Have to use the default classloader to match the one used in
+      // `classOf[Settings]` and `classOf[JPrintWriter]`.
+      intp = Class.forName("org.apache.spark.repl.SparkILoopInterpreter")
+        .getDeclaredConstructor(Seq(classOf[Settings], classOf[JPrintWriter]): _*)
+        .newInstance(Seq(settings, out): _*)
+        .asInstanceOf[IMain]
+      // scalastyle:on classforname
+    } else {
+      super.createInterpreter()
+    }
   }

+  private val isScala2_11 = versionNumberString.startsWith("2.11")
+
   val initializationCommands: Seq[String] = Seq(
     """
     @transient val spark = if (org.apache.spark.repl.Main.sparkSession != null) {
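For reference, the change to `createInterpreter` above uses the standard `Class.forName` / `getDeclaredConstructor` / `newInstance` pattern so the 2.11-only interpreter class is resolved at runtime through the default classloader. A minimal standalone sketch of that pattern (the target class name `com.example.MyInterpreter` is purely illustrative, not part of this change):

```scala
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{IMain, JPrintWriter}

// Minimal sketch: construct an IMain subclass reflectively through its
// (Settings, JPrintWriter) constructor. "com.example.MyInterpreter" is a
// hypothetical class name used only for illustration.
def reflectiveInterpreter(settings: Settings, out: JPrintWriter): IMain =
  Class.forName("com.example.MyInterpreter")
    .getDeclaredConstructor(classOf[Settings], classOf[JPrintWriter])
    .newInstance(settings, out)
    .asInstanceOf[IMain]
```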
@@ -124,6 +145,26 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
     super.replay()
   }

+  /**
+   * TODO: Remove `runClosure` when support for Scala 2.11 is ended.
+   */
+  private def runClosure(body: => Boolean): Boolean = {
+    if (isScala2_11) {
+      // In Scala 2.11, there is a bug where `interpret` may set the current thread's
+      // context classloader but fail to reset it to its previous state when returning
+      // from that method. This is fixed by SI-8521 (https://github.com/scala/scala/pull/5657),
+      // which was never back-ported to Scala 2.11.x. The following is a workaround.
+      val original = Thread.currentThread().getContextClassLoader
+      try {
+        body
+      } finally {
+        Thread.currentThread().setContextClassLoader(original)
+      }
+    } else {
+      body
+    }
+  }
+
   /**
    * The following code is mostly a copy of the `process` implementation in `ILoop.scala` in Scala
    *
@@ -138,7 +179,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
    * We should remove this duplication once Scala provides a way to load our custom initialization
    * code, and also customize the ordering of printing the welcome message.
    */
-  override def process(settings: Settings): Boolean = savingContextLoader {
+  override def process(settings: Settings): Boolean = runClosure {

     def newReader = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true))
 }