
Commit b79d362

Merge pull request apache#80 from Willymontaz/FIX_BUILD_SCALA_2.11.12
Fix scala 2.11.12 build

2 parents 16ee18c + ba07914, commit b79d362

File tree

7 files changed (+395, -56 lines)


LICENSE

Lines changed: 6 additions & 6 deletions

@@ -242,18 +242,18 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
     (BSD licence) ANTLR ST4 4.0.4 (org.antlr:ST4:4.0.4 - http://www.stringtemplate.org)
     (BSD licence) ANTLR StringTemplate (org.antlr:stringtemplate:3.2.1 - http://www.stringtemplate.org)
     (BSD License) Javolution (javolution:javolution:5.5.1 - http://javolution.org)
-    (BSD) JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
+    (BSD) JLine (jline:jline:2.14.3 - https://github.com/jline/jline2)
     (BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.3 - http://paranamer.codehaus.org/paranamer)
     (BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.6 - http://paranamer.codehaus.org/paranamer)
     (BSD 3 Clause) Scala (http://www.scala-lang.org/download/#License)
     (Interpreter classes (all .scala files in repl/src/main/scala
     except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
     and for SerializableMapWrapper in JavaUtils.scala)
-    (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
-    (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
-    (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
-    (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
-    (BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
+    (BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.12 - http://www.scala-lang.org/)
+    (BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.12 - http://www.scala-lang.org/)
+    (BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.12 - http://www.scala-lang.org/)
+    (BSD-like) Scala Library (org.scala-lang:scala-library:2.11.12 - http://www.scala-lang.org/)
+    (BSD-like) Scalap (org.scala-lang:scalap:2.11.12 - http://www.scala-lang.org/)
     (BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
     (BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
     (BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)

dev/deps/spark-deps-hadoop-2.6

Lines changed: 4 additions & 4 deletions

@@ -158,12 +158,12 @@ pmml-schema-1.2.15.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.7.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
 scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-reflect-2.11.12.jar
 scala-xml_2.11-1.0.2.jar
-scalap-2.11.8.jar
+scalap-2.11.12.jar
 shapeless_2.11-2.3.2.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar

dev/deps/spark-deps-hadoop-2.7

Lines changed: 4 additions & 4 deletions

@@ -159,12 +159,12 @@ pmml-schema-1.2.15.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.7.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
 scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
+scala-reflect-2.11.12.jar
 scala-xml_2.11-1.0.2.jar
-scalap-2.11.8.jar
+scalap-2.11.12.jar
 shapeless_2.11-2.3.2.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
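
These manifest updates only track the renamed jars; the underlying change is a single Scala version bump in the build. As a rough illustration, a hypothetical sbt fragment (Spark's build is Maven-based, so this is only a sketch of the idea, not the actual change):

// Hypothetical sbt sketch: bumping the Scala patch version once is what
// renames all of the scala-*.jar entries listed above.
scalaVersion := "2.11.12"

// Compiler-adjacent artifacts are pinned to the same version so that
// scala-compiler, scala-reflect and scalap stay in lockstep.
libraryDependencies ++= Seq(
  "org.scala-lang" % "scala-reflect" % scalaVersion.value,
  "org.scala-lang" % "scala-compiler" % scalaVersion.value
)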

dev/deps/spark-deps-hadoop-3.1

Whitespace-only changes.
repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala (new file)

Lines changed: 74 additions & 0 deletions

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.repl

import scala.tools.nsc.interpreter.{ExprTyper, IR}

trait SparkExprTyper extends ExprTyper {

  import repl._
  import global.{reporter => _, Import => _, _}
  import naming.freshInternalVarName

  def doInterpret(code: String): IR.Result = {
    // interpret/interpretSynthetic may change the phase,
    // which would have unintended effects on types.
    val savedPhase = phase
    try interpretSynthetic(code) finally phase = savedPhase
  }

  override def symbolOfLine(code: String): Symbol = {
    def asExpr(): Symbol = {
      val name = freshInternalVarName()
      // Typing it with a lazy val would give us the right type, but runs
      // into compiler bugs with things like existentials, so we compile it
      // behind a def and strip the NullaryMethodType which wraps the expr.
      val line = "def " + name + " = " + code

      doInterpret(line) match {
        case IR.Success =>
          val sym0 = symbolOfTerm(name)
          // drop NullaryMethodType
          sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType)
        case _ => NoSymbol
      }
    }

    def asDefn(): Symbol = {
      val old = repl.definedSymbolList.toSet

      doInterpret(code) match {
        case IR.Success =>
          repl.definedSymbolList filterNot old match {
            case Nil => NoSymbol
            case sym :: Nil => sym
            case syms => NoSymbol.newOverloaded(NoPrefix, syms)
          }
        case _ => NoSymbol
      }
    }

    def asError(): Symbol = {
      doInterpret(code)
      NoSymbol
    }

    beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
  }

}
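
The `doInterpret` helper above exists because interpreting a line can move the compiler's current phase, which would skew later type queries. A minimal, self-contained sketch of that same save/restore pattern (the names here are illustrative, not part of the REPL API):

// Illustrative sketch of the pattern used by doInterpret: temporarily mutate
// shared state, then restore it in a finally block so the change never leaks,
// even if the body throws.
object SavedPhaseDemo {
  // Stands in for the compiler's mutable global phase.
  var phase: String = "typer"

  def withSavedPhase[A](body: => A): A = {
    val savedPhase = phase
    try body finally phase = savedPhase
  }

  def main(args: Array[String]): Unit = {
    withSavedPhase {
      phase = "erasure" // interpretSynthetic can move the phase like this
      println(s"inside: $phase")
    }
    println(s"after: $phase") // back to "typer", restored by the finally
  }
}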

repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala

Lines changed: 54 additions & 42 deletions

@@ -19,7 +19,9 @@ package org.apache.spark.repl

 import java.io.BufferedReader

+// scalastyle:off println
 import scala.Predef.{println => _, _}
+// scalastyle:on println
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
 import scala.tools.nsc.util.stringFromStream
@@ -33,39 +35,53 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
   def this() = this(None, new JPrintWriter(Console.out, true))

-  def initializeSpark() {
-    intp.beQuietDuring {
-      processLine("""
-        @transient val spark = if (org.apache.spark.repl.Main.sparkSession != null) {
-            org.apache.spark.repl.Main.sparkSession
-          } else {
-            org.apache.spark.repl.Main.createSparkSession()
-          }
-        @transient val sc = {
-          val _sc = spark.sparkContext
-          if (_sc.getConf.getBoolean("spark.ui.reverseProxy", false)) {
-            val proxyUrl = _sc.getConf.get("spark.ui.reverseProxyUrl", null)
-            if (proxyUrl != null) {
-              println(s"Spark Context Web UI is available at ${proxyUrl}/proxy/${_sc.applicationId}")
-            } else {
-              println(s"Spark Context Web UI is available at Spark Master Public URL")
-            }
-          } else {
-            _sc.uiWebUrl.foreach {
-              webUrl => println(s"Spark context Web UI available at ${webUrl}")
-            }
-          }
-          println("Spark context available as 'sc' " +
-            s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
-          println("Spark session available as 'spark'.")
-          _sc
+  override def createInterpreter(): Unit = {
+    intp = new SparkILoopInterpreter(settings, out, initializeSpark)
+  }
+
+  val initializationCommands: Seq[String] = Seq(
+    """
+    @transient val spark = if (org.apache.spark.repl.Main.sparkSession != null) {
+        org.apache.spark.repl.Main.sparkSession
+      } else {
+        org.apache.spark.repl.Main.createSparkSession()
+      }
+    @transient val sc = {
+      val _sc = spark.sparkContext
+      if (_sc.getConf.getBoolean("spark.ui.reverseProxy", false)) {
+        val proxyUrl = _sc.getConf.get("spark.ui.reverseProxyUrl", null)
+        if (proxyUrl != null) {
+          println(
+            s"Spark Context Web UI is available at ${proxyUrl}/proxy/${_sc.applicationId}")
+        } else {
+          println(s"Spark Context Web UI is available at Spark Master Public URL")
         }
-        """)
-      processLine("import org.apache.spark.SparkContext._")
-      processLine("import spark.implicits._")
-      processLine("import spark.sql")
-      processLine("import org.apache.spark.sql.functions._")
-      replayCommandStack = Nil // remove above commands from session history.
+      } else {
+        _sc.uiWebUrl.foreach {
+          webUrl => println(s"Spark context Web UI available at ${webUrl}")
+        }
+      }
+      println("Spark context available as 'sc' " +
+        s"(master = ${_sc.master}, app id = ${_sc.applicationId}).")
+      println("Spark session available as 'spark'.")
+      _sc
+    }
+    """,
+    "import org.apache.spark.SparkContext._",
+    "import spark.implicits._",
+    "import spark.sql",
+    "import org.apache.spark.sql.functions._"
+  )
+
+  def initializeSpark(): Unit = {
+    if (!intp.reporter.hasErrors) {
+      // `savingReplayStack` removes the commands from session history.
+      savingReplayStack {
+        initializationCommands.foreach(intp quietRun _)
+      }
+    } else {
+      throw new RuntimeException(s"Scala $versionString interpreter encountered " +
+        "errors during initialization")
     }
   }

@@ -89,21 +105,17 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   /** Available commands */
   override def commands: List[LoopCommand] = standardCommands

-  /**
-   * We override `loadFiles` because we need to initialize Spark *before* the REPL
-   * sees any files, so that the Spark context is visible in those files. This is a bit of a
-   * hack, but there isn't another hook available to us at this point.
-   */
-  override def loadFiles(settings: Settings): Unit = {
-    initializeSpark()
-    super.loadFiles(settings)
-  }
-
   override def resetCommand(line: String): Unit = {
     super.resetCommand(line)
     initializeSpark()
     echo("Note that after :reset, state of SparkSession and SparkContext is unchanged.")
   }
+
+  override def replay(): Unit = {
+    initializeSpark()
+    super.replay()
+  }
+
 }

 object SparkILoop {
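
The net change in this file: the eager `loadFiles` override is gone, and `initializeSpark` is instead handed to `SparkILoopInterpreter` as a callback, so the interpreter triggers Spark setup once it is actually ready (with `resetCommand` and `replay` re-running it). A stripped-down, hypothetical sketch of that inversion (only the pattern; these are not Spark's actual classes):

// Hypothetical sketch of the initialization inversion in this diff: the loop
// hands the interpreter an init callback instead of overriding loadFiles.
class Interpreter(onReady: () => Unit) {
  def start(): Unit = {
    // ... compiler/classloader setup would happen here ...
    onReady() // run session bootstrap only once the interpreter is usable
  }
}

class Loop {
  def initialize(): Unit = println("Spark session available as 'spark'.")
  def createInterpreter(): Interpreter = new Interpreter(() => initialize())
}

object LoopDemo extends App {
  (new Loop).createInterpreter().start()
}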
