Skip to content

Commit c06f88c

Browse files
Merge remote-tracking branch 'upstream/master' into SPARK-3278
2 parents 6046550 + 080ceb7 commit c06f88c

File tree

680 files changed

+17063
-6611
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

680 files changed

+17063
-6611
lines changed

.gitignore

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
*.ipr
66
*.iml
77
*.iws
8+
*.pyc
89
.idea/
910
.idea_modules/
1011
sbt/*.jar
@@ -49,9 +50,12 @@ dependency-reduced-pom.xml
4950
checkpoint
5051
derby.log
5152
dist/
52-
spark-*-bin.tar.gz
53+
dev/create-release/*txt
54+
dev/create-release/*final
55+
spark-*-bin-*.tgz
5356
unit-tests.log
5457
/lib/
58+
ec2/lib/
5559
rat-results.txt
5660
scalastyle.txt
5761
scalastyle-output.xml

.rat-excludes

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,3 +64,4 @@ dist/*
6464
logs
6565
.*scalastyle-output.xml
6666
.*dependency-reduced-pom.xml
67+
known_translations

LICENSE

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -646,7 +646,8 @@ THE SOFTWARE.
646646

647647
========================================================================
648648
For Scala Interpreter classes (all .scala files in repl/src/main/scala
649-
except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala):
649+
except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
650+
and for SerializableMapWrapper in JavaUtils.scala:
650651
========================================================================
651652

652653
Copyright (c) 2002-2013 EPFL

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ To build Spark and its example programs, run:
2626

2727
(You do not need to do this if you downloaded a pre-built package.)
2828
More detailed documentation is available from the project site, at
29-
["Building Spark with Maven"](http://spark.apache.org/docs/latest/building-with-maven.html).
29+
["Building Spark with Maven"](http://spark.apache.org/docs/latest/building-spark.html).
3030

3131
## Interactive Scala Shell
3232

assembly/pom.xml

Lines changed: 0 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -169,16 +169,6 @@
169169
</build>
170170

171171
<profiles>
172-
<profile>
173-
<id>yarn-alpha</id>
174-
<dependencies>
175-
<dependency>
176-
<groupId>org.apache.spark</groupId>
177-
<artifactId>spark-yarn-alpha_${scala.binary.version}</artifactId>
178-
<version>${project.version}</version>
179-
</dependency>
180-
</dependencies>
181-
</profile>
182172
<profile>
183173
<id>yarn</id>
184174
<dependencies>

bin/beeline.cmd

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
@echo off
2+
3+
rem
4+
rem Licensed to the Apache Software Foundation (ASF) under one or more
5+
rem contributor license agreements. See the NOTICE file distributed with
6+
rem this work for additional information regarding copyright ownership.
7+
rem The ASF licenses this file to You under the Apache License, Version 2.0
8+
rem (the "License"); you may not use this file except in compliance with
9+
rem the License. You may obtain a copy of the License at
10+
rem
11+
rem http://www.apache.org/licenses/LICENSE-2.0
12+
rem
13+
rem Unless required by applicable law or agreed to in writing, software
14+
rem distributed under the License is distributed on an "AS IS" BASIS,
15+
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16+
rem See the License for the specific language governing permissions and
17+
rem limitations under the License.
18+
rem
19+
20+
set SPARK_HOME=%~dp0..
21+
cmd /V /E /C %SPARK_HOME%\bin\spark-class.cmd org.apache.hive.beeline.BeeLine %*

bin/compute-classpath.sh

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,11 @@ FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
2525

2626
. "$FWDIR"/bin/load-spark-env.sh
2727

28-
CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH"
28+
if [ -n "$SPARK_CLASSPATH" ]; then
29+
CLASSPATH="$SPARK_CLASSPATH:$SPARK_SUBMIT_CLASSPATH"
30+
else
31+
CLASSPATH="$SPARK_SUBMIT_CLASSPATH"
32+
fi
2933

3034
# Build up classpath
3135
if [ -n "$SPARK_CONF_DIR" ]; then
@@ -68,14 +72,14 @@ else
6872
assembly_folder="$ASSEMBLY_DIR"
6973
fi
7074

71-
num_jars="$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar" | wc -l)"
75+
num_jars="$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*\.jar$" | wc -l)"
7276
if [ "$num_jars" -eq "0" ]; then
7377
echo "Failed to find Spark assembly in $assembly_folder"
7478
echo "You need to build Spark before running this program."
7579
exit 1
7680
fi
7781
if [ "$num_jars" -gt "1" ]; then
78-
jars_list=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*.jar")
82+
jars_list=$(ls "$assembly_folder" | grep "spark-assembly.*hadoop.*.jar$")
7983
echo "Found multiple Spark assembly jars in $assembly_folder:"
8084
echo "$jars_list"
8185
echo "Please remove all but one jar."
@@ -108,7 +112,7 @@ else
108112
datanucleus_dir="$FWDIR"/lib_managed/jars
109113
fi
110114

111-
datanucleus_jars="$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\.jar")"
115+
datanucleus_jars="$(find "$datanucleus_dir" 2>/dev/null | grep "datanucleus-.*\\.jar$")"
112116
datanucleus_jars="$(echo "$datanucleus_jars" | tr "\n" : | sed s/:$//g)"
113117

114118
if [ -n "$datanucleus_jars" ]; then

bin/spark-shell

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,13 @@ source "$FWDIR"/bin/utils.sh
4545
SUBMIT_USAGE_FUNCTION=usage
4646
gatherSparkSubmitOpts "$@"
4747

48+
# SPARK-4161: scala does not assume use of the java classpath,
49+
# so we need to add the "-Dscala.usejavacp=true" flag manually. We
50+
# do this specifically for the Spark shell because the scala REPL
51+
# has its own class loader, and any additional classpath specified
52+
# through spark.driver.extraClassPath is not automatically propagated.
53+
SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Dscala.usejavacp=true"
54+
4855
function main() {
4956
if $cygwin; then
5057
# Workaround for issue involving JLine and Cygwin

bin/spark-shell2.cmd

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,23 @@ rem
1919

2020
set SPARK_HOME=%~dp0..
2121

22-
cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %* spark-shell
22+
echo "%*" | findstr " --help -h" >nul
23+
if %ERRORLEVEL% equ 0 (
24+
call :usage
25+
exit /b 0
26+
)
27+
28+
call %SPARK_HOME%\bin\windows-utils.cmd %*
29+
if %ERRORLEVEL% equ 1 (
30+
call :usage
31+
exit /b 1
32+
)
33+
34+
cmd /V /E /C %SPARK_HOME%\bin\spark-submit.cmd --class org.apache.spark.repl.Main %SUBMISSION_OPTS% spark-shell %APPLICATION_OPTS%
35+
36+
exit /b 0
37+
38+
:usage
39+
echo "Usage: .\bin\spark-shell.cmd [options]" >&2
40+
%SPARK_HOME%\bin\spark-submit --help 2>&1 | findstr /V "Usage" 1>&2
41+
exit /b 0

bin/spark-sql

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,8 @@
2323
# Enter posix mode for bash
2424
set -o posix
2525

26+
# NOTE: This exact class name is matched downstream by SparkSubmit.
27+
# Any changes need to be reflected there.
2628
CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
2729

2830
# Figure out where Spark is installed

0 commit comments

Comments
 (0)