|
17 | 17 | # limitations under the License. |
18 | 18 | # |
19 | 19 |
|
20 | | -cygwin=false |
21 | | -case "`uname`" in |
22 | | - CYGWIN*) cygwin=true;; |
23 | | -esac |
24 | | - |
25 | 20 | SCALA_VERSION=2.10 |
26 | 21 |
|
27 | | -# Figure out where the Scala framework is installed |
28 | 22 | FWDIR="$(cd `dirname $0`/..; pwd)" |
29 | | - |
30 | | -# Export this as SPARK_HOME |
31 | 23 | export SPARK_HOME="$FWDIR" |
32 | | - |
33 | | -. $FWDIR/bin/load-spark-env.sh |
34 | | - |
35 | | -if [ -z "$1" ]; then |
36 | | - echo "Usage: run-example <example-class> [<args>]" >&2 |
37 | | - exit 1 |
38 | | -fi |
39 | | - |
40 | | -# Figure out the JAR file that our examples were packaged into. This includes a bit of a hack |
41 | | -# to avoid the -sources and -doc packages that are built by publish-local. |
42 | 24 | EXAMPLES_DIR="$FWDIR"/examples |
43 | 25 |
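For reference, the FWDIR line kept above resolves the directory one level up from the script, independent of the caller's working directory. A minimal standalone sketch of the same idiom (SELF_HOME is a hypothetical name, not part of this script):

    #!/usr/bin/env bash
    # dirname "$0" is the script's own directory; cd to its parent, then
    # pwd prints that parent as an absolute path.
    SELF_HOME="$(cd "$(dirname "$0")"/.. && pwd)"
    echo "$SELF_HOME"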
|
44 | 26 | if [ -f "$FWDIR/RELEASE" ]; then |
|
49 | 31 |
|
50 | 32 | if [[ -z $SPARK_EXAMPLES_JAR ]]; then |
51 | 33 | echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" >&2 |
52 | | - echo "You need to build Spark with sbt/sbt assembly before running this program" >&2 |
| 34 | + echo "You need to build Spark before running this program" >&2 |
53 | 35 | exit 1 |
54 | 36 | fi |
55 | 37 |
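The removed error message named the concrete build step; per that removed line, the examples assembly this check looks for was produced with:

    # Build Spark plus the examples assembly jar (from the removed message above):
    sbt/sbt assembly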
|
| 38 | +EXAMPLE_MASTER=${MASTER:-"local[*]"} |
56 | 39 |
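The new EXAMPLE_MASTER line relies on shell default expansion: ${MASTER:-"local[*]"} yields $MASTER when it is set and non-empty, and local[*] otherwise. A quick sketch of that behavior (spark://host:7077 is a placeholder URL):

    unset MASTER
    echo "${MASTER:-local[*]}"    # MASTER unset -> prints: local[*]
    MASTER="spark://host:7077"
    echo "${MASTER:-local[*]}"    # MASTER set   -> prints: spark://host:7077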
|
57 | | -# Since the examples JAR ideally shouldn't include spark-core (that dependency should be |
58 | | -# "provided"), also add our standard Spark classpath, built using compute-classpath.sh. |
59 | | -CLASSPATH=`$FWDIR/bin/compute-classpath.sh` |
60 | | -CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH" |
61 | | - |
62 | | -if $cygwin; then |
63 | | - CLASSPATH=`cygpath -wp $CLASSPATH` |
64 | | - export SPARK_EXAMPLES_JAR=`cygpath -w $SPARK_EXAMPLES_JAR` |
65 | | -fi |
66 | | - |
67 | | -# Find java binary |
68 | | -if [ -n "${JAVA_HOME}" ]; then |
69 | | - RUNNER="${JAVA_HOME}/bin/java" |
70 | | -else |
71 | | - if [ `command -v java` ]; then |
72 | | - RUNNER="java" |
73 | | - else |
74 | | - echo "JAVA_HOME is not set" >&2 |
75 | | - exit 1 |
76 | | - fi |
77 | | -fi |
78 | | - |
79 | | -# Set JAVA_OPTS to be able to load native libraries and to set heap size |
80 | | -JAVA_OPTS="$SPARK_JAVA_OPTS" |
81 | | -# Load extra JAVA_OPTS from conf/java-opts, if it exists |
82 | | -if [ -e "$FWDIR/conf/java-opts" ] ; then |
83 | | - JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`" |
| 40 | +if [ -n "$1" ]; then |
| 41 | + EXAMPLE_CLASS="$1" |
| 42 | + shift |
| 43 | +else |
| 44 | + echo "usage: ./bin/run-example <example-class> [example-args]" |
| 45 | + echo " - set MASTER=XX to use a specific master" |
| 46 | + echo " - can use abbreviated example class name (e.g. SparkPi, mllib.MovieLensALS)" |
| 47 | + echo |
| 48 | + exit -1 |
84 | 49 | fi |
85 | | -export JAVA_OPTS |
86 | 50 |
|
87 | | -if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then |
88 | | - echo -n "Spark Command: " |
89 | | - echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" |
90 | | - echo "========================================" |
91 | | - echo |
| 51 | +if [[ ! $EXAMPLE_CLASS == org.apache.spark.examples* ]]; then |
| 52 | + EXAMPLE_CLASS="org.apache.spark.examples.$EXAMPLE_CLASS" |
92 | 53 | fi |
93 | 54 |
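The prefixing block above uses bash [[ ]] pattern matching: an unquoted right-hand side of == acts as a glob, so the prefix is added only when the class name does not already start with org.apache.spark.examples. A standalone sketch:

    EXAMPLE_CLASS="SparkPi"
    # Glob match: true only for names already under org.apache.spark.examples
    if [[ ! $EXAMPLE_CLASS == org.apache.spark.examples* ]]; then
      EXAMPLE_CLASS="org.apache.spark.examples.$EXAMPLE_CLASS"
    fi
    echo "$EXAMPLE_CLASS"    # prints: org.apache.spark.examples.SparkPi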
|
94 | | -exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" |
| 55 | +"$FWDIR"/bin/spark-submit \
| 56 | + --master "$EXAMPLE_MASTER" \
| 57 | + --class "$EXAMPLE_CLASS" \
| 58 | + "$SPARK_EXAMPLES_JAR" \
| 59 | + "$@"
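With this rewrite the script is a thin wrapper over spark-submit; typical invocations implied by the usage text (the argument 100 and the master URL are illustrative):

    # Run SparkPi on the default master, local[*]:
    ./bin/run-example SparkPi 100
    # Abbreviated class name plus an explicit master via the MASTER variable:
    MASTER=spark://host:7077 ./bin/run-example mllib.MovieLensALS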