@@ -33,13 +33,13 @@ export SPARK_HOME="$FWDIR"
 . $FWDIR/bin/load-spark-env.sh
 
 if [ -z "$1" ]; then
-  echo "Usage: spark-class <class> [<args>]" >&2
+  echo "Usage: spark-class <class> [<args>]" 1>&2
   exit 1
 fi
 
 if [ -n "$SPARK_MEM" ]; then
-  echo "Warning: SPARK_MEM is deprecated, please use a more specific config option"
-  echo "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)."
+  echo -e "Warning: SPARK_MEM is deprecated, please use a more specific config option" 1>&2
+  echo -e "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)." 1>&2
 fi
 
 # Use SPARK_MEM or 512m as the default memory, to be overridden by specific options
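This hunk reroutes the usage message and the SPARK_MEM deprecation warning to stderr (1>&2), so callers that capture the script's stdout see only real output. A minimal standalone sketch of the pattern, using an illustrative script rather than spark-class itself:

    #!/usr/bin/env bash
    # Diagnostics go to stderr so command substitution captures only real output.
    if [ -z "$1" ]; then
      echo "Usage: $0 <class> [<args>]" 1>&2
      exit 1
    fi
    echo "Warning: this line is a diagnostic" 1>&2
    echo "this-is-real-output"

Invoked as out=$(./sketch.sh SomeClass), the warning still reaches the terminal while $out holds only this-is-real-output.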
@@ -108,28 +108,11 @@
 export JAVA_OPTS
 # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
 
-if [ ! -f "$FWDIR/RELEASE" ]; then
-  # Exit if the user hasn't compiled Spark
-  num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar" | wc -l)
-  jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar")
-  if [ "$num_jars" -eq "0" ]; then
-    echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
-    echo "You need to build Spark before running this program." >&2
-    exit 1
-  fi
-  if [ "$num_jars" -gt "1" ]; then
-    echo "Found multiple Spark assembly jars in $FWDIR/assembly/target/scala-$SCALA_VERSION:" >&2
-    echo "$jars_list"
-    echo "Please remove all but one jar."
-    exit 1
-  fi
-fi
-
 TOOLS_DIR="$FWDIR"/tools
 SPARK_TOOLS_JAR=""
-if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar ]; then
   # Use the JAR from the SBT build
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar`
 fi
 if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
   # Use the JAR from the Maven build
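Besides deleting the assembly-jar check, this hunk narrows the SBT tools-jar glob from *assembly* to spark-tools*. The idiom it relies on: with an unquoted glob, [ -e pattern ] succeeds only if the pattern expands to an existing file, and the following ls then captures that path. A standalone sketch of the same lookup under a hypothetical directory layout:

    #!/usr/bin/env bash
    # Resolve a single jar by glob; an empty result means "not built yet".
    TOOLS_DIR="./tools"   # hypothetical location, not Spark's real layout
    SPARK_TOOLS_JAR=""
    # With no match the pattern stays literal and -e fails, leaving the variable empty.
    if [ -e "$TOOLS_DIR"/target/spark-tools*.jar ]; then
      SPARK_TOOLS_JAR=$(ls "$TOOLS_DIR"/target/spark-tools*.jar)
    fi
    echo "SPARK_TOOLS_JAR=${SPARK_TOOLS_JAR:-<not found>}"

If several jars match, [ -e ... ] receives multiple arguments and the test errors out rather than succeeding, which is why the removed assembly check counted matches with wc -l before reporting.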
@@ -147,6 +130,11 @@ else
 fi
 
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
+  if test -z "$SPARK_TOOLS_JAR"; then
+    echo "Failed to find Spark Tools Jar in $FWDIR/tools/target/scala-$SCALA_VERSION/" 1>&2
+    echo "You need to build spark before running $1." 1>&2
+    exit 1
+  fi
   CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
 fi
 
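The unconditional build check removed earlier is replaced here by a lazier one: spark-class now complains about a missing tools jar only when the requested class actually lives in org.apache.spark.tools, rather than on every launch. Note that inside [[ ... =~ ... ]] the right-hand side is a regex, so the unescaped dots match any character; the pattern is permissive but harmless for this gate. A reduced sketch of the guard with illustrative values:

    #!/usr/bin/env bash
    SPARK_TOOLS_JAR=""   # pretend the earlier glob lookup found nothing
    CLASSPATH="base.jar"
    if [[ "$1" =~ org.apache.spark.tools.* ]]; then
      if test -z "$SPARK_TOOLS_JAR"; then
        echo "Failed to find Spark Tools Jar" 1>&2
        exit 1
      fi
      CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
    fi
    echo "CLASSPATH=$CLASSPATH"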
@@ -159,10 +147,9 @@
 export CLASSPATH
 
 if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
-  echo -n "Spark Command: "
-  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
-  echo "========================================"
-  echo
+  echo -n "Spark Command: " 1>&2
+  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" 1>&2
+  echo -e "========================================\n" 1>&2
 fi
 
 exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
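With the final hunk, the SPARK_PRINT_LAUNCH_COMMAND banner also goes to stderr, so the exec'd JVM keeps stdout entirely to itself. A hypothetical session to check the behavior (the class name is a real Spark entry point; the captured command is abbreviated):

    # Print the assembled java command without polluting stdout:
    SPARK_PRINT_LAUNCH_COMMAND=1 ./bin/spark-class org.apache.spark.deploy.master.Master 2>launch.log
    # launch.log now begins with:
    #   Spark Command: java -cp ... org.apache.spark.deploy.master.Master
    #   ========================================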