@@ -27,12 +27,12 @@
 SCALA_VERSION=2.10
 
 # Figure out where Spark is installed
-FWDIR="$(cd `dirname $0`/..; pwd)"
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
 
-. $FWDIR/bin/load-spark-env.sh
+. "$FWDIR"/bin/load-spark-env.sh
 
 if [ -z "$1" ]; then
   echo "Usage: spark-class <class> [<args>]" 1>&2
@@ -105,7 +105,7 @@
     exit 1
   fi
 fi
-JAVA_VERSION=$($RUNNER -version 2>&1 | sed 's/java version "\(.*\)\.\(.*\)\..*"/\1\2/; 1q')
+JAVA_VERSION=$("$RUNNER" -version 2>&1 | sed 's/java version "\(.*\)\.\(.*\)\..*"/\1\2/; 1q')
 
 # Set JAVA_OPTS to be able to load native libraries and to set heap size
 if [ "$JAVA_VERSION" -ge 18 ]; then
@@ -117,7 +117,7 @@ JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"
 
 # Load extra JAVA_OPTS from conf/java-opts, if it exists
 if [ -e "$FWDIR/conf/java-opts" ] ; then
-  JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
+  JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
 fi
 
 # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala!
@@ -126,21 +126,21 @@ TOOLS_DIR="$FWDIR"/tools
 SPARK_TOOLS_JAR=""
 if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar ]; then
   # Use the JAR from the SBT build
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar`
+  export SPARK_TOOLS_JAR="`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/spark-tools*[0-9Tg].jar`"
 fi
 if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
   # Use the JAR from the Maven build
   # TODO: this also needs to become an assembly!
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+  export SPARK_TOOLS_JAR="`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`"
 fi
 
 # Compute classpath using external script
-classpath_output=$($FWDIR/bin/compute-classpath.sh)
+classpath_output=$("$FWDIR"/bin/compute-classpath.sh)
 if [[ "$?" != "0" ]]; then
   echo "$classpath_output"
   exit 1
 else
-  CLASSPATH=$classpath_output
+  CLASSPATH="$classpath_output"
 fi
 
 if [[ "$1" =~ org.apache.spark.tools.* ]]; then
@@ -153,9 +153,9 @@ if [[ "$1" =~ org.apache.spark.tools.* ]]; then
 fi
 
 if $cygwin ; then
-  CLASSPATH=`cygpath -wp $CLASSPATH`
+  CLASSPATH="`cygpath -wp "$CLASSPATH"`"
   if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
-    export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
+    export SPARK_TOOLS_JAR="`cygpath -w "$SPARK_TOOLS_JAR"`"
   fi
 fi
 export CLASSPATH
0 commit comments