55 changes: 13 additions & 42 deletions bin/spark-sql
@@ -24,6 +24,7 @@
 set -o posix
 
 CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
+CLASS_NOT_FOUND_EXIT_STATUS=1
 
 # Figure out where Spark is installed
 FWDIR="$(cd `dirname $0`/..; pwd)"
@@ -43,52 +44,22 @@ function usage {
   $FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
 }
 
-function ensure_arg_number {
-  arg_number=$1
-  at_least=$2
-
-  if [[ $arg_number -lt $at_least ]]; then
-    usage
-    exit 1
-  fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
   usage
   exit 0
 fi
 
-CLI_ARGS=()
-SUBMISSION_ARGS=()
-
-while (($#)); do
-  case $1 in
-    -d | --define | --database | -f | -h | --hiveconf | --hivevar | -i | -p)
-      ensure_arg_number $# 2
-      CLI_ARGS+=("$1"); shift
-      CLI_ARGS+=("$1"); shift
-      ;;
-
-    -e)
-      ensure_arg_number $# 2
-      CLI_ARGS+=("$1"); shift
-      CLI_ARGS+=("$1"); shift
-      ;;
-
-    -s | --silent)
-      CLI_ARGS+=("$1"); shift
-      ;;
-
-    -v | --verbose)
-      # Both SparkSubmit and SparkSQLCLIDriver recognizes -v | --verbose
-      CLI_ARGS+=("$1")
-      SUBMISSION_ARGS+=("$1"); shift
-      ;;
-
-    *)
-      SUBMISSION_ARGS+=("$1"); shift
-      ;;
-  esac
-done
+source $FWDIR/bin/utils.sh
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
 
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"
+"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
+
+if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+  echo
+  echo "Failed to load Spark SQL CLI main class $CLASS."
+  echo "You need to build Spark with -Phive."
+fi
+
+exit $exit_status
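
Two notes on the new script body. First, [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]] works with bare variable names because -eq evaluates both operands in arithmetic context, where names are dereferenced; it behaves the same as comparing $exit_status with $CLASS_NOT_FOUND_EXIT_STATUS. Second, the SUBMISSION_OPTS and APPLICATION_OPTS arrays are populated by gatherSparkSubmitOpts in bin/utils.sh, which this diff does not show. A rough sketch of the intended split, with illustrative option values:

    source "$FWDIR/bin/utils.sh"
    SUBMIT_USAGE_FUNCTION=usage
    gatherSparkSubmitOpts --master yarn --database test -e "SELECT 1"
    # Expected outcome (a sketch, not the actual utils.sh implementation):
    #   SUBMISSION_OPTS=(--master yarn)                   # options spark-submit itself accepts
    #   APPLICATION_OPTS=(--database test -e "SELECT 1")  # everything else, forwarded to $CLASS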
14 changes: 13 additions & 1 deletion core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,6 +54,8 @@ object SparkSubmit {
   private val SPARK_SHELL = "spark-shell"
   private val PYSPARK_SHELL = "pyspark-shell"
 
+  private val CLASS_NOT_FOUND_EXIT_STATUS = 1
+
   // Exposed for testing
   private[spark] var exitFn: () => Unit = () => System.exit(-1)
   private[spark] var printStream: PrintStream = System.err
@@ -311,8 +313,18 @@ object SparkSubmit {
       System.setProperty(key, value)
     }
 
-    val mainClass = Class.forName(childMainClass, true, loader)
+    var mainClass: Class[_] = null
+
+    try {
+      mainClass = Class.forName(childMainClass, true, loader)
+    } catch {
+      case e: ClassNotFoundException =>
+        e.printStackTrace(printStream)
+        System.exit(CLASS_NOT_FOUND_EXIT_STATUS)
+    }
+
     val mainMethod = mainClass.getMethod("main", new Array[String](0).getClass)
+
     try {
       mainMethod.invoke(null, childArgs.toArray)
     } catch {
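
Since the shell scripts can only distinguish failure modes by exit code, the Scala change above gives ClassNotFoundException a dedicated, stable status instead of a generic failure. A quick manual check of that contract (the class and jar names below are hypothetical):

    # A main class missing from the classpath should print its stack trace
    # to SparkSubmit's printStream and then exit with status 1.
    ./bin/spark-submit --class com.example.NoSuchClass dummy.jar
    echo $?   # expected: 1, i.e. CLASS_NOT_FOUND_EXIT_STATUS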
39 changes: 13 additions & 26 deletions sbin/start-thriftserver.sh
@@ -27,6 +27,7 @@ set -o posix
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
 CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
+CLASS_NOT_FOUND_EXIT_STATUS=1
 
 function usage {
   echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
@@ -43,36 +44,22 @@ function usage {
   $FWDIR/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
 }
 
-function ensure_arg_number {
-  arg_number=$1
-  at_least=$2
-
-  if [[ $arg_number -lt $at_least ]]; then
-    usage
-    exit 1
-  fi
-}
-
-if [[ "$@" = --help ]] || [[ "$@" = -h ]]; then
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
   usage
   exit 0
 fi
 
-THRIFT_SERVER_ARGS=()
-SUBMISSION_ARGS=()
-
-while (($#)); do
-  case $1 in
-    --hiveconf)
-      ensure_arg_number $# 2
-      THRIFT_SERVER_ARGS+=("$1"); shift
-      THRIFT_SERVER_ARGS+=("$1"); shift
-      ;;
-
-    *)
-      SUBMISSION_ARGS+=("$1"); shift
-      ;;
-  esac
-done
+source $FWDIR/bin/utils.sh
+SUBMIT_USAGE_FUNCTION=usage
+gatherSparkSubmitOpts "$@"
 
-exec "$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${THRIFT_SERVER_ARGS[@]}"
+"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_OPTS[@]}" spark-internal "${APPLICATION_OPTS[@]}"
+exit_status=$?
+
+if [[ exit_status -eq CLASS_NOT_FOUND_EXIT_STATUS ]]; then
+  echo
+  echo "Failed to load Hive Thrift server main class $CLASS."
+  echo "You need to build Spark with -Phive."
+fi
+
+exit $exit_status
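
With the hand-rolled case statement gone, spark-submit options and Thrift server options may be mixed freely on one command line; gatherSparkSubmitOpts routes each kind to the right array. An illustrative invocation (the master URL and port are placeholders):

    # --master is a spark-submit option, so it goes to SUBMISSION_OPTS; --hiveconf
    # is not, so it lands in APPLICATION_OPTS and reaches HiveThriftServer2.
    ./sbin/start-thriftserver.sh \
      --master spark://localhost:7077 \
      --hiveconf hive.server2.thrift.port=10001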