Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions bin/spark-sql
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
set -o posix

CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
CLASS_NOT_FOUND_EXIT_STATUS=1

# Figure out where Spark is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
Expand Down Expand Up @@ -75,7 +76,7 @@ while (($#)); do
CLI_ARGS+=("$1"); shift
;;

-s | --silent)
-S | --silent)
CLI_ARGS+=("$1"); shift
;;

Expand All @@ -91,4 +92,13 @@ while (($#)); do
esac
done

# Run the Spark SQL CLI through spark-submit. This deliberately is NOT an
# `exec`: exec would replace the current shell, so the exit-status check
# below would never execute.
"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${CLI_ARGS[@]}"
exit_status=$?

# spark-submit signals a missing main class with CLASS_NOT_FOUND_EXIT_STATUS,
# which here means Spark was built without the Hive profile.
if [[ $exit_status -eq $CLASS_NOT_FOUND_EXIT_STATUS ]]; then
  echo
  echo "Failed to load Spark SQL CLI main class $CLASS."
  echo "You need to build Spark with -Phive."
fi

exit $exit_status
14 changes: 13 additions & 1 deletion core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,8 @@ object SparkSubmit {
private val SPARK_SHELL = "spark-shell"
private val PYSPARK_SHELL = "pyspark-shell"

private val CLASS_NOT_FOUND_EXIT_STATUS = 1

// Exposed for testing
private[spark] var exitFn: () => Unit = () => System.exit(-1)
private[spark] var printStream: PrintStream = System.err
Expand Down Expand Up @@ -311,8 +313,18 @@ object SparkSubmit {
System.setProperty(key, value)
}

val mainClass = Class.forName(childMainClass, true, loader)
var mainClass: Class[_] = null

try {
mainClass = Class.forName(childMainClass, true, loader)
} catch {
case e: ClassNotFoundException =>
e.printStackTrace(printStream)
System.exit(CLASS_NOT_FOUND_EXIT_STATUS)
}

val mainMethod = mainClass.getMethod("main", new Array[String](0).getClass)

try {
mainMethod.invoke(null, childArgs.toArray)
} catch {
Expand Down
12 changes: 11 additions & 1 deletion sbin/start-thriftserver.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ set -o posix
FWDIR="$(cd `dirname $0`/..; pwd)"

CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
CLASS_NOT_FOUND_EXIT_STATUS=1

function usage {
echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"
Expand Down Expand Up @@ -75,4 +76,13 @@ while (($#)); do
esac
done

# Launch the Hive Thrift server through spark-submit. This deliberately is
# NOT an `exec`: exec would replace the current shell, so the exit-status
# check below would never execute.
"$FWDIR"/bin/spark-submit --class $CLASS "${SUBMISSION_ARGS[@]}" spark-internal "${THRIFT_SERVER_ARGS[@]}"
exit_status=$?

# spark-submit signals a missing main class with CLASS_NOT_FOUND_EXIT_STATUS,
# which here means Spark was built without the Hive profile.
if [[ $exit_status -eq $CLASS_NOT_FOUND_EXIT_STATUS ]]; then
  echo
  echo "Failed to load Hive Thrift server main class $CLASS."
  echo "You need to build Spark with -Phive."
fi

exit $exit_status
Original file line number Diff line number Diff line change
Expand Up @@ -72,11 +72,10 @@ class HiveThriftServer2Suite extends FunSuite with BeforeAndAfterAll with TestUt
""".stripMargin.split("\\s+")

val pb = new ProcessBuilder(command ++ args: _*)
val environment = pb.environment()
process = pb.start()
inputReader = new BufferedReader(new InputStreamReader(process.getInputStream))
errorReader = new BufferedReader(new InputStreamReader(process.getErrorStream))
waitForOutput(inputReader, "ThriftBinaryCLIService listening on", 300000)
waitForOutput(inputReader, "ThriftBinaryCLIService listening on", 30000)

// Spawn a thread to read the output from the forked process.
// Note that this is necessary since in some configurations, log4j could be blocked
Expand Down