16 changes: 1 addition & 15 deletions bin/pyspark
@@ -17,24 +17,10 @@
# limitations under the License.
#

# Figure out where Spark is installed
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"

source "$SPARK_HOME"/bin/load-spark-env.sh

function usage() {
if [ -n "$1" ]; then
echo $1
fi
echo "Usage: ./bin/pyspark [options]" 1>&2
"$SPARK_HOME"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
exit $2
}
export -f usage

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
fi
export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"

# In Spark <= 1.1, setting IPYTHON=1 would cause the driver to be launched using the `ipython`
# executable, while the worker would still be launched using PYSPARK_PYTHON.
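Note on the pattern above: each launcher script drops its local `usage()` helper and instead exports `_SPARK_CMD_USAGE`, which the JVM side reads back when it needs to print help. A minimal sketch of that handshake, using illustrative names rather than Spark's actual classes:

```scala
// Hedged sketch: how a JVM-side argument parser can pick up the usage string
// exported by the launcher script, with a generic fallback for the case where
// spark-submit is invoked directly and the variable is not set.
object UsageEnvSketch {
  def usageMessage: String =
    sys.env.getOrElse(
      "_SPARK_CMD_USAGE",
      "Usage: spark-submit [options] <app jar | python file> [app arguments]")

  def main(args: Array[String]): Unit = {
    if (args.contains("--help") || args.contains("-h")) {
      Console.err.println(usageMessage)
      sys.exit(0)
    }
  }
}
```

The same `export _SPARK_CMD_USAGE=...` line repeats, with the appropriate command name, in pyspark2.cmd, spark-shell, spark-shell2.cmd, spark-sql, and sparkR below.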
1 change: 1 addition & 0 deletions bin/pyspark2.cmd
@@ -21,6 +21,7 @@ rem Figure out where the Spark framework is installed
set SPARK_HOME=%~dp0..

call %SPARK_HOME%\bin\load-spark-env.cmd
set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options]

rem Figure out which Python to use.
if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
13 changes: 1 addition & 12 deletions bin/spark-class
@@ -16,18 +16,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e

# Figure out where Spark is installed
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"

. "$SPARK_HOME"/bin/load-spark-env.sh

if [ -z "$1" ]; then
echo "Usage: spark-class <class> [<args>]" 1>&2
exit 1
fi

# Find the java binary
if [ -n "${JAVA_HOME}" ]; then
RUNNER="${JAVA_HOME}/bin/java"
@@ -98,9 +92,4 @@ CMD=()
while IFS= read -d '' -r ARG; do
CMD+=("$ARG")
done < <("$RUNNER" -cp "$LAUNCH_CLASSPATH" org.apache.spark.launcher.Main "$@")

if [ "${CMD[0]}" = "usage" ]; then
"${CMD[@]}"
else
exec "${CMD[@]}"
fi
exec "${CMD[@]}"
15 changes: 1 addition & 14 deletions bin/spark-shell
@@ -29,20 +29,7 @@ esac
set -o posix

export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"

usage() {
if [ -n "$1" ]; then
echo "$1"
fi
echo "Usage: ./bin/spark-shell [options]"
"$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
exit "$2"
}
export -f usage

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage "" 0
fi
export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"

# SPARK-4161: scala does not assume use of the java classpath,
# so we need to add the "-Dscala.usejavacp=true" flag manually. We
21 changes: 2 additions & 19 deletions bin/spark-shell2.cmd
@@ -18,12 +18,7 @@ rem limitations under the License.
rem

set SPARK_HOME=%~dp0..

echo "%*" | findstr " \<--help\> \<-h\>" >nul
if %ERRORLEVEL% equ 0 (
call :usage
exit /b 0
)
set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options]

rem SPARK-4161: scala does not assume use of the java classpath,
rem so we need to add the "-Dscala.usejavacp=true" flag manually. We
@@ -37,16 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"

:run_shell
call %SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main %*
set SPARK_ERROR_LEVEL=%ERRORLEVEL%
if not "x%SPARK_LAUNCHER_USAGE_ERROR%"=="x" (
call :usage
exit /b 1
)
exit /b %SPARK_ERROR_LEVEL%

:usage
echo %SPARK_LAUNCHER_USAGE_ERROR%
echo "Usage: .\bin\spark-shell.cmd [options]" >&2
call %SPARK_HOME%\bin\spark-submit2.cmd --help 2>&1 | findstr /V "Usage" 1>&2
goto :eof
%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main %*
39 changes: 2 additions & 37 deletions bin/spark-sql
@@ -17,41 +17,6 @@
# limitations under the License.
#

#
# Shell script for starting the Spark SQL CLI

# Enter posix mode for bash
set -o posix

# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
export CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"

# Figure out where Spark is installed
export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"

function usage {
if [ -n "$1" ]; then
echo "$1"
fi
echo "Usage: ./bin/spark-sql [options] [cli option]"
pattern="usage"
pattern+="\|Spark assembly has been built with Hive"
pattern+="\|NOTE: SPARK_PREPEND_CLASSES is set"
pattern+="\|Spark Command: "
pattern+="\|--help"
pattern+="\|======="

"$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
echo
echo "CLI options:"
"$FWDIR"/bin/spark-class "$CLASS" --help 2>&1 | grep -v "$pattern" 1>&2
exit "$2"
}
export -f usage

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage "" 0
fi

exec "$FWDIR"/bin/spark-submit --class "$CLASS" "$@"
export _SPARK_CMD_USAGE="Usage: ./bin/spark-sql [options] [cli option]"
exec "$FWDIR"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
12 changes: 0 additions & 12 deletions bin/spark-submit
@@ -22,16 +22,4 @@ SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
# disable randomized hash for string in Python 3.3+
export PYTHONHASHSEED=0

# Only define a usage function if an upstream script hasn't done so.
if ! type -t usage >/dev/null 2>&1; then
usage() {
if [ -n "$1" ]; then
echo "$1"
fi
"$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit --help
exit "$2"
}
export -f usage
fi

exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"
13 changes: 1 addition & 12 deletions bin/spark-submit2.cmd
@@ -24,15 +24,4 @@ rem disable randomized hash for string in Python 3.3+
set PYTHONHASHSEED=0

set CLASS=org.apache.spark.deploy.SparkSubmit
call %~dp0spark-class2.cmd %CLASS% %*
set SPARK_ERROR_LEVEL=%ERRORLEVEL%
if not "x%SPARK_LAUNCHER_USAGE_ERROR%"=="x" (
call :usage
exit /b 1
)
exit /b %SPARK_ERROR_LEVEL%

:usage
echo %SPARK_LAUNCHER_USAGE_ERROR%
call %SPARK_HOME%\bin\spark-class2.cmd %CLASS% --help
goto :eof
%~dp0spark-class2.cmd %CLASS% %*
18 changes: 1 addition & 17 deletions bin/sparkR
@@ -17,23 +17,7 @@
# limitations under the License.
#

# Figure out where Spark is installed
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"

source "$SPARK_HOME"/bin/load-spark-env.sh

function usage() {
if [ -n "$1" ]; then
echo $1
fi
echo "Usage: ./bin/sparkR [options]" 1>&2
"$SPARK_HOME"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
exit $2
}
export -f usage

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
usage
fi

export _SPARK_CMD_USAGE="Usage: ./bin/sparkR [options]"
exec "$SPARK_HOME"/bin/spark-submit sparkr-shell-main "$@"
10 changes: 5 additions & 5 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -82,13 +82,13 @@ object SparkSubmit {
private val CLASS_NOT_FOUND_EXIT_STATUS = 101

// Exposed for testing
private[spark] var exitFn: () => Unit = () => System.exit(1)
private[spark] var exitFn: Int => Unit = (exitCode: Int) => System.exit(exitCode)
private[spark] var printStream: PrintStream = System.err
private[spark] def printWarning(str: String): Unit = printStream.println("Warning: " + str)
private[spark] def printErrorAndExit(str: String): Unit = {
printStream.println("Error: " + str)
printStream.println("Run with --help for usage help or --verbose for debug output")
exitFn()
exitFn(1)
}
private[spark] def printVersionAndExit(): Unit = {
printStream.println("""Welcome to
@@ -99,7 +99,7 @@ object SparkSubmit {
/_/
""".format(SPARK_VERSION))
printStream.println("Type --help for more information.")
exitFn()
exitFn(0)
}

def main(args: Array[String]): Unit = {
@@ -160,7 +160,7 @@ object SparkSubmit {
// detect exceptions with empty stack traces here, and treat them differently.
if (e.getStackTrace().length == 0) {
printStream.println(s"ERROR: ${e.getClass().getName()}: ${e.getMessage()}")
exitFn()
exitFn(1)
} else {
throw e
}
@@ -700,7 +700,7 @@ object SparkSubmit {
/**
* Return whether the given main class represents a sql shell.
*/
private def isSqlShell(mainClass: String): Boolean = {
private[deploy] def isSqlShell(mainClass: String): Boolean = {
mainClass == "org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
}

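Changing `exitFn` from `() => Unit` to `Int => Unit` lets each call site report a distinct status code (0 for `--version` and `--help`, 1 for errors) while tests can still swap in a stub. A minimal sketch of why the injectable exit function matters, with hypothetical names rather than Spark's real test helpers:

```scala
// Hedged sketch: production wires exitFn to System.exit; a test replaces it with a
// recorder so the suite can assert on the exit code without killing the JVM.
object ExitFnSketch {
  var exitFn: Int => Unit = (code: Int) => System.exit(code)

  def printErrorAndExit(msg: String): Unit = {
    Console.err.println(s"Error: $msg")
    exitFn(1) // error paths exit non-zero
  }

  def printVersionAndExit(version: String): Unit = {
    println(s"Welcome to Spark $version")
    exitFn(0) // purely informational paths exit cleanly
  }
}

// In a test:
//   var lastExit = -1
//   ExitFnSketch.exitFn = code => lastExit = code
//   ExitFnSketch.printErrorAndExit("boom") // lastExit == 1, JVM keeps running
```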
core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -17,12 +17,15 @@

package org.apache.spark.deploy

import java.io.{ByteArrayOutputStream, PrintStream}
import java.lang.reflect.InvocationTargetException
import java.net.URI
import java.util.{List => JList}
import java.util.jar.JarFile

import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.io.Source

import org.apache.spark.deploy.SparkSubmitAction._
import org.apache.spark.launcher.SparkSubmitArgumentsParser
@@ -412,6 +415,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
case VERSION =>
SparkSubmit.printVersionAndExit()

case USAGE_ERROR =>
printUsageAndExit(1)

case _ =>
throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
}
@@ -449,11 +455,14 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
if (unknownParam != null) {
outStream.println("Unknown/unsupported param " + unknownParam)
}
outStream.println(
val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
"""Usage: spark-submit [options] <app jar | python file> [app arguments]
|Usage: spark-submit --kill [submission ID] --master [spark://...]
|Usage: spark-submit --status [submission ID] --master [spark://...]
|
|Usage: spark-submit --status [submission ID] --master [spark://...]""".stripMargin)
outStream.println(command)

outStream.println(
"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local.
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
@@ -525,6 +534,65 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
| delegation tokens periodically.
""".stripMargin
)
SparkSubmit.exitFn()

if (SparkSubmit.isSqlShell(mainClass)) {
outStream.println("CLI options:")
outStream.println(getSqlShellOptions())
}

SparkSubmit.exitFn(exitCode)
}

/**
* Run the Spark SQL CLI main class with the "--help" option and catch its output. Then filter
* the results to remove unwanted lines.
*
* Since the CLI will call `System.exit()`, we install a security manager to prevent that call
* from working, and restore the original one afterwards.
*/
private def getSqlShellOptions(): String = {
val currentOut = System.out
val currentErr = System.err
val currentSm = System.getSecurityManager()
try {
val out = new ByteArrayOutputStream()
val stream = new PrintStream(out)
System.setOut(stream)
System.setErr(stream)

val sm = new SecurityManager() {
override def checkExit(status: Int): Unit = {
throw new SecurityException()
}

override def checkPermission(perm: java.security.Permission): Unit = {}
}
System.setSecurityManager(sm)

try {
Class.forName(mainClass).getMethod("main", classOf[Array[String]])
.invoke(null, Array(HELP))
} catch {
case e: InvocationTargetException =>
// Ignore SecurityException, since we throw it above.
if (!e.getCause().isInstanceOf[SecurityException]) {
throw e
}
}

stream.flush()

// Get the output and discard any unnecessary lines from it.
Source.fromString(new String(out.toByteArray())).getLines
.filter { line =>
!line.startsWith("log4j") && !line.startsWith("usage")
}
.mkString("\n")
} finally {
System.setSecurityManager(currentSm)
System.setOut(currentOut)
System.setErr(currentErr)
}
}

}
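The new `getSqlShellOptions()` above captures the SQL CLI's `--help` output in-process: stdout and stderr are redirected to a buffer, and a `SecurityManager` whose `checkExit` throws turns the CLI's `System.exit()` into a catchable exception. A self-contained sketch of that capture pattern, assuming a JDK where `SecurityManager` is still usable (it is deprecated for removal in recent JDKs, but was the standard mechanism when this change was written):

```scala
import java.io.{ByteArrayOutputStream, PrintStream}

// Hedged sketch of the capture-and-block-exit pattern used by getSqlShellOptions().
object CaptureExitSketch {
  def captureOutput(body: => Unit): String = {
    val savedOut = System.out
    val savedErr = System.err
    val savedSm  = System.getSecurityManager()
    val buffer   = new ByteArrayOutputStream()
    val stream   = new PrintStream(buffer)
    try {
      System.setOut(stream)
      System.setErr(stream)
      // Block System.exit() by throwing from checkExit; allow everything else.
      System.setSecurityManager(new SecurityManager() {
        override def checkExit(status: Int): Unit = throw new SecurityException()
        override def checkPermission(perm: java.security.Permission): Unit = {}
      })
      try body catch { case _: SecurityException => () } // the blocked exit lands here
      stream.flush()
      new String(buffer.toByteArray())
    } finally {
      // Always restore the original streams and security manager.
      System.setSecurityManager(savedSm)
      System.setOut(savedOut)
      System.setErr(savedErr)
    }
  }
}
```

In the real method the exception surfaces wrapped in an `InvocationTargetException`, because the CLI's `main` is invoked reflectively; that is why the diff inspects `e.getCause()` rather than catching `SecurityException` directly.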
core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -62,7 +62,7 @@ class SparkSubmitSuite
SparkSubmit.printStream = printStream

@volatile var exitedCleanly = false
SparkSubmit.exitFn = () => exitedCleanly = true
SparkSubmit.exitFn = (_) => exitedCleanly = true

val thread = new Thread {
override def run() = try {