diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
index 080ba12c2f0d1..49f00cb10179e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala
@@ -34,35 +34,21 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin
 
   @tailrec
   private def parse(args: List[String]): Unit = {
-    if (args.length == 1) {
-      setLogDirectory(args.head)
-    } else {
-      args match {
-        case ("--dir" | "-d") :: value :: tail =>
-          setLogDirectory(value)
-          parse(tail)
+    args match {
+      case ("--help" | "-h") :: tail =>
+        printUsageAndExit(0)
 
-        case ("--help" | "-h") :: tail =>
-          printUsageAndExit(0)
+      case ("--properties-file") :: value :: tail =>
+        propertiesFile = value
+        parse(tail)
 
-        case ("--properties-file") :: value :: tail =>
-          propertiesFile = value
-          parse(tail)
+      case Nil =>
 
-        case Nil =>
-
-        case _ =>
-          printUsageAndExit(1)
-      }
+      case _ =>
+        printUsageAndExit(1)
     }
   }
 
-  private def setLogDirectory(value: String): Unit = {
-    logWarning("Setting log directory through the command line is deprecated as of " +
-      "Spark 1.1.0. Please set this through spark.history.fs.logDirectory instead.")
-    conf.set("spark.history.fs.logDirectory", value)
-  }
-
   // This mutates the SparkConf, so all accesses to it must be made after this line
   Utils.loadDefaultSparkProperties(conf, propertiesFile)
 
@@ -73,8 +59,6 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin
       |Usage: HistoryServer [options]
       |
       |Options:
-      |  DIR                         Deprecated; set spark.history.fs.logDirectory directly
-      |  --dir DIR (-d DIR)          Deprecated; set spark.history.fs.logDirectory directly
       |  --properties-file FILE      Path to a custom Spark properties file.
       |                              Default is conf/spark-defaults.conf.
       |
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
index de321db845a66..37954826af90c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerArgumentsSuite.scala
@@ -40,18 +40,6 @@ class HistoryServerArgumentsSuite extends SparkFunSuite {
     assert(conf.get("spark.testing") === "true")
   }
 
-  test("Directory Arguments Parsing --dir or -d") {
-    val argStrings = Array("--dir", "src/test/resources/spark-events1")
-    val hsa = new HistoryServerArguments(conf, argStrings)
-    assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events1")
-  }
-
-  test("Directory Param can also be set directly") {
-    val argStrings = Array("src/test/resources/spark-events2")
-    val hsa = new HistoryServerArguments(conf, argStrings)
-    assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events2")
-  }
-
   test("Properties File Arguments Parsing --properties-file") {
     val tmpDir = Utils.createTempDir()
     val outFile = File.createTempFile("test-load-spark-properties", "test", tmpDir)
diff --git a/sbin/start-history-server.sh b/sbin/start-history-server.sh
index 38a43b98c3992..71dace47767cb 100755
--- a/sbin/start-history-server.sh
+++ b/sbin/start-history-server.sh
@@ -28,7 +28,22 @@ if [ -z "${SPARK_HOME}" ]; then
   export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
 fi
 
+# NOTE: This exact class name is matched downstream by SparkSubmit.
+# Any changes need to be reflected there.
+CLASS="org.apache.spark.deploy.history.HistoryServer"
+
+if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
+  echo "Usage: ./sbin/start-history-server.sh [options]"
+  pattern="Usage:"
+  pattern+="\|Using Spark's default log4j profile:"
+  pattern+="\|Started daemon with process name"
+  pattern+="\|Registered signal handler for"
+
+  "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+  exit 1
+fi
+
 . "${SPARK_HOME}/sbin/spark-config.sh"
 . "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 "$@"
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 "$@"