Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -34,35 +34,21 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin

// Recursively consumes the command-line argument list, mutating `conf` and
// `propertiesFile` as it goes. `@tailrec` guarantees constant stack usage.
// NOTE(review): this span appears to interleave the OLD and NEW diff versions
// of the method (duplicated --help/--properties-file/Nil cases and an extra
// closing brace) — reconcile against the real file before relying on it.
@tailrec
private def parse(args: List[String]): Unit = {
// Legacy form: a single bare argument is treated as the event-log directory.
if (args.length == 1) {
setLogDirectory(args.head)
} else {
args match {
// Deprecated --dir/-d flag: forwards to spark.history.fs.logDirectory.
case ("--dir" | "-d") :: value :: tail =>
setLogDirectory(value)
parse(tail)
args match {
case ("--help" | "-h") :: tail =>
printUsageAndExit(0)

// --help/-h prints usage and exits with status 0.
case ("--help" | "-h") :: tail =>
printUsageAndExit(0)
case ("--properties-file") :: value :: tail =>
propertiesFile = value
parse(tail)

// --properties-file overrides the default spark-defaults.conf location;
// it is read later by Utils.loadDefaultSparkProperties.
case ("--properties-file") :: value :: tail =>
propertiesFile = value
parse(tail)
case Nil =>

// Nil: all arguments consumed — parsing complete.
case Nil =>

// Anything unrecognized is a usage error (exit status 1).
case _ =>
printUsageAndExit(1)
}
case _ =>
printUsageAndExit(1)
}
}

/**
 * Records the event-log directory in the SparkConf under
 * `spark.history.fs.logDirectory`, warning that the command-line form
 * of this setting has been deprecated since Spark 1.1.0.
 */
private def setLogDirectory(value: String): Unit = {
  val deprecationMessage =
    "Setting log directory through the command line is deprecated as of " +
      "Spark 1.1.0. Please set this through spark.history.fs.logDirectory instead."
  logWarning(deprecationMessage)
  conf.set("spark.history.fs.logDirectory", value)
}

// This mutates the SparkConf, so all accesses to it must be made after this line
Utils.loadDefaultSparkProperties(conf, propertiesFile)

Expand All @@ -73,8 +59,6 @@ private[history] class HistoryServerArguments(conf: SparkConf, args: Array[Strin
|Usage: HistoryServer [options]
|
|Options:
| DIR Deprecated; set spark.history.fs.logDirectory directly
| --dir DIR (-d DIR) Deprecated; set spark.history.fs.logDirectory directly
| --properties-file FILE Path to a custom Spark properties file.
| Default is conf/spark-defaults.conf.
|
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,18 +40,6 @@ class HistoryServerArgumentsSuite extends SparkFunSuite {
assert(conf.get("spark.testing") === "true")
}

test("Directory Arguments Parsing --dir or -d") {
  // Exercise the deprecated --dir flag and verify it lands in the conf key.
  new HistoryServerArguments(conf, Array("--dir", "src/test/resources/spark-events1"))
  assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events1")
}

test("Directory Param can also be set directly") {
  // A single bare argument is the legacy way of naming the log directory.
  new HistoryServerArguments(conf, Array("src/test/resources/spark-events2"))
  assert(conf.get("spark.history.fs.logDirectory") === "src/test/resources/spark-events2")
}

test("Properties File Arguments Parsing --properties-file") {
val tmpDir = Utils.createTempDir()
val outFile = File.createTempFile("test-load-spark-properties", "test", tmpDir)
Expand Down
17 changes: 16 additions & 1 deletion sbin/start-history-server.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,22 @@ if [ -z "${SPARK_HOME}" ]; then
export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
fi

# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
CLASS="org.apache.spark.deploy.history.HistoryServer"

if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
echo "Usage: ./sbin/start-history-server.sh [options]"
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: why not have a separate usage() function?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Well this is short, and I am following what start-master.sh and start-slave.sh did.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Well, I also saw similar code in start-thriftserver.sh, which uses usage(). Both are fine with me; this is just a heads up to make sure we've taken that into consideration.

pattern="Usage:"
pattern+="\|Using Spark's default log4j profile:"
pattern+="\|Started daemon with process name"
pattern+="\|Registered signal handler for"

"${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
exit 1
fi

# Load Spark's shell-level configuration and environment before launching.
. "${SPARK_HOME}/sbin/spark-config.sh"
. "${SPARK_HOME}/bin/load-spark-env.sh"

# NOTE(review): the two exec lines below look like the OLD and NEW diff
# versions of the same statement; only the first exec would ever run.
# The daemon launcher replaces this shell with the HistoryServer process.
exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 "$@"
exec "${SPARK_HOME}/sbin"/spark-daemon.sh start $CLASS 1 "$@"