diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
index ad7a0972ef9d1..06530ff836466 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
@@ -19,6 +19,8 @@ package org.apache.spark.deploy.history
 
 import java.util.zip.ZipOutputStream
 
+import scala.xml.Node
+
 import org.apache.spark.SparkException
 import org.apache.spark.ui.SparkUI
 
@@ -114,4 +116,8 @@ private[history] abstract class ApplicationHistoryProvider {
    */
   def getApplicationInfo(appId: String): Option[ApplicationHistoryInfo]
 
+  /**
+   * @return html text to display when the application list is empty
+   */
+  def getEmptyListingHtml(): Seq[Node] = Seq.empty
 }
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
index 3c2d169f3270e..530cc5252214b 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -23,6 +23,7 @@ import java.util.concurrent.{Executors, ExecutorService, TimeUnit}
 import java.util.zip.{ZipEntry, ZipOutputStream}
 
 import scala.collection.mutable
+import scala.xml.Node
 
 import com.google.common.io.ByteStreams
 import com.google.common.util.concurrent.{MoreExecutors, ThreadFactoryBuilder}
@@ -262,6 +263,17 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     }
   }
 
+  override def getEmptyListingHtml(): Seq[Node] = {
+    <p>
+      Did you specify the correct logging directory? Please verify your setting of
+      <span style="font-style:italic">spark.history.fs.logDirectory</span>
+      listed above and whether you have the permissions to access it.
+      <br/>
+      It is also possible that your application did not run to
+      completion or did not stop the SparkContext.
+    </p>
+  }
+
   override def getConfig(): Map[String, String] = {
     val safeMode = if (isFsInSafeMode()) {
       Map("HDFS State" -> "In safe mode, application logs not available.")
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
index 95b72224e0f94..96b9ecf43b14c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
@@ -47,13 +47,7 @@ private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("")
             } else if (requestedIncomplete) {
               <h4>No incomplete applications found!</h4>
             } else {
-              <h4>No completed applications found!</h4> ++
-              <p>Did you specify the correct logging directory?
-                Please verify your setting of <span style="font-style:italic">
-                spark.history.fs.logDirectory</span> and whether you have the permissions to
-                access it.<br /> It is also possible that your application did not run to
-                completion or did not stop the SparkContext.
-              </p>
+              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
             }
             }
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index 087c69e6489dd..3175b36b3e56f 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -22,6 +22,7 @@ import java.util.zip.ZipOutputStream
 import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
 
 import scala.util.control.NonFatal
+import scala.xml.Node
 
 import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
 
@@ -193,6 +194,13 @@ class HistoryServer(
     provider.writeEventLogs(appId, attemptId, zipStream)
   }
 
+  /**
+   * @return html text to display when the application list is empty
+   */
+  def emptyListingHtml(): Seq[Node] = {
+    provider.getEmptyListingHtml()
+  }
+
   /**
    * Returns the provider configuration to show in the listing page.
    *
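For reviewers, a minimal sketch of how the new hook could be exercised by a provider other than `FsHistoryProvider`. `CustomHistoryProvider` below is hypothetical and not part of this patch; it is assumed to live in the `org.apache.spark.deploy.history` package, since the provider classes are `private[history]`.

```scala
// Hypothetical example, not included in this patch: a provider that overrides the
// getEmptyListingHtml() hook introduced above to render its own "no applications" message.
package org.apache.spark.deploy.history

import scala.xml.Node

import org.apache.spark.SparkConf

private[history] class CustomHistoryProvider(conf: SparkConf) extends FsHistoryProvider(conf) {

  // Scala XML literals are Seq[Node], so they satisfy the new method's return type.
  override def getEmptyListingHtml(): Seq[Node] = {
    <p>
      No applications to show yet. Event logs are read from
      <span style="font-style:italic">
        {conf.get("spark.history.fs.logDirectory", "(not set)")}
      </span>.
    </p>
  }
}
```

The history server already instantiates whichever provider class is named by `spark.history.provider`, and `HistoryPage` renders the result through the new `parent.emptyListingHtml`, so no additional wiring is needed for such a message to appear on an empty listing page.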