@@ -33,6 +33,16 @@ import org.apache.spark.ui.TestFilter
 
 class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with LocalSparkContext {
 
+  private var yarnSchedulerBackend: YarnSchedulerBackend = _
+
+  override def afterEach() {
+    try {
+      yarnSchedulerBackend.stop()
+    } finally {
+      super.afterEach()
+    }
+  }
+
   test("RequestExecutors reflects node blacklist and is serializable") {
     sc = new SparkContext("local", "YarnSchedulerBackendSuite")
     // Subclassing the TaskSchedulerImpl here instead of using Mockito. For details see SPARK-26891.
@@ -44,11 +54,12 @@ class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with Loc
       override def nodeBlacklist(): Set[String] = blacklistedNodes.get()
     }
 
-    val yarnSchedulerBackend = new YarnSchedulerBackend(sched, sc) {
+    val yarnSchedulerBackendExtended = new YarnSchedulerBackend(sched, sc) {
       def setHostToLocalTaskCount(hostToLocalTaskCount: Map[String, Int]): Unit = {
         this.hostToLocalTaskCount = hostToLocalTaskCount
       }
     }
+    yarnSchedulerBackend = yarnSchedulerBackendExtended
     val ser = new JavaSerializer(sc.conf).newInstance()
     for {
       blacklist <- IndexedSeq(Set[String](), Set("a", "b", "c"))
@@ -58,9 +69,9 @@ class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with Loc
         Map("a" -> 1, "b" -> 2)
       )
     } {
-      yarnSchedulerBackend.setHostToLocalTaskCount(hostToLocalCount)
+      yarnSchedulerBackendExtended.setHostToLocalTaskCount(hostToLocalCount)
       sched.setNodeBlacklist(blacklist)
-      val req = yarnSchedulerBackend.prepareRequestExecutors(numRequested)
+      val req = yarnSchedulerBackendExtended.prepareRequestExecutors(numRequested)
       assert(req.requestedTotal === numRequested)
       assert(req.nodeBlacklist === blacklist)
       assert(req.hostToLocalTaskCount.keySet.intersect(blacklist).isEmpty)
@@ -83,9 +94,9 @@ class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with Loc
     // Before adding the "YARN" filter, should get the code from the filter in SparkConf.
     assert(TestUtils.httpResponseCode(url) === HttpServletResponse.SC_BAD_GATEWAY)
 
-    val backend = new YarnSchedulerBackend(sched, sc) { }
+    yarnSchedulerBackend = new YarnSchedulerBackend(sched, sc) { }
 
-    backend.addWebUIFilter(classOf[TestFilter2].getName(),
+    yarnSchedulerBackend.addWebUIFilter(classOf[TestFilter2].getName(),
       Map("responseCode" -> HttpServletResponse.SC_NOT_ACCEPTABLE.toString), "")
 
     sc.ui.get.getHandlers.foreach { h =>
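The patch above applies the usual ScalaTest cleanup idiom: hold the resource in a suite-level var, assign it inside each test body, and stop it in afterEach wrapped in try/finally so the parent traits' cleanup (here LocalSparkContext resetting the SparkContext) still runs even if stop() throws. Below is a minimal self-contained sketch of that idiom, not taken from this patch: CloseableResource is a hypothetical stand-in for YarnSchedulerBackend, the null guard is an added assumption, and it assumes ScalaTest 3.1+ for the AnyFunSuite import.

import org.scalatest.BeforeAndAfterEach
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical stand-in for a resource such as YarnSchedulerBackend.
class CloseableResource {
  def stop(): Unit = ()
}

class CleanupPatternSuite extends AnyFunSuite with BeforeAndAfterEach {
  // Assigned by each test body, stopped after every test.
  private var resource: CloseableResource = _

  override def afterEach(): Unit = {
    try {
      // Null guard is an assumption (not in the patch): skip cleanup if a test never created the resource.
      if (resource != null) {
        resource.stop()
      }
    } finally {
      super.afterEach() // always let the parent traits run their own cleanup
    }
  }

  test("creates the resource and relies on afterEach to stop it") {
    resource = new CloseableResource
    assert(resource != null)
  }
}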