diff --git a/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala b/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
index b198448a299cf..8e58beff74290 100644
--- a/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
@@ -403,73 +403,6 @@ class ExecutorSuite extends SparkFunSuite
     assert(taskMetrics.getMetricValue("JVMHeapMemory") > 0)
   }
 
-  test("SPARK-32175: Plugin initialization should start after heartbeater started") {
-    withTempDir { tempDir =>
-      val sparkPluginCodeBody =
-        """
-          |@Override
-          |public org.apache.spark.api.plugin.ExecutorPlugin executorPlugin() {
-          |  return new TestExecutorPlugin();
-          |}
-          |
-          |@Override
-          |public org.apache.spark.api.plugin.DriverPlugin driverPlugin() { return null; }
-        """.stripMargin
-      val executorPluginBody =
-        """
-          |@Override
-          |public void init(
-          |    org.apache.spark.api.plugin.PluginContext ctx,
-          |    java.util.Map extraConf) {
-          |  try {
-          |    Thread.sleep(8 * 1000);
-          |  } catch (InterruptedException e) {
-          |    throw new RuntimeException(e);
-          |  }
-          |}
-        """.stripMargin
-
-      val compiledExecutorPlugin = TestUtils.createCompiledClass(
-        "TestExecutorPlugin",
-        tempDir,
-        "",
-        null,
-        Seq.empty,
-        Seq("org.apache.spark.api.plugin.ExecutorPlugin"),
-        executorPluginBody)
-
-      val thisClassPath =
-        sys.props("java.class.path").split(File.pathSeparator).map(p => new File(p).toURI.toURL)
-      val compiledSparkPlugin = TestUtils.createCompiledClass(
-        "TestSparkPlugin",
-        tempDir,
-        "",
-        null,
-        Seq(tempDir.toURI.toURL) ++ thisClassPath,
-        Seq("org.apache.spark.api.plugin.SparkPlugin"),
-        sparkPluginCodeBody)
-
-      val jarUrl = TestUtils.createJar(
-        Seq(compiledSparkPlugin, compiledExecutorPlugin),
-        new File(tempDir, "testPlugin.jar"))
-
-      val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
-      val args = Seq(
-        "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
-        "--name", "testApp",
-        "--master", "local-cluster[1,1,1024]",
-        "--conf", "spark.plugins=TestSparkPlugin",
-        "--conf", "spark.storage.blockManagerSlaveTimeoutMs=" + 5 * 1000,
-        "--conf", "spark.network.timeoutInterval=" + 1000,
-        "--conf", "spark.executor.heartbeatInterval=" + 1000,
-        "--conf", "spark.executor.extraClassPath=" + jarUrl.toString,
-        "--conf", "spark.driver.extraClassPath=" + jarUrl.toString,
-        "--conf", "spark.ui.enabled=false",
-        unusedJar.toString)
-      SparkSubmitSuite.runSparkSubmit(args, timeout = 30.seconds)
-    }
-  }
-
   private def createMockEnv(conf: SparkConf, serializer: JavaSerializer): SparkEnv = {
     val mockEnv = mock[SparkEnv]
     val mockRpcEnv = mock[RpcEnv]