Skip to content

Commit 661c28e

Browse files
author
Andrew Or
committed
Fix concurrent query with fork-join pool
1 parent 46881b4 commit 661c28e

File tree

2 files changed

+18
-1
lines changed

2 files changed

+18
-1
lines changed

core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -613,7 +613,12 @@ class DAGScheduler(
613613
properties: Properties): Unit = {
614614
val start = System.nanoTime
615615
val waiter = submitJob(rdd, func, partitions, callSite, resultHandler, properties)
616-
Await.ready(waiter.completionFuture, atMost = Duration.Inf)
616+
// Note: Do not call Await.ready(future) because that calls `scala.concurrent.blocking`,
617+
// which causes concurrent SQL executions to fail if a fork-join pool is used. Note that
618+
// due to idiosyncrasies in Scala, `awaitPermission` is not actually used anywhere so it's
619+
// safe to pass in null here. For more detail, see SPARK-13747.
620+
val awaitPermission = null.asInstanceOf[scala.concurrent.CanAwait]
621+
waiter.completionFuture.ready(Duration.Inf)(awaitPermission)
617622
waiter.completionFuture.value.get match {
618623
case scala.util.Success(_) =>
619624
logInfo("Job %d finished: %s, took %f s".format

sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,18 @@ class SQLExecutionSuite extends SparkFunSuite {
4949
}
5050
}
5151

52+
test("concurrent query execution with fork-join pool (SPARK-13747)") {
53+
val sc = new SparkContext("local[*]", "test")
54+
try {
55+
// Should not throw IllegalArgumentException
56+
(1 to 100).par.foreach { _ =>
57+
sc.parallelize(1 to 5).map { i => (i, i) }.toDF("a", "b").count()
58+
}
59+
} finally {
60+
sc.stop()
61+
}
62+
}
63+
5264
/**
5365
* Trigger SPARK-10548 by mocking a parent and its child thread executing queries concurrently.
5466
*/

0 commit comments

Comments (0)