diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index 1db032711ce42..5cd21e31c9554 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -132,6 +132,8 @@ private[spark] class BlockManager(
     conf.getBoolean("spark.shuffle.service.enabled", false)
   private val chunkSize =
     conf.getSizeAsBytes("spark.storage.memoryMapLimitForTests", Int.MaxValue.toString).toInt
+  private val remoteReadNioBufferConversion =
+    conf.getBoolean("spark.network.remoteReadNioBufferConversion", false)
 
   val diskBlockManager = {
     // Only perform cleanup if an external service is not serving our shuffle files.
@@ -731,7 +733,14 @@ private[spark] class BlockManager(
       }
 
       if (data != null) {
-        return Some(ChunkedByteBuffer.fromManagedBuffer(data, chunkSize))
+        // SPARK-24307 undocumented "escape-hatch" in case there are any issues in converting to
+        // ChunkedByteBuffer, to go back to old code-path. Can be removed post Spark 2.4 if
+        // new path is stable.
+        if (remoteReadNioBufferConversion) {
+          return Some(new ChunkedByteBuffer(data.nioByteBuffer()))
+        } else {
+          return Some(ChunkedByteBuffer.fromManagedBuffer(data, chunkSize))
+        }
       }
       logDebug(s"The value of block $blockId is null")
     }
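For anyone wanting to exercise the escape hatch, a minimal sketch of how it might be enabled from the driver. The `SparkConf`/`SparkContext` API calls are standard Spark; the app name and master are illustrative, and the flag could just as well be passed as `--conf` on `spark-submit`:

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Opt back into the pre-2.4 remote-read path by flipping the escape-hatch
// flag added in this patch. BlockManager reads the key once at construction
// via conf.getBoolean, so it must be set before the SparkContext starts.
val conf = new SparkConf()
  .setAppName("remote-read-nio-fallback") // illustrative name
  .setMaster("local[*]")                  // illustrative master
  .set("spark.network.remoteReadNioBufferConversion", "true")

val sc = new SparkContext(conf)
```

With the flag left at its default of `false`, `getRemoteBytes` takes the new path, which copies the `ManagedBuffer` into a `ChunkedByteBuffer` in pieces of at most `chunkSize` bytes; setting it to `true` restores the old behavior of materializing the block as a single NIO `ByteBuffer` via `data.nioByteBuffer()`.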