@@ -208,7 +208,7 @@ private[spark] abstract class BasePythonRunner[IN, OUT](
/* backlog */ 1,
InetAddress.getByName("localhost")))
// A call to accept() for ServerSocket shall block infinitely.
- serverSocket.map(_.setSoTimeout(0))
+ serverSocket.foreach(_.setSoTimeout(0))
new Thread("accept-connections") {
setDaemon(true)

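This first hunk is representative of the whole change: the receiver is an Option and the result of map was never read. Option.map runs the function and wraps its Unit result in a new Some, whereas Option.foreach runs the side effect only when a value is present and returns Unit. A minimal sketch of the difference, using an illustrative locally bound socket rather than the runner's actual state:

import java.net.ServerSocket

val serverSocket: Option[ServerSocket] = Some(new ServerSocket(0))

// map evaluates the side effect but also allocates Some(()) of type
// Option[Unit], which nothing reads afterwards.
serverSocket.map(_.setSoTimeout(0))

// foreach expresses "do this if defined" and returns Unit, matching how the
// result is actually used at this call site.
serverSocket.foreach(_.setSoTimeout(0))

Behavior is identical either way; the rewrite only states the intent and drops the unused wrapper.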

@@ -246,7 +246,7 @@ class AvroLogicalTypeSuite extends QueryTest with SharedSQLContext with SQLTestU
dataFileWriter.create(schema, new File(avroFile))
val logicalType = LogicalTypes.decimal(precision, scale)

- decimalInputData.map { x =>
+ decimalInputData.foreach { x =>
val avroRec = new GenericData.Record(schema)
val decimal = new java.math.BigDecimal(x).setScale(scale)
val bytes =

@@ -137,7 +137,7 @@ private[mesos] object MesosSchedulerBackendUtil extends Logging {
val containerInfo = ContainerInfo.newBuilder()
.setType(containerType)

- conf.getOption("spark.mesos.executor.docker.image").map { image =>
+ conf.getOption("spark.mesos.executor.docker.image").foreach { image =>
val forcePullImage = conf
.getOption("spark.mesos.executor.docker.forcePullImage")
.exists(_.equals("true"))

@@ -116,7 +116,7 @@ class IntegralDeltaSuite extends SparkFunSuite {
val row = new GenericInternalRow(1)
val nullRow = new GenericInternalRow(1)
nullRow.setNullAt(0)
- input.map { value =>
+ input.foreach { value =>
if (value == nullValue) {
builder.appendFrom(nullRow, 0)
} else {

@@ -46,7 +46,7 @@ class PassThroughSuite extends SparkFunSuite {

val builder = TestCompressibleColumnBuilder(columnStats, columnType, PassThrough)

- input.map { value =>
+ input.foreach { value =>
val row = new GenericInternalRow(1)
columnType.setField(row, 0, value)
builder.appendFrom(row, 0)

@@ -98,7 +98,7 @@
val row = new GenericInternalRow(1)
val nullRow = new GenericInternalRow(1)
nullRow.setNullAt(0)
- input.map { value =>
+ input.foreach { value =>
if (value == nullValue) {
builder.appendFrom(nullRow, 0)
} else {
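The test-suite hunks apply the same idiom to sequences: Seq.map allocates a new collection holding the function's results, which these loops discard, while Seq.foreach only performs the side effect per element. An illustrative sketch with made-up data, not taken from the suites above:

val input = Seq(1, 2, 3)

// map builds a Seq[Unit] of the same length purely to be thrown away.
input.map { value => println(value) }

// foreach runs the body for each element and returns Unit; no intermediate
// collection is created.
input.foreach { value => println(value) }

As in the Option case, nothing observable changes; the rewrite drops a needless allocation and makes clear that the loop exists only for its side effects.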