Skip to content

Commit beecc9e

Browse files
committed
Reflect review feedback
1 parent c800e68 commit beecc9e

File tree

2 files changed

+6
-9
lines changed

2 files changed

+6
-9
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/python/PythonArrowInput.scala

Lines changed: 4 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ import org.apache.spark.{SparkEnv, TaskContext}
2626
import org.apache.spark.api.python.{BasePythonRunner, PythonRDD}
2727
import org.apache.spark.sql.catalyst.InternalRow
2828
import org.apache.spark.sql.execution.arrow.ArrowWriter
29-
import org.apache.spark.sql.internal.SQLConf
3029
import org.apache.spark.sql.types.StructType
3130
import org.apache.spark.sql.util.ArrowUtils
3231
import org.apache.spark.util.Utils
@@ -36,8 +35,6 @@ import org.apache.spark.util.Utils
3635
* JVM (an iterator of internal rows + additional data if required) to Python (Arrow).
3736
*/
3837
private[python] trait PythonArrowInput[IN] { self: BasePythonRunner[IN, _] =>
39-
protected val sqlConf = SQLConf.get
40-
4138
protected val workerConf: Map[String, String]
4239

4340
protected val schema: StructType
@@ -112,10 +109,10 @@ private[python] trait BasicPythonArrowInput extends PythonArrowInput[Iterator[In
112109
self: BasePythonRunner[Iterator[InternalRow], _] =>
113110

114111
protected def writeIteratorToArrowStream(
115-
root: VectorSchemaRoot,
116-
writer: ArrowStreamWriter,
117-
dataOut: DataOutputStream,
118-
inputIterator: Iterator[Iterator[InternalRow]]): Unit = {
112+
root: VectorSchemaRoot,
113+
writer: ArrowStreamWriter,
114+
dataOut: DataOutputStream,
115+
inputIterator: Iterator[Iterator[InternalRow]]): Unit = {
119116
val arrowWriter = ArrowWriter.create(root)
120117

121118
while (inputIterator.hasNext) {

sql/core/src/main/scala/org/apache/spark/sql/execution/python/PythonArrowOutput.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,6 @@ private[python] trait BasicPythonArrowOutput extends PythonArrowOutput[ColumnarB
123123
self: BasePythonRunner[_, ColumnarBatch] =>
124124

125125
protected def deserializeColumnarBatch(
126-
batch: ColumnarBatch,
127-
schema: StructType): ColumnarBatch = batch
126+
batch: ColumnarBatch,
127+
schema: StructType): ColumnarBatch = batch
128128
}

0 commit comments

Comments (0)