diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
index 1332b9eb444..bc80fa9de8f 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
@@ -25,17 +25,12 @@ import scala.collection.JavaConverters._
 import org.apache.hive.service.rpc.thrift._
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.execution.HiveResult
-import org.apache.spark.sql.execution.HiveResult.TimeFormatters
 import org.apache.spark.sql.types._
 
 import org.apache.kyuubi.util.RowSetUtils._
 
 object RowSet {
 
-  def getTimeFormatters(timeZone: ZoneId): TimeFormatters = {
-    HiveResult.getTimeFormatters
-  }
-
   def toTRowSet(
       bytes: Array[Byte],
       protocolVersion: TProtocolVersion): TRowSet = {
@@ -153,8 +148,8 @@ object RowSet {
       while (i < rowSize) {
         val row = rows(i)
         nulls.set(i, row.isNullAt(ordinal))
-        values.add(
-          HiveResult.toHiveString(row.get(ordinal) -> typ, false, getTimeFormatters(timeZone)))
+        val timeFormatters = HiveResult.getTimeFormatters
+        values.add(HiveResult.toHiveString(row.get(ordinal) -> typ, false, timeFormatters))
         i += 1
       }
       TColumn.stringVal(new TStringColumn(values, nulls))
@@ -239,7 +234,7 @@ object RowSet {
         HiveResult.toHiveString(
           row.get(ordinal) -> types(ordinal).dataType,
           false,
-          getTimeFormatters(timeZone)))
+          HiveResult.getTimeFormatters))
     }
     TColumnValue.stringVal(tStrValue)
   }
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDatasetHelper.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDatasetHelper.scala
index 4b853582621..f612644d276 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDatasetHelper.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDatasetHelper.scala
@@ -25,8 +25,6 @@ import org.apache.spark.sql.execution.HiveResult
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types._
 
-import org.apache.kyuubi.engine.spark.schema.RowSet
-
 object SparkDatasetHelper {
   def toArrowBatchRdd[T](ds: Dataset[T]): RDD[Array[Byte]] = {
     ds.toArrowBatchRdd
@@ -42,11 +40,11 @@
     val dt = DataType.fromDDL(schemaDDL)
     dt match {
       case StructType(Array(StructField(_, st: StructType, _, _))) =>
-        HiveResult.toHiveString((row, st), true, RowSet.getTimeFormatters(timeZone))
+        HiveResult.toHiveString((row, st), true, HiveResult.getTimeFormatters)
      case StructType(Array(StructField(_, at: ArrayType, _, _))) =>
-        HiveResult.toHiveString((row.toSeq.head, at), true, RowSet.getTimeFormatters(timeZone))
+        HiveResult.toHiveString((row.toSeq.head, at), true, HiveResult.getTimeFormatters)
       case StructType(Array(StructField(_, mt: MapType, _, _))) =>
-        HiveResult.toHiveString((row.toSeq.head, mt), true, RowSet.getTimeFormatters(timeZone))
+        HiveResult.toHiveString((row.toSeq.head, mt), true, HiveResult.getTimeFormatters)
       case _ =>
         throw new UnsupportedOperationException
     }
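
For context: the removed RowSet.getTimeFormatters(timeZone: ZoneId) wrapper ignored its timeZone argument and merely delegated to Spark's HiveResult.getTimeFormatters, which derives its zone from the session time zone configuration on its own, so the patch inlines the direct call at every call site. Below is a minimal, self-contained sketch of the resulting call pattern; the FormatterSketch object and its formatValue helper are hypothetical names introduced for illustration, not part of the patch.

    import org.apache.spark.sql.execution.HiveResult
    import org.apache.spark.sql.types._

    object FormatterSketch {
      // Hypothetical helper mirroring the patched call sites: fetch the
      // session-zone-aware formatters and hand them straight to toHiveString.
      def formatValue(value: Any, dataType: DataType): String = {
        // HiveResult.getTimeFormatters reads the session time zone itself,
        // which is why the ZoneId-taking wrapper in RowSet added nothing.
        val timeFormatters = HiveResult.getTimeFormatters
        HiveResult.toHiveString(value -> dataType, nested = false, timeFormatters)
      }

      def main(args: Array[String]): Unit = {
        // Expected output: 2023-01-01
        println(formatValue(java.sql.Date.valueOf("2023-01-01"), DateType))
      }
    }

The nested = false argument mirrors the patched call sites in RowSet.scala, which format top-level column values; SparkDatasetHelper.scala passes true instead, as it renders struct, array, and map values.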