sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveShim.scala (21 changes: 11 additions & 10 deletions)

@@ -20,6 +20,11 @@ package org.apache.spark.sql.hive
 import java.io.{InputStream, OutputStream}
 import java.rmi.server.UID
 
+/* Implicit conversions */
+import scala.collection.JavaConversions._
+import scala.language.implicitConversions
+import scala.reflect.ClassTag
+
 import com.esotericsoftware.kryo.Kryo
 import com.esotericsoftware.kryo.io.{Input, Output}
 import org.apache.hadoop.conf.Configuration
@@ -35,10 +40,6 @@ import org.apache.spark.Logging
 import org.apache.spark.sql.types.Decimal
 import org.apache.spark.util.Utils
 
-/* Implicit conversions */
-import scala.collection.JavaConversions._
-import scala.reflect.ClassTag
-
 private[hive] object HiveShim {
   // Precision and scale to pass for unlimited decimals; these are the same as the precision and
   // scale Hive 0.13 infers for BigDecimals from sources that don't specify them (e.g. UDFs)
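The moved imports work together with the `implicit def` further down in this file: `scala.language.implicitConversions` enables that definition without a compiler feature warning, and `scala.collection.JavaConversions._` supplies the implicit `Seq` to `java.util.List` conversions that let the Scala code call Hive's Java APIs directly. A minimal, self-contained sketch of both mechanisms (the `Wrapper` type and conversion are illustrative, not from this file; `JavaConversions` is the pre-Scala-2.12 idiom Spark used at the time):

```scala
import scala.collection.JavaConversions._
import scala.language.implicitConversions

object ImplicitsSketch {
  // Illustrative stand-in for a wrapper type such as ShimFileSinkDesc.
  case class Wrapper(dir: String)

  // Without the scala.language.implicitConversions import above,
  // this definition produces a compiler feature warning.
  implicit def wrapperToPathString(w: Wrapper): String = w.dir

  def main(args: Array[String]): Unit = {
    val asString: String = Wrapper("/tmp/hive-out")  // implicit def applied
    val asJava: java.util.List[Int] = Seq(1, 2, 3)   // JavaConversions applied
    println(asString + " " + asJava)
  }
}
```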
@@ -68,10 +69,10 @@ private[hive] object HiveShim {
    * Cannot use ColumnProjectionUtils.appendReadColumns directly, if ids is null or empty
    */
   def appendReadColumns(conf: Configuration, ids: Seq[Integer], names: Seq[String]) {
-    if (ids != null && ids.size > 0) {
+    if (ids != null && ids.nonEmpty) {
       ColumnProjectionUtils.appendReadColumns(conf, ids)
     }
-    if (names != null && names.size > 0) {
+    if (names != null && names.nonEmpty) {
       appendReadColumnNames(conf, names)
     }
   }
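`nonEmpty` is the idiomatic Scala replacement for `.size > 0`: it delegates to `isEmpty`, which is O(1) even for linear sequences where `size` is O(n). The guards themselves are the method's whole purpose; per its doc comment, Hive's `ColumnProjectionUtils.appendReadColumns` must not be handed a null or empty id list. A self-contained sketch of the same guard pattern (the helper name and sample values are illustrative; requires Hive's serde2 on the classpath):

```scala
import scala.collection.JavaConversions._

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils

object ReadColumnsSketch {
  // Only forward to Hive when ids is a non-null, non-empty sequence;
  // JavaConversions supplies the Seq -> java.util.List conversion.
  def appendReadColumnsSafely(conf: Configuration, ids: Seq[Integer]): Unit = {
    if (ids != null && ids.nonEmpty) {
      ColumnProjectionUtils.appendReadColumns(conf, ids)
    }
  }

  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    appendReadColumnsSafely(conf, Seq[Integer](0, 2)) // appended
    appendReadColumnsSafely(conf, null)               // ignored, no NPE
    appendReadColumnsSafely(conf, Seq.empty)          // ignored
  }
}
```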
@@ -197,11 +198,11 @@ private[hive] object HiveShim {
   }
 
   /*
-  * Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
-  * Fix it through wrapper.
-  * */
+   * Bug introduced in hive-0.13. FileSinkDesc is serializable, but its member path is not.
+   * Fix it through wrapper.
+   */
   implicit def wrapperToFileSinkDesc(w: ShimFileSinkDesc): FileSinkDesc = {
-    var f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
+    val f = new FileSinkDesc(new Path(w.dir), w.tableInfo, w.compressed)
     f.setCompressCodec(w.compressCodec)
     f.setCompressType(w.compressType)
     f.setTableInfo(w.tableInfo)
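On the `var f` to `val f` change: `f` is only mutated through its setters and never reassigned, so `val` is the correct binding. The wrapper the comment describes exists because Hive 0.13's `FileSinkDesc` is declared `Serializable` while its `Path` member is not, which breaks task serialization in Spark. A sketch of the pattern under that constraint (field names follow the conversion above; the exact class body is an assumption, not Spark's verbatim definition):

```scala
import org.apache.hadoop.hive.ql.plan.TableDesc

// Serializable stand-in for FileSinkDesc: the non-serializable Path is
// kept as a plain String ("dir") and only turned back into a Path inside
// wrapperToFileSinkDesc, after deserialization.
private[hive] class ShimFileSinkDesc(
    var dir: String,
    var tableInfo: TableDesc,
    var compressed: Boolean)
  extends Serializable {
  var compressCodec: String = _
  var compressType: String = _
}
```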