diff --git a/pom.xml b/pom.xml
index ff2c86a960aa..129e95af5d74 100644
--- a/pom.xml
+++ b/pom.xml
@@ -125,7 +125,7 @@
     <arrow.version>0.10.0</arrow.version>
     <avatica.version>1.12.0</avatica.version>
-    <avro.version>1.8.2</avro.version>
+    <avro.version>1.9.2</avro.version>
     <bonecp.version>0.8.0.RELEASE</bonecp.version>
     <calcite.version>1.19.0</calcite.version>
     <datanucleus-api-jdo.version>4.2.4</datanucleus-api-jdo.version>
@@ -456,7 +456,6 @@
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-mapred</artifactId>
-        <classifier>hadoop2</classifier>
         <version>${avro.version}</version>
diff --git a/ql/pom.xml b/ql/pom.xml
index 0d57c25e1c3f..75360777ec6b 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -157,7 +157,6 @@
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
-      <classifier>hadoop2</classifier>
       <version>${avro.version}</version>
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index db8db1c9222a..8e882b41f313 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -272,7 +272,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
int scale = 0;
try {
- scale = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).asInt();
+ scale = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
} catch(Exception ex) {
throw new AvroSerdeException("Failed to obtain scale value from file schema: " + fileSchema, ex);
}
@@ -288,7 +288,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
int maxLength = 0;
try {
- maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value for char field from file schema: " + fileSchema, ex);
}
@@ -303,7 +303,7 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
maxLength = 0;
try {
- maxLength = fileSchema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) fileSchema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value for varchar field from file schema: " + fileSchema, ex);
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
index 35d83bdb1af0..03f07bb8482d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
@@ -136,8 +136,8 @@ public static TypeInfo generateTypeInfo(Schema schema,
int precision = 0;
int scale = 0;
try {
- precision = schema.getJsonProp(AvroSerDe.AVRO_PROP_PRECISION).getIntValue();
- scale = schema.getJsonProp(AvroSerDe.AVRO_PROP_SCALE).getIntValue();
+ precision = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_PRECISION);
+ scale = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_SCALE);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain scale value from file schema: " + schema, ex);
}
@@ -155,7 +155,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
AvroSerDe.CHAR_TYPE_NAME.equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
int maxLength = 0;
try {
- maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
}
@@ -166,7 +166,7 @@ public static TypeInfo generateTypeInfo(Schema schema,
.equalsIgnoreCase(schema.getProp(AvroSerDe.AVRO_PROP_LOGICAL_TYPE))) {
int maxLength = 0;
try {
- maxLength = schema.getJsonProp(AvroSerDe.AVRO_PROP_MAX_LENGTH).getValueAsInt();
+ maxLength = (int) schema.getObjectProp(AvroSerDe.AVRO_PROP_MAX_LENGTH);
} catch (Exception ex) {
throw new AvroSerdeException("Failed to obtain maxLength value from file schema: " + schema, ex);
}
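
Context for the `getJsonProp` → `getObjectProp` changes above: Avro 1.9 removed the Jackson-1-based `Schema#getJsonProp`, and schema properties are now read through `getObjectProp`, which returns a plain `java.lang.Object` (an `Integer` for small JSON numbers). A minimal standalone sketch of the pattern, assuming Avro 1.9.2 on the classpath; the decimal schema literal is a made-up example, not taken from the patch:

```java
import org.apache.avro.Schema;

public class ObjectPropDemo {
  public static void main(String[] args) {
    // A decimal schema carrying the same "precision"/"scale" properties
    // that AvroDeserializer and SchemaToTypeInfo read in the patch.
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"bytes\",\"logicalType\":\"decimal\","
            + "\"precision\":10,\"scale\":2}");

    // Avro 1.8.x (removed in 1.9):
    //   int scale = schema.getJsonProp("scale").getIntValue();
    // Avro 1.9.x: the JSON number comes back as an Integer, so a plain
    // cast-and-unbox replaces the old JsonNode accessor chain.
    int precision = (int) schema.getObjectProp("precision");
    int scale = (int) schema.getObjectProp("scale");
    System.out.println(precision + "," + scale); // prints: 10,2
  }
}
```

If a property holds a non-integer value, the cast throws a `ClassCastException`, which the surrounding `try`/`catch (Exception ex)` blocks in the patch convert into an `AvroSerdeException`.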