
Commit 2657377

Hive: Add Hive 4 support and remove Hive 3

1 parent: 540d6a6

File tree: 63 files changed, +102 -2672 lines changed

Large commits have some content hidden by default, so only a subset of the changed files is shown below.

.github/labeler.yml (-3)

@@ -100,10 +100,7 @@ ORC:
 HIVE:
 - changed-files:
   - any-glob-to-any-file: [
-      'hive3/**/*',
       'hive-metastore/**/*',
-      'hive-runtime/**/*',
-      'hive3-orc-bundle/**/*'
     ]

 DATA:

.github/workflows/delta-conversion-ci.yml (-3)

@@ -48,9 +48,6 @@ on:
       - '.asf.yml'
       - 'dev/**'
       - 'mr/**'
-      - 'hive3/**'
-      - 'hive3-orc-bundle/**'
-      - 'hive-runtime/**'
       - 'flink/**'
       - 'kafka-connect/**'
       - 'docs/**'

.github/workflows/flink-ci.yml (-3)

@@ -48,9 +48,6 @@ on:
       - '.asf.yml'
       - 'dev/**'
       - 'mr/**'
-      - 'hive3/**'
-      - 'hive3-orc-bundle/**'
-      - 'hive-runtime/**'
       - 'kafka-connect/**'
       - 'spark/**'
       - 'docs/**'

.github/workflows/hive-ci.yml (+2 -31)

@@ -87,39 +87,10 @@ jobs:
           key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
           restore-keys: ${{ runner.os }}-gradle-
       - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
-      - run: ./gradlew -DsparkVersions= -DhiveVersions=2 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-mr:check :iceberg-hive-runtime:check -x javadoc
+      - run: ./gradlew -DsparkVersions= -DhiveVersions=2 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-mr:check -x javadoc
       - uses: actions/upload-artifact@v4
         if: failure()
         with:
           name: test logs
           path: |
-            **/build/testlogs
-
-  hive3-tests:
-    runs-on: ubuntu-22.04
-    strategy:
-      matrix:
-        jvm: [11, 17, 21]
-    env:
-      SPARK_LOCAL_IP: localhost
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: zulu
-          java-version: ${{ matrix.jvm }}
-      - uses: actions/cache@v4
-        with:
-          path: |
-            ~/.gradle/caches
-            ~/.gradle/wrapper
-          key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
-          restore-keys: ${{ runner.os }}-gradle-
-      - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
-      - run: ./gradlew -DsparkVersions= -DhiveVersions=3 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-hive3-orc-bundle:check :iceberg-hive3:check :iceberg-hive-runtime:check -x javadoc
-      - uses: actions/upload-artifact@v4
-        if: failure()
-        with:
-          name: test logs
-          path: |
-            **/build/testlogs
+            **/build/testlogs

.github/workflows/kafka-connect-ci.yml (-3)

@@ -49,9 +49,6 @@ on:
       - 'dev/**'
       - 'mr/**'
       - 'flink/**'
-      - 'hive3/**'
-      - 'hive3-orc-bundle/**'
-      - 'hive-runtime/**'
       - 'spark/**'
       - 'docs/**'
      - 'site/**'

.github/workflows/spark-ci.yml (-3)

@@ -49,9 +49,6 @@ on:
       - 'dev/**'
       - 'site/**'
       - 'mr/**'
-      - 'hive3/**'
-      - 'hive3-orc-bundle/**'
-      - 'hive-runtime/**'
       - 'flink/**'
       - 'kafka-connect/**'
       - 'docs/**'

build.gradle (+6 -4)

@@ -675,7 +675,7 @@ project(':iceberg-hive-metastore') {

     compileOnly libs.avro.avro

-    compileOnly(libs.hive2.metastore) {
+    compileOnly(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -695,7 +695,7 @@ project(':iceberg-hive-metastore') {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
    // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -707,7 +707,7 @@ project(':iceberg-hive-metastore') {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }

-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -723,7 +723,9 @@ project(':iceberg-hive-metastore') {
       exclude group: 'com.zaxxer', module: 'HikariCP'
     }

-    compileOnly(libs.hadoop2.client) {
+    testImplementation(libs.hive4.standalone.metastore.server)
+
+    compileOnly(libs.hadoop3.client) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
     }

flink/v1.18/build.gradle (+2 -2)

@@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }

-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency

flink/v1.18/flink/src/test/java/org/apache/iceberg/flink/CatalogTestBase.java (+1 -1)

@@ -116,6 +116,6 @@ protected String getFullQualifiedTableName(String tableName) {
   }

   static String getURI(HiveConf conf) {
-    return conf.get(HiveConf.ConfVars.METASTOREURIS.varname);
+    return conf.get(HiveConf.ConfVars.METASTORE_URIS.varname);
   }
 }

flink/v1.18/flink/src/test/java/org/apache/iceberg/flink/TestFlinkHiveCatalog.java (+2 -1)

@@ -57,7 +57,8 @@ public void testCreateCatalogWithHiveConfDir() throws IOException {
       Configuration newConf = new Configuration(hiveConf);
       // Set another new directory which is different with the hive metastore's warehouse path.
       newConf.set(
-          HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
+          HiveConf.ConfVars.METASTORE_WAREHOUSE.varname,
+          "file://" + warehouseDir.getAbsolutePath());
       newConf.writeXml(fos);
     }
     assertThat(hiveSiteXML.toPath()).exists();

flink/v1.18/flink/src/test/java/org/apache/iceberg/flink/source/TestFlinkInputFormatReaderDeletes.java (+2 -2)

@@ -44,8 +44,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
     Map<String, String> properties = Maps.newHashMap();
     properties.put(
         CatalogProperties.WAREHOUSE_LOCATION,
-        hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
-    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
+        hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
+    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
     properties.put(
         CatalogProperties.CLIENT_POOL_SIZE,
         Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));

flink/v1.18/flink/src/test/java/org/apache/iceberg/flink/source/TestIcebergSourceReaderDeletes.java (+2 -2)

@@ -62,8 +62,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
     Map<String, String> properties = Maps.newHashMap();
     properties.put(
         CatalogProperties.WAREHOUSE_LOCATION,
-        hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
-    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
+        hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
+    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
     properties.put(
         CatalogProperties.CLIENT_POOL_SIZE,
         Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));
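The Flink test updates above (repeated below for v1.19 and v1.20) are all the same mechanical rename: Hive 4 replaces the HiveConf.ConfVars spellings METASTOREURIS and METASTOREWAREHOUSE with METASTORE_URIS and METASTORE_WAREHOUSE. A minimal sketch of reading both settings against the Hive 4 jars; the class name and the empty-string defaults are illustrative, not part of the commit:

import org.apache.hadoop.hive.conf.HiveConf;

// Minimal sketch, assuming the Hive 4 jars are on the classpath, where the
// Hive 2/3 constants METASTOREURIS/METASTOREWAREHOUSE are replaced by the
// underscored names used throughout this commit.
public class HiveConfVarsSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Empty-string fallbacks are illustrative defaults, not Iceberg behavior.
    String uris = conf.get(HiveConf.ConfVars.METASTORE_URIS.varname, "");
    String warehouse = conf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname, "");
    System.out.println("metastore uris=" + uris + ", warehouse=" + warehouse);
  }
}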

flink/v1.19/build.gradle (+4 -4)

@@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }

-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -193,7 +193,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.apache.avro', module: 'avro'
     }

-    integrationImplementation(libs.hive2.metastore) {
+    integrationImplementation(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -210,7 +210,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.slf4j'
     }

-    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    integrationImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency

flink/v1.19/flink/src/test/java/org/apache/iceberg/flink/CatalogTestBase.java (+1 -1)

@@ -116,6 +116,6 @@ protected String getFullQualifiedTableName(String tableName) {
   }

   static String getURI(HiveConf conf) {
-    return conf.get(HiveConf.ConfVars.METASTOREURIS.varname);
+    return conf.get(HiveConf.ConfVars.METASTORE_URIS.varname);
   }
 }

flink/v1.19/flink/src/test/java/org/apache/iceberg/flink/TestFlinkHiveCatalog.java (+2 -1)

@@ -57,7 +57,8 @@ public void testCreateCatalogWithHiveConfDir() throws IOException {
       Configuration newConf = new Configuration(hiveConf);
       // Set another new directory which is different with the hive metastore's warehouse path.
       newConf.set(
-          HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
+          HiveConf.ConfVars.METASTORE_WAREHOUSE.varname,
+          "file://" + warehouseDir.getAbsolutePath());
       newConf.writeXml(fos);
     }
     assertThat(hiveSiteXML.toPath()).exists();

flink/v1.19/flink/src/test/java/org/apache/iceberg/flink/source/TestFlinkInputFormatReaderDeletes.java (+2 -2)

@@ -44,8 +44,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
     Map<String, String> properties = Maps.newHashMap();
     properties.put(
         CatalogProperties.WAREHOUSE_LOCATION,
-        hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
-    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
+        hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
+    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
     properties.put(
         CatalogProperties.CLIENT_POOL_SIZE,
         Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));

flink/v1.19/flink/src/test/java/org/apache/iceberg/flink/source/TestIcebergSourceReaderDeletes.java (+2 -2)

@@ -62,8 +62,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
     Map<String, String> properties = Maps.newHashMap();
     properties.put(
         CatalogProperties.WAREHOUSE_LOCATION,
-        hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
-    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
+        hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
+    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
     properties.put(
         CatalogProperties.CLIENT_POOL_SIZE,
         Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));

flink/v1.20/build.gradle (+4 -4)

@@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }

-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -193,7 +193,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.apache.avro', module: 'avro'
     }

-    integrationImplementation(libs.hive2.metastore) {
+    integrationImplementation(libs.hive4.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -210,7 +210,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.slf4j'
     }

-    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    integrationImplementation("${libs.hive4.exec.get().module}:${libs.hive4.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency

flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/CatalogTestBase.java (+1 -1)

@@ -116,6 +116,6 @@ protected String getFullQualifiedTableName(String tableName) {
   }

   static String getURI(HiveConf conf) {
-    return conf.get(HiveConf.ConfVars.METASTOREURIS.varname);
+    return conf.get(HiveConf.ConfVars.METASTORE_URIS.varname);
   }
 }

flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/TestFlinkHiveCatalog.java (+2 -1)

@@ -57,7 +57,8 @@ public void testCreateCatalogWithHiveConfDir() throws IOException {
       Configuration newConf = new Configuration(hiveConf);
       // Set another new directory which is different with the hive metastore's warehouse path.
       newConf.set(
-          HiveConf.ConfVars.METASTOREWAREHOUSE.varname, "file://" + warehouseDir.getAbsolutePath());
+          HiveConf.ConfVars.METASTORE_WAREHOUSE.varname,
+          "file://" + warehouseDir.getAbsolutePath());
       newConf.writeXml(fos);
     }
     assertThat(hiveSiteXML.toPath()).exists();

flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/source/TestFlinkInputFormatReaderDeletes.java (+2 -2)

@@ -44,8 +44,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
     Map<String, String> properties = Maps.newHashMap();
     properties.put(
         CatalogProperties.WAREHOUSE_LOCATION,
-        hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
-    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
+        hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
+    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
     properties.put(
         CatalogProperties.CLIENT_POOL_SIZE,
         Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));

flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/source/TestIcebergSourceReaderDeletes.java (+2 -2)

@@ -62,8 +62,8 @@ protected StructLikeSet rowSet(String tableName, Table testTable, String... colu
     Map<String, String> properties = Maps.newHashMap();
     properties.put(
         CatalogProperties.WAREHOUSE_LOCATION,
-        hiveConf.get(HiveConf.ConfVars.METASTOREWAREHOUSE.varname));
-    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
+        hiveConf.get(HiveConf.ConfVars.METASTORE_WAREHOUSE.varname));
+    properties.put(CatalogProperties.URI, hiveConf.get(HiveConf.ConfVars.METASTORE_URIS.varname));
     properties.put(
         CatalogProperties.CLIENT_POOL_SIZE,
         Integer.toString(hiveConf.getInt("iceberg.hive.client-pool-size", 5)));

gradle.properties (+1 -1)

@@ -19,7 +19,7 @@ jmhIncludeRegex=.*
 systemProp.defaultFlinkVersions=1.20
 systemProp.knownFlinkVersions=1.18,1.19,1.20
 systemProp.defaultHiveVersions=2
-systemProp.knownHiveVersions=2,3
+systemProp.knownHiveVersions=2,4
 systemProp.defaultSparkVersions=3.5
 systemProp.knownSparkVersions=3.3,3.4,3.5
 systemProp.defaultKafkaVersions=3

gradle/libs.versions.toml (+6 -5)

@@ -50,7 +50,7 @@ hadoop2 = "2.7.3"
 hadoop3 = "3.4.1"
 httpcomponents-httpclient5 = "5.4.1"
 hive2 = { strictly = "2.3.9"} # see rich version usage explanation above
-hive3 = "3.1.3"
+hive4 = "4.0.1"
 immutables-value = "2.10.1"
 jackson-bom = "2.18.2"
 jackson211 = { strictly = "2.11.4"} # see rich version usage explanation above
@@ -139,10 +139,11 @@ hive2-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive2" }
 hive2-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive2" }
 hive2-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive2" }
 hive2-service = { module = "org.apache.hive:hive-service", version.ref = "hive2" }
-hive3-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive3" }
-hive3-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive3" }
-hive3-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive3" }
-hive3-service = { module = "org.apache.hive:hive-service", version.ref = "hive3" }
+hive4-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive4" }
+hive4-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive4" }
+hive4-standalone-metastore-server = { module = "org.apache.hive:hive-standalone-metastore-server", version.ref = "hive4" }
+hive4-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive4" }
+hive4-service = { module = "org.apache.hive:hive-service", version.ref = "hive4" }
 httpcomponents-httpclient5 = { module = "org.apache.httpcomponents.client5:httpclient5", version.ref = "httpcomponents-httpclient5" }
 immutables-value = { module = "org.immutables:value", version.ref = "immutables-value" }
 jackson-bom = { module = "com.fasterxml.jackson:jackson-bom", version.ref = "jackson-bom" }

hive-metastore/src/main/java/org/apache/iceberg/hive/CachedClientPool.java (+1 -1)

@@ -132,7 +132,7 @@ public <R> R run(Action<R, IMetaStoreClient, TException> action, boolean retry)
   static Key extractKey(String cacheKeys, Configuration conf) {
     // generate key elements in a certain order, so that the Key instances are comparable
     List<Object> elements = Lists.newArrayList();
-    elements.add(conf.get(HiveConf.ConfVars.METASTOREURIS.varname, ""));
+    elements.add(conf.get(HiveConf.ConfVars.METASTORE_URIS.varname, ""));
     elements.add(conf.get(HiveCatalog.HIVE_CONF_CATALOG, "hive"));
     if (cacheKeys == null || cacheKeys.isEmpty()) {
       return Key.of(elements);
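The CachedClientPool change keeps client pools keyed per metastore endpoint and catalog name, now read via the Hive 4 constant. A rough sketch of the base key derivation; the class name and the inlined "iceberg.catalog" literal are illustrative stand-ins for Iceberg's HiveCatalog.HIVE_CONF_CATALOG:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;

// Rough sketch of the key derivation in extractKey above: the pool cache is
// keyed by the configured metastore endpoint (METASTORE_URIS under Hive 4)
// plus the Hive catalog name, so distinct metastores get distinct pools.
class ClientPoolKeySketch {
  // Assumed to mirror HiveCatalog.HIVE_CONF_CATALOG; the literal is illustrative.
  private static final String HIVE_CONF_CATALOG = "iceberg.catalog";

  static List<Object> baseKeyElements(Configuration conf) {
    List<Object> elements = new ArrayList<>();
    elements.add(conf.get(HiveConf.ConfVars.METASTORE_URIS.varname, ""));
    elements.add(conf.get(HIVE_CONF_CATALOG, "hive"));
    return elements;
  }
}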
