From 26d22d084a4a60e7c8e2e39c0652db67d38de5e9 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 10 Jan 2025 15:35:02 +0100
Subject: [PATCH 01/40] HIVE-28059 : major rebase stage 1;
---
.../hive/TestHiveIcebergBranchOperation.java | 4 +-
.../TestHiveIcebergStorageHandlerNoScan.java | 4 +-
...n_partition_evolution_w_id_spec_w_filter.q | 19 +--
.../src/test/queries/positive/iceberg_stats.q | 22 ---
.../test/results/positive/iceberg_stats.q.out | 159 ------------------
...rtition_evolution_w_id_spec_w_filter.q.out | 112 +++---------
.../hive/metastore/HiveMetaStoreClient.java | 6 +-
.../hive/metastore/conf/MetastoreConf.java | 2 +
.../hadoop/hive/metastore/HiveMetaStore.java | 10 ++
9 files changed, 42 insertions(+), 296 deletions(-)
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergBranchOperation.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergBranchOperation.java
index d62f83eb6fcc..c5eb59987fb0 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergBranchOperation.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergBranchOperation.java
@@ -231,12 +231,12 @@ public void testCreateBranchFromTag() throws IOException, InterruptedException {
// Create a branch based on a tag which doesn't exist will fail.
Assertions.assertThatThrownBy(() -> shell.executeStatement(String.format(
"ALTER TABLE customers CREATE BRANCH %s FOR TAG AS OF %s", branchName2, nonExistTag)))
- .isInstanceOf(IllegalArgumentException.class).hasMessageContaining("does not exist");
+ .isInstanceOf(IllegalArgumentException.class).hasMessageEndingWith("does not exist");
// Create a branch based on a branch will fail.
Assertions.assertThatThrownBy(() -> shell.executeStatement(String.format(
"ALTER TABLE customers CREATE BRANCH %s FOR TAG AS OF %s", branchName2, branchName1)))
- .isInstanceOf(IllegalArgumentException.class).hasMessageContaining("does not exist");
+ .isInstanceOf(IllegalArgumentException.class).hasMessageEndingWith("does not exist");
}
@Test
diff --git a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
index a8072f45afe6..70c5f6880418 100644
--- a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
+++ b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java
@@ -700,7 +700,7 @@ public void testCreateTableError() {
"')"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Failed to execute Hive query")
- .hasMessageContaining("Table location not set");
+ .hasMessageEndingWith("Table location not set");
}
}
@@ -775,7 +775,7 @@ public void testCreatePartitionedTableWithPropertiesAndWithColumnSpecification()
"')"))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Failed to execute Hive query")
- .hasMessageContaining(
+ .hasMessageEndingWith(
"Provide only one of the following: Hive partition transform specification, " +
"or the iceberg.mr.table.partition.spec property");
}
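
The assertion change above swaps AssertJ's hasMessageContaining for the stricter hasMessageEndingWith. A minimal sketch of the difference, outside Hive (the exception messages below are made up for illustration):

    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    class MessageAssertionSketch {
      public static void main(String[] args) {
        // Passes both checks: the expected fragment is present and is the message tail.
        assertThatThrownBy(() -> { throw new IllegalArgumentException("Tag nonExistTag does not exist"); })
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining("does not exist")
            .hasMessageEndingWith("does not exist");

        // Passes hasMessageContaining but would fail hasMessageEndingWith,
        // because text follows the fragment; the tightened assertions rule this case out.
        assertThatThrownBy(() -> { throw new IllegalArgumentException("Tag does not exist: nonExistTag"); })
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining("does not exist");
      }
    }
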
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q
index 53e915d09ca2..7d0576343aea 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q
@@ -65,26 +65,11 @@ delete from ice_orc where last_name in ('ln1', 'ln9');
delete from ice_orc where last_name in ('ln3', 'ln11');
delete from ice_orc where last_name in ('ln5', 'ln13');
-alter table ice_orc set partition spec(team_id);
-insert into ice_orc VALUES
- ('fn17', 'ln17', 1, 10, 100),
- ('fn18','ln18', 1, 10, 100);
-insert into ice_orc VALUES
- ('fn19','ln19', 2, 11, 100),
- ('fn20','ln20', 2, 11, 100);
-insert into ice_orc VALUES
- ('fn21','ln21', 3, 12, 100),
- ('fn22','ln22', 3, 12, 100);
-insert into ice_orc VALUES
- ('fn23','ln23', 4, 13, 100),
- ('fn24','ln24', 4, 13, 100);
-
-
select * from ice_orc;
describe formatted ice_orc;
-explain alter table ice_orc COMPACT 'major' and wait where company_id=100 or dept_id in (1,2);
-alter table ice_orc COMPACT 'major' and wait where company_id=100 or dept_id in (1,2);
+explain alter table ice_orc COMPACT 'major' and wait where team_id=10 or first_name in ('fn3', 'fn11') or last_name in ('ln7', 'ln15');
+alter table ice_orc COMPACT 'major' and wait where team_id=10 or first_name in ('fn3', 'fn11') or last_name in ('ln7', 'ln15');
select * from ice_orc;
describe formatted ice_orc;
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_stats.q b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_stats.q
index 6fc965e17456..de88018f32e0 100644
--- a/iceberg/iceberg-handler/src/test/queries/positive/iceberg_stats.q
+++ b/iceberg/iceberg-handler/src/test/queries/positive/iceberg_stats.q
@@ -28,27 +28,5 @@ select count(*) from ice01;
insert overwrite table ice01 select * from ice01;
explain select count(*) from ice01;
--- false means that count(*) query won't use row count stored in HMS
-set iceberg.hive.keep.stats=false;
-
-create external table ice03 (id int, key int) Stored by Iceberg stored as ORC
- TBLPROPERTIES('format-version'='2');
-
-insert into ice03 values (1,1),(2,1),(3,1),(4,1),(5,1);
--- Iceberg table can utilize fetch task to directly retrieve the row count from iceberg SnapshotSummary
-explain select count(*) from ice03;
-select count(*) from ice03;
-
--- delete some values
-delete from ice03 where id in (2,4);
-
-explain select count(*) from ice03;
-select count(*) from ice03;
-
--- iow
-insert overwrite table ice03 select * from ice03;
-explain select count(*) from ice03;
-
drop table ice01;
drop table ice02;
-drop table ice03;
diff --git a/iceberg/iceberg-handler/src/test/results/positive/iceberg_stats.q.out b/iceberg/iceberg-handler/src/test/results/positive/iceberg_stats.q.out
index 4e5b70945016..33c60b54608d 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/iceberg_stats.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/iceberg_stats.q.out
@@ -192,155 +192,6 @@ STAGE PLANS:
Processor Tree:
ListSink
-PREHOOK: query: create external table ice03 (id int, key int) Stored by Iceberg stored as ORC
- TBLPROPERTIES('format-version'='2')
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@ice03
-POSTHOOK: query: create external table ice03 (id int, key int) Stored by Iceberg stored as ORC
- TBLPROPERTIES('format-version'='2')
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@ice03
-PREHOOK: query: insert into ice03 values (1,1),(2,1),(3,1),(4,1),(5,1)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@ice03
-POSTHOOK: query: insert into ice03 values (1,1),(2,1),(3,1),(4,1),(5,1)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@ice03
-PREHOOK: query: explain select count(*) from ice03
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: explain select count(*) from ice03
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-0
- Fetch Operator
- limit: 1
- Processor Tree:
- ListSink
-
-PREHOOK: query: select count(*) from ice03
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select count(*) from ice03
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-5
-PREHOOK: query: delete from ice03 where id in (2,4)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: default@ice03
-POSTHOOK: query: delete from ice03 where id in (2,4)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: default@ice03
-PREHOOK: query: explain select count(*) from ice03
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: explain select count(*) from ice03
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
- Stage: Stage-1
- Tez
-#### A masked pattern was here ####
- Edges:
- Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
- Vertices:
- Map 1
- Map Operator Tree:
- TableScan
- alias: ice03
- Statistics: Num rows: 3 Data size: #Masked# Basic stats: COMPLETE Column stats: COMPLETE
- Select Operator
- Statistics: Num rows: 3 Data size: #Masked# Basic stats: COMPLETE Column stats: COMPLETE
- Group By Operator
- aggregations: count()
- minReductionHashAggr: 0.6666666
- mode: hash
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: #Masked# Basic stats: COMPLETE Column stats: COMPLETE
- Reduce Output Operator
- null sort order:
- sort order:
- Statistics: Num rows: 1 Data size: #Masked# Basic stats: COMPLETE Column stats: COMPLETE
- value expressions: _col0 (type: bigint)
- Execution mode: vectorized
- Reducer 2
- Execution mode: vectorized
- Reduce Operator Tree:
- Group By Operator
- aggregations: count(VALUE._col0)
- mode: mergepartial
- outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: #Masked# Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- Statistics: Num rows: 1 Data size: #Masked# Basic stats: COMPLETE Column stats: COMPLETE
- table:
- input format: org.apache.hadoop.mapred.SequenceFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
- Processor Tree:
- ListSink
-
-PREHOOK: query: select count(*) from ice03
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: select count(*) from ice03
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-3
-PREHOOK: query: insert overwrite table ice03 select * from ice03
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: default@ice03
-POSTHOOK: query: insert overwrite table ice03 select * from ice03
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: default@ice03
-PREHOOK: query: explain select count(*) from ice03
-PREHOOK: type: QUERY
-PREHOOK: Input: default@ice03
-PREHOOK: Output: hdfs://### HDFS PATH ###
-POSTHOOK: query: explain select count(*) from ice03
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: hdfs://### HDFS PATH ###
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-0
- Fetch Operator
- limit: 1
- Processor Tree:
- ListSink
-
PREHOOK: query: drop table ice01
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@ice01
@@ -361,13 +212,3 @@ POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@ice02
POSTHOOK: Output: database:default
POSTHOOK: Output: default@ice02
-PREHOOK: query: drop table ice03
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@ice03
-PREHOOK: Output: database:default
-PREHOOK: Output: default@ice03
-POSTHOOK: query: drop table ice03
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@ice03
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@ice03
diff --git a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q.out b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q.out
index 7df4035b818a..95a7ef33c919 100644
--- a/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q.out
+++ b/iceberg/iceberg-handler/src/test/results/positive/llap/iceberg_major_compaction_partition_evolution_w_id_spec_w_filter.q.out
@@ -149,61 +149,6 @@ POSTHOOK: query: delete from ice_orc where last_name in ('ln5', 'ln13')
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ice_orc
POSTHOOK: Output: default@ice_orc
-PREHOOK: query: alter table ice_orc set partition spec(team_id)
-PREHOOK: type: ALTERTABLE_SETPARTSPEC
-PREHOOK: Input: default@ice_orc
-POSTHOOK: query: alter table ice_orc set partition spec(team_id)
-POSTHOOK: type: ALTERTABLE_SETPARTSPEC
-POSTHOOK: Input: default@ice_orc
-POSTHOOK: Output: default@ice_orc
-PREHOOK: query: insert into ice_orc VALUES
- ('fn17', 'ln17', 1, 10, 100),
- ('fn18','ln18', 1, 10, 100)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@ice_orc
-POSTHOOK: query: insert into ice_orc VALUES
- ('fn17', 'ln17', 1, 10, 100),
- ('fn18','ln18', 1, 10, 100)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@ice_orc
-PREHOOK: query: insert into ice_orc VALUES
- ('fn19','ln19', 2, 11, 100),
- ('fn20','ln20', 2, 11, 100)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@ice_orc
-POSTHOOK: query: insert into ice_orc VALUES
- ('fn19','ln19', 2, 11, 100),
- ('fn20','ln20', 2, 11, 100)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@ice_orc
-PREHOOK: query: insert into ice_orc VALUES
- ('fn21','ln21', 3, 12, 100),
- ('fn22','ln22', 3, 12, 100)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@ice_orc
-POSTHOOK: query: insert into ice_orc VALUES
- ('fn21','ln21', 3, 12, 100),
- ('fn22','ln22', 3, 12, 100)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@ice_orc
-PREHOOK: query: insert into ice_orc VALUES
- ('fn23','ln23', 4, 13, 100),
- ('fn24','ln24', 4, 13, 100)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@ice_orc
-POSTHOOK: query: insert into ice_orc VALUES
- ('fn23','ln23', 4, 13, 100),
- ('fn24','ln24', 4, 13, 100)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@ice_orc
PREHOOK: query: select * from ice_orc
PREHOOK: type: QUERY
PREHOOK: Input: default@ice_orc
@@ -217,15 +162,7 @@ fn12 ln12 2 11 100
fn14 ln14 3 12 100
fn15 ln15 4 13 100
fn16 ln16 4 13 100
-fn17 ln17 1 10 100
-fn18 ln18 1 10 100
-fn19 ln19 2 11 100
fn2 ln2 1 10 100
-fn20 ln20 2 11 100
-fn21 ln21 3 12 100
-fn22 ln22 3 12 100
-fn23 ln23 4 13 100
-fn24 ln24 4 13 100
fn4 ln4 2 11 100
fn6 ln6 3 12 100
fn7 ln7 4 13 100
@@ -245,7 +182,8 @@ company_id bigint
# Partition Transform Information
# col_name transform_type
-team_id IDENTITY
+company_id IDENTITY
+dept_id IDENTITY
# Detailed Table Information
Database: default
@@ -254,24 +192,24 @@ Retention: 0
#### A masked pattern was here ####
Table Type: EXTERNAL_TABLE
Table Parameters:
- COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"company_id\":\"true\",\"dept_id\":\"true\",\"first_name\":\"true\",\"last_name\":\"true\",\"team_id\":\"true\"}}
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
EXTERNAL TRUE
bucketing_version 2
current-schema {\"type\":\"struct\",\"schema-id\":0,\"fields\":[{\"id\":1,\"name\":\"first_name\",\"required\":false,\"type\":\"string\"},{\"id\":2,\"name\":\"last_name\",\"required\":false,\"type\":\"string\"},{\"id\":3,\"name\":\"dept_id\",\"required\":false,\"type\":\"long\"},{\"id\":4,\"name\":\"team_id\",\"required\":false,\"type\":\"long\"},{\"id\":5,\"name\":\"company_id\",\"required\":false,\"type\":\"long\"}]}
current-snapshot-id #Masked#
- current-snapshot-summary {\"added-data-files\":\"1\",\"added-records\":\"2\",\"added-files-size\":\"#Masked#\",\"changed-partition-count\":\"1\",\"total-records\":\"24\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"12\",\"total-delete-files\":\"6\",\"total-position-deletes\":\"6\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"#Masked#\"}
+ current-snapshot-summary {\"added-position-delete-files\":\"2\",\"added-delete-files\":\"2\",\"added-files-size\":\"#Masked#\",\"added-position-deletes\":\"2\",\"changed-partition-count\":\"2\",\"total-records\":\"16\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"8\",\"total-delete-files\":\"6\",\"total-position-deletes\":\"6\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"#Masked#\"}
current-snapshot-timestamp-ms #Masked#
- default-partition-spec {\"spec-id\":2,\"fields\":[{\"name\":\"team_id\",\"transform\":\"identity\",\"source-id\":4,\"field-id\":1002}]}
+ default-partition-spec {\"spec-id\":1,\"fields\":[{\"name\":\"company_id\",\"transform\":\"identity\",\"source-id\":5,\"field-id\":1000},{\"name\":\"dept_id\",\"transform\":\"identity\",\"source-id\":3,\"field-id\":1001}]}
format-version 2
iceberg.orc.files.only true
#### A masked pattern was here ####
- numFiles 12
- numRows 18
+ numFiles 8
+ numRows 10
parquet.compression zstd
#### A masked pattern was here ####
rawDataSize 0
serialization.format 1
- snapshot-count 15
+ snapshot-count 11
storage_handler org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
table_type ICEBERG
totalSize #Masked#
@@ -288,11 +226,11 @@ InputFormat: org.apache.iceberg.mr.hive.HiveIcebergInputFormat
OutputFormat: org.apache.iceberg.mr.hive.HiveIcebergOutputFormat
Compressed: No
Sort Columns: []
-PREHOOK: query: explain alter table ice_orc COMPACT 'major' and wait where company_id=100 or dept_id in (1,2)
+PREHOOK: query: explain alter table ice_orc COMPACT 'major' and wait where team_id=10 or first_name in ('fn3', 'fn11') or last_name in ('ln7', 'ln15')
PREHOOK: type: ALTERTABLE_COMPACT
PREHOOK: Input: default@ice_orc
PREHOOK: Output: default@ice_orc
-POSTHOOK: query: explain alter table ice_orc COMPACT 'major' and wait where company_id=100 or dept_id in (1,2)
+POSTHOOK: query: explain alter table ice_orc COMPACT 'major' and wait where team_id=10 or first_name in ('fn3', 'fn11') or last_name in ('ln7', 'ln15')
POSTHOOK: type: ALTERTABLE_COMPACT
POSTHOOK: Input: default@ice_orc
POSTHOOK: Output: default@ice_orc
@@ -308,11 +246,11 @@ STAGE PLANS:
table name: default.ice_orc
blocking: true
-PREHOOK: query: alter table ice_orc COMPACT 'major' and wait where company_id=100 or dept_id in (1,2)
+PREHOOK: query: alter table ice_orc COMPACT 'major' and wait where team_id=10 or first_name in ('fn3', 'fn11') or last_name in ('ln7', 'ln15')
PREHOOK: type: ALTERTABLE_COMPACT
PREHOOK: Input: default@ice_orc
PREHOOK: Output: default@ice_orc
-POSTHOOK: query: alter table ice_orc COMPACT 'major' and wait where company_id=100 or dept_id in (1,2)
+POSTHOOK: query: alter table ice_orc COMPACT 'major' and wait where team_id=10 or first_name in ('fn3', 'fn11') or last_name in ('ln7', 'ln15')
POSTHOOK: type: ALTERTABLE_COMPACT
POSTHOOK: Input: default@ice_orc
POSTHOOK: Output: default@ice_orc
@@ -329,15 +267,7 @@ fn12 ln12 2 11 100
fn14 ln14 3 12 100
fn15 ln15 4 13 100
fn16 ln16 4 13 100
-fn17 ln17 1 10 100
-fn18 ln18 1 10 100
-fn19 ln19 2 11 100
fn2 ln2 1 10 100
-fn20 ln20 2 11 100
-fn21 ln21 3 12 100
-fn22 ln22 3 12 100
-fn23 ln23 4 13 100
-fn24 ln24 4 13 100
fn4 ln4 2 11 100
fn6 ln6 3 12 100
fn7 ln7 4 13 100
@@ -357,7 +287,8 @@ company_id bigint
# Partition Transform Information
# col_name transform_type
-team_id IDENTITY
+company_id IDENTITY
+dept_id IDENTITY
# Detailed Table Information
Database: default
@@ -371,19 +302,19 @@ Table Parameters:
bucketing_version 2
current-schema {\"type\":\"struct\",\"schema-id\":0,\"fields\":[{\"id\":1,\"name\":\"first_name\",\"required\":false,\"type\":\"string\"},{\"id\":2,\"name\":\"last_name\",\"required\":false,\"type\":\"string\"},{\"id\":3,\"name\":\"dept_id\",\"required\":false,\"type\":\"long\"},{\"id\":4,\"name\":\"team_id\",\"required\":false,\"type\":\"long\"},{\"id\":5,\"name\":\"company_id\",\"required\":false,\"type\":\"long\"}]}
current-snapshot-id #Masked#
- current-snapshot-summary {\"added-data-files\":\"4\",\"deleted-data-files\":\"8\",\"removed-position-delete-files\":\"6\",\"removed-delete-files\":\"6\",\"added-records\":\"10\",\"deleted-records\":\"16\",\"added-files-size\":\"#Masked#\",\"removed-files-size\":\"#Masked#\",\"removed-position-deletes\":\"6\",\"changed-partition-count\":\"9\",\"total-records\":\"18\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"8\",\"total-delete-files\":\"0\",\"total-position-deletes\":\"0\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"#Masked#\"}
+ current-snapshot-summary {\"added-data-files\":\"4\",\"deleted-data-files\":\"4\",\"removed-position-delete-files\":\"3\",\"removed-delete-files\":\"3\",\"added-records\":\"5\",\"deleted-records\":\"8\",\"added-files-size\":\"#Masked#\",\"removed-files-size\":\"#Masked#\",\"removed-position-deletes\":\"3\",\"changed-partition-count\":\"5\",\"total-records\":\"11\",\"total-files-size\":\"#Masked#\",\"total-data-files\":\"8\",\"total-delete-files\":\"1\",\"total-position-deletes\":\"1\",\"total-equality-deletes\":\"0\",\"iceberg-version\":\"#Masked#\"}
current-snapshot-timestamp-ms #Masked#
- default-partition-spec {\"spec-id\":2,\"fields\":[{\"name\":\"team_id\",\"transform\":\"identity\",\"source-id\":4,\"field-id\":1002}]}
+ default-partition-spec {\"spec-id\":1,\"fields\":[{\"name\":\"company_id\",\"transform\":\"identity\",\"source-id\":5,\"field-id\":1000},{\"name\":\"dept_id\",\"transform\":\"identity\",\"source-id\":3,\"field-id\":1001}]}
format-version 2
iceberg.orc.files.only true
#### A masked pattern was here ####
numFiles 8
- numRows 18
+ numRows 10
parquet.compression zstd
#### A masked pattern was here ####
rawDataSize 0
serialization.format 1
- snapshot-count 20
+ snapshot-count 15
storage_handler org.apache.iceberg.mr.hive.HiveIcebergStorageHandler
table_type ICEBERG
totalSize #Masked#
@@ -405,8 +336,7 @@ PREHOOK: type: SHOW COMPACTIONS
POSTHOOK: query: show compactions order by 'partition'
POSTHOOK: type: SHOW COMPACTIONS
CompactionId Database Table Partition Type State Worker host Worker Enqueue Time Start Time Duration(ms) HadoopJobId Error message Initiator host Initiator Pool name TxnId Next TxnId Commit Time Highest WriteId
-#Masked# default ice_orc team_id=10 MAJOR succeeded #Masked# manual default 0 0 0 ---
-#Masked# default ice_orc team_id=11 MAJOR succeeded #Masked# manual default 0 0 0 ---
-#Masked# default ice_orc team_id=12 MAJOR succeeded #Masked# manual default 0 0 0 ---
-#Masked# default ice_orc team_id=13 MAJOR succeeded #Masked# manual default 0 0 0 ---
+#Masked# default ice_orc company_id=100/dept_id=1 MAJOR succeeded #Masked# manual default 0 0 0 ---
+#Masked# default ice_orc company_id=100/dept_id=2 MAJOR succeeded #Masked# manual default 0 0 0 ---
+#Masked# default ice_orc company_id=100/dept_id=4 MAJOR succeeded #Masked# manual default 0 0 0 ---
#Masked# default ice_orc --- MAJOR succeeded #Masked# manual default 0 0 0 ---
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 780b0c025e01..e0b64f2c4927 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -282,7 +282,7 @@ public Void run() throws Exception {
open();
}
- /**
+ /**
* Instantiate the metastore server handler directly instead of connecting
* through the network
*
@@ -304,8 +304,8 @@ static ThriftHiveMetastore.Iface callEmbeddedMetastore(Configuration conf) throw
try {
      Class<?> clazz = Class.forName(HIVE_METASTORE_CLASS);
//noinspection JavaReflectionMemberAccess
- Method method = clazz.getDeclaredMethod(HIVE_METASTORE_CREATE_HANDLER_METHOD,
- Configuration.class);
+ String methodName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.HMS_HANDLER_CREATE);
+      Method method = clazz.getDeclaredMethod(methodName, Configuration.class);
method.setAccessible(true);
return (ThriftHiveMetastore.Iface) method.invoke(null, conf);
} catch (InvocationTargetException e) {
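
With this change the embedded-handler factory method is no longer hard-coded: its name is read from the new HMS_HANDLER_CREATE property (default newHMSHandler, see the MetastoreConf hunk below) and resolved reflectively on the HiveMetaStore class. A condensed sketch of that lookup, with the surrounding error handling elided (the helper class name is illustrative):

    import java.lang.reflect.Method;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

    class EmbeddedHandlerFactorySketch {
      static ThriftHiveMetastore.Iface create(Configuration conf) throws Exception {
        // The factory method name comes from metastore.hmshandler.create (default "newHMSHandler").
        String methodName = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.HMS_HANDLER_CREATE);
        Class<?> clazz = Class.forName("org.apache.hadoop.hive.metastore.HiveMetaStore");
        // The factory is a static, package-private method, hence setAccessible and the null receiver.
        Method factory = clazz.getDeclaredMethod(methodName, Configuration.class);
        factory.setAccessible(true);
        return (ThriftHiveMetastore.Iface) factory.invoke(null, conf);
      }
    }
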
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index c5e5fc3fd753..544f26dd9d88 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -911,6 +911,8 @@ public enum ConfVars {
HMS_HANDLER_PROXY_CLASS("metastore.hmshandler.proxy", "hive.metastore.hmshandler.proxy",
METASTORE_RETRYING_HANDLER_CLASS,
"The proxy class name of HMSHandler, default is RetryingHMSHandler."),
+    HMS_HANDLER_CREATE("metastore.hmshandler.create", "metastore.hmshandler.create", "newHMSHandler",
+        "The name of the factory method used to create a new HMSHandler."),
IDENTIFIER_FACTORY("datanucleus.identifierFactory",
"datanucleus.identifierFactory", "datanucleus1",
"Name of the identifier factory to use when generating table/column names etc. \n" +
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 986bb2e4e848..0fd998b2df57 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -152,6 +152,16 @@ static Iface newHMSHandler(Configuration conf)
HMSHandler baseHandler = new HMSHandler("hive client", conf);
return HMSHandlerProxyFactory.getProxy(conf, baseHandler, true);
}
+
+ static Iface newHMSRetryingLocalHandler(Configuration conf)
+ throws MetaException {
+ HMSHandler baseHandler = new HMSHandler("hive client", conf);
+ RetryingHMSHandler handler = new RetryingHMSHandler(conf, baseHandler, true);
+ return (IHMSHandler) java.lang.reflect.Proxy.newProxyInstance(
+ RetryingHMSHandler.class.getClassLoader(),
+ new Class[] { IHMSHandler.class }, handler);
+ }
+
/**
* Discard a current delegation token.
From 23a3c909e8a621a4ccd5b6af47bcc2b9c89b3be8 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 31 Jan 2025 19:02:06 +0100
Subject: [PATCH 02/40] HIVE-28059 : major rebase and simpler code;
---
.../metastore-catalog/data/conf/README.txt | 1 +
.../metastore-catalog/pom.xml | 340 +++++++++
.../apache/iceberg/HiveCachingCatalog.java | 331 +++++++++
.../iceberg/rest/HMSCatalogAdapter.java | 672 ++++++++++++++++++
.../apache/iceberg/rest/HMSCatalogServer.java | 169 +++++
.../iceberg/rest/HMSCatalogServlet.java | 288 ++++++++
.../org/apache/iceberg/hive/HiveUtil.java | 68 ++
.../org/apache/iceberg/rest/HMSTestBase.java | 405 +++++++++++
.../apache/iceberg/rest/TestHMSCatalog.java | 158 ++++
.../auth/jwt/jwt-authorized-key.json | 12 +
.../auth/jwt/jwt-unauthorized-key.json | 12 +
.../auth/jwt/jwt-verification-jwks.json | 20 +
.../src/test/resources/hive-log4j2.properties | 39 +
.../hive/metastore/conf/MetastoreConf.java | 14 +
.../hadoop/hive/metastore/HiveMetaStore.java | 42 +-
.../hive/metastore/HmsThriftHttpServlet.java | 2 +-
.../hive/metastore/PropertyServlet.java | 27 +-
.../hive/metastore/SecureServletCaller.java | 60 ++
.../hive/metastore/ServletSecurity.java | 44 +-
standalone-metastore/pom.xml | 1 +
20 files changed, 2672 insertions(+), 33 deletions(-)
create mode 100644 standalone-metastore/metastore-catalog/data/conf/README.txt
create mode 100644 standalone-metastore/metastore-catalog/pom.xml
create mode 100644 standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/HiveCachingCatalog.java
create mode 100644 standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
create mode 100644 standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
create mode 100644 standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
create mode 100644 standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/HiveUtil.java
create mode 100644 standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
create mode 100644 standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
create mode 100644 standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
create mode 100644 standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
create mode 100644 standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
create mode 100644 standalone-metastore/metastore-catalog/src/test/resources/hive-log4j2.properties
create mode 100644 standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/SecureServletCaller.java
diff --git a/standalone-metastore/metastore-catalog/data/conf/README.txt b/standalone-metastore/metastore-catalog/data/conf/README.txt
new file mode 100644
index 000000000000..0b2f0f032f2f
--- /dev/null
+++ b/standalone-metastore/metastore-catalog/data/conf/README.txt
@@ -0,0 +1 @@
+Need to force creation of a directory.
diff --git a/standalone-metastore/metastore-catalog/pom.xml b/standalone-metastore/metastore-catalog/pom.xml
new file mode 100644
index 000000000000..69522c7adb3c
--- /dev/null
+++ b/standalone-metastore/metastore-catalog/pom.xml
@@ -0,0 +1,340 @@
+
+
+
+
+ hive-standalone-metastore
+ org.apache.hive
+ 4.1.0-SNAPSHOT
+
+ 4.0.0
+ hive-metastore-icecat
+ Hive Metastore Iceberg Catalog
+
+ ..
+ 8
+ 8
+ UTF-8
+ false
+ ${project.parent.version}
+ ${hive.version}
+ 1.6.1
+
+
+
+ org.apache.hive
+ hive-standalone-metastore-server
+ ${revision}
+
+
+ org.apache.hive
+ hive-standalone-metastore-common
+ ${revision}
+
+
+ org.apache.hive
+ hive-iceberg-shading
+ ${revision}
+
+
+ org.apache.hive
+ hive-iceberg-handler
+ ${revision}
+
+
+ org.apache.hive
+ hive-iceberg-catalog
+ ${revision}
+
+
+ org.apache.iceberg
+ iceberg-bundled-guava
+ ${iceberg.version}
+
+
+
+
+ org.apache.httpcomponents.core5
+ httpcore5
+ 5.2
+
+
+ junit
+ junit
+ test
+
+
+ com.github.tomakehurst
+ wiremock-jre8-standalone
+ 2.32.0
+ test
+
+
+ org.assertj
+ assertj-core
+ 3.19.0
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+ 5.10.0
+ test
+
+
+ org.apache.hadoop
+ hadoop-auth
+ ${hadoop.version}
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.slf4j
+ slf4j-reload4j
+
+
+ ch.qos.reload4j
+ reload4j
+
+
+ commons-logging
+ commons-logging
+
+
+
+
+ org.apache.hadoop
+ hadoop-common
+ ${hadoop.version}
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.slf4j
+ slf4j-reload4j
+
+
+ ch.qos.reload4j
+ reload4j
+
+
+ commons-beanutils
+ commons-beanutils
+
+
+ commons-logging
+ commons-logging
+
+
+
+
+ org.apache.hadoop
+ hadoop-hdfs-client
+ ${hadoop.version}
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.slf4j
+ slf4j-reload4j
+
+
+ ch.qos.reload4j
+ reload4j
+
+
+ commons-logging
+ commons-logging
+
+
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+ ${hadoop.version}
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.slf4j
+ slf4j-reload4j
+
+
+ ch.qos.reload4j
+ reload4j
+
+
+ commons-logging
+ commons-logging
+
+
+
+
+ org.apache.hadoop
+ hadoop-mapreduce-client-core
+ ${hadoop.version}
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.slf4j
+ slf4j-reload4j
+
+
+ ch.qos.reload4j
+ reload4j
+
+
+ commons-logging
+ commons-logging
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-assembly-plugin
+
+
+ assemble
+ none
+
+ single
+
+
+
+
+
+ org.apache.rat
+ apache-rat-plugin
+
+
+ process-resources
+
+ check
+
+
+
+
+
+ *.patch
+ DEV-README
+ **/src/main/sql/**
+ **/README.md
+ **/*.iml
+ **/*.txt
+ **/*.log
+ **/package-info.java
+ **/*.properties
+ **/*.q
+ **/*.q.out
+ **/*.xml
+ **/gen/**
+ **/patchprocess/**
+ **/metastore_db/**
+ **/test/resources/**/*.ldif
+ **/test/resources/**/*.sql
+ **/test/resources/**/*.json
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ ${surefire.version}
+
+
+ TestHMSCatalog.java
+ TestHiveCatalog.java
+
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+ 3.1.0
+
+
+ test
+
+
+
+ log4j2.debug
+ false
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-antrun-plugin
+ 3.1.0
+
+
+ generate-test-sources
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ copy
+
+ run
+
+
+
+
+
+
+
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/HiveCachingCatalog.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/HiveCachingCatalog.java
new file mode 100644
index 000000000000..ff55cd943ad1
--- /dev/null
+++ b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/HiveCachingCatalog.java
@@ -0,0 +1,331 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg;
+
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.RemovalCause;
+import com.github.benmanes.caffeine.cache.RemovalListener;
+import com.github.benmanes.caffeine.cache.Ticker;
+import java.time.Duration;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.AlreadyExistsException;
+import org.apache.iceberg.exceptions.NamespaceNotEmptyException;
+import org.apache.iceberg.exceptions.NoSuchNamespaceException;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+
+/**
+ * Class that wraps an Iceberg Catalog to cache tables.
+ * Initial code in:
+ * https://github.com/apache/iceberg/blob/1.3.x/core/src/main/java/org/apache/iceberg/CachingCatalog.java
+ * The main differences are the SupportsNamespaces support and the fact that loadTable performs a metadata refresh.
+ *
+ *
+ * <p>See {@link CatalogProperties#CACHE_EXPIRATION_INTERVAL_MS} for more details regarding special
+ * values for {@code expirationIntervalMillis}.
+ */
+public class HiveCachingCatalog<CATALOG extends Catalog & SupportsNamespaces> implements Catalog, SupportsNamespaces {
+ private static final Logger LOG = LoggerFactory.getLogger(HiveCachingCatalog.class);
+ @SuppressWarnings("checkstyle:VisibilityModifier")
+ protected final long expirationIntervalMillis;
+ @SuppressWarnings("checkstyle:VisibilityModifier")
+  protected final Cache<TableIdentifier, Table> tableCache;
+ private final CATALOG catalog;
+ private final boolean caseSensitive;
+
+ @SuppressWarnings("checkstyle:VisibilityModifier")
+ protected HiveCachingCatalog(CATALOG catalog, long expirationIntervalMillis) {
+ Preconditions.checkArgument(
+ expirationIntervalMillis != 0,
+ "When %s is set to 0, the catalog cache should be disabled. This indicates a bug.",
+ CatalogProperties.CACHE_EXPIRATION_INTERVAL_MS);
+ this.catalog = catalog;
+ this.caseSensitive = true;
+ this.expirationIntervalMillis = expirationIntervalMillis;
+ this.tableCache = createTableCache(Ticker.systemTicker());
+ }
+
+ public CATALOG unwrap() {
+ return catalog;
+ }
+
+  public static <C extends Catalog & SupportsNamespaces>
+      HiveCachingCatalog<C> wrap(C catalog, long expirationIntervalMillis) {
+    return new HiveCachingCatalog<>(catalog, expirationIntervalMillis);
+ }
+
+
+  private Cache<TableIdentifier, Table> createTableCache(Ticker ticker) {
+ Caffeine
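
The cache construction that begins above follows the Caffeine pattern of the upstream CachingCatalog this class is derived from. A minimal sketch of it, assuming the expiration semantics described in the class javadoc (names are illustrative, not the patch's exact code):

    import com.github.benmanes.caffeine.cache.Cache;
    import com.github.benmanes.caffeine.cache.Caffeine;
    import com.github.benmanes.caffeine.cache.Ticker;
    import java.time.Duration;
    import org.apache.iceberg.Table;
    import org.apache.iceberg.catalog.TableIdentifier;

    class TableCacheSketch {
      static Cache<TableIdentifier, Table> createTableCache(Ticker ticker, long expirationIntervalMillis) {
        Caffeine<Object, Object> builder = Caffeine.newBuilder().softValues();
        if (expirationIntervalMillis > 0) {
          // Entries expire a fixed time after their last read or write.
          builder = builder.expireAfterAccess(Duration.ofMillis(expirationIntervalMillis));
        }
        return builder.ticker(ticker).build();
      }
    }
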
*
*
This implementation performs user extraction and eventual JWT validation to
* execute (servlet service) methods within the context of the retrieved UserGroupInformation.
@@ -114,7 +122,7 @@ public class ProxyServlet extends HttpServlet {
this.delegate = delegate;
}
- public void init() throws ServletException {
+ @Override public void init() throws ServletException {
ServletSecurity.this.init();
delegate.init();
}
@@ -162,7 +170,7 @@ public interface MethodExecutor {
public void execute(HttpServletRequest request, HttpServletResponse response, MethodExecutor executor)
throws IOException {
if (LOG.isDebugEnabled()) {
- LOG.debug("Logging headers in "+request.getMethod()+" request");
+ LOG.debug("Logging headers in {} request", request.getMethod());
      Enumeration<String> headerNames = request.getHeaderNames();
while (headerNames.hasMoreElements()) {
String headerName = headerNames.nextElement();
@@ -170,9 +178,9 @@ public void execute(HttpServletRequest request, HttpServletResponse response, Me
request.getHeader(headerName));
}
}
+ final UserGroupInformation clientUgi;
try {
String userFromHeader = extractUserName(request, response);
- UserGroupInformation clientUgi;
// Temporary, and useless for now. Here only to allow this to work on an otherwise kerberized
// server.
if (isSecurityEnabled || jwtAuthEnabled) {
@@ -182,25 +190,25 @@ public void execute(HttpServletRequest request, HttpServletResponse response, Me
LOG.info("Creating remote user for: {}", userFromHeader);
clientUgi = UserGroupInformation.createRemoteUser(userFromHeader);
}
-      PrivilegedExceptionAction<Void> action = () -> {
- executor.execute(request, response);
- return null;
- };
- try {
- clientUgi.doAs(action);
- } catch (InterruptedException e) {
- LOG.error("Exception when executing http request as user: " + clientUgi.getUserName(), e);
- Thread.currentThread().interrupt();
- } catch (RuntimeException e) {
- LOG.error("Exception when executing http request as user: " + clientUgi.getUserName(),
- e);
- throw new IOException(e);
- }
} catch (HttpAuthenticationException e) {
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
response.getWriter().println("Authentication error: " + e.getMessage());
// Also log the error message on server side
LOG.error("Authentication error: ", e);
+ // no need to go further
+ return;
+ }
+    final PrivilegedExceptionAction<Void> action = () -> {
+ executor.execute(request, response);
+ return null;
+ };
+ try {
+ clientUgi.doAs(action);
+ } catch (InterruptedException e) {
+ LOG.info("Interrupted when executing http request as user: {}", clientUgi.getUserName(), e);
+ Thread.currentThread().interrupt();
+ } catch (RuntimeException e) {
+      throw new IOException("Exception when executing http request as user: " + clientUgi.getUserName(), e);
}
}
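
The reworked flow above authenticates first, answers 401 and returns on failure, and only then runs the servlet body under the caller's identity via UserGroupInformation.doAs. A compact sketch of that doAs pattern with the servlet wiring stripped away (the helper names are illustrative):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.security.UserGroupInformation;

    class DoAsSketch {
      /** Runs the given work as the named remote user, converting runtime failures to IOException. */
      static void runAs(String userFromHeader, Runnable work) throws IOException {
        UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(userFromHeader);
        PrivilegedExceptionAction<Void> action = () -> {
          work.run();
          return null;
        };
        try {
          clientUgi.doAs(action);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        } catch (RuntimeException e) {
          throw new IOException("Exception when executing request as user: " + clientUgi.getUserName(), e);
        }
      }
    }
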
From 4921457b56b9301fd125429a37c96ef817a53875 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Sat, 15 Feb 2025 19:52:25 +0100
Subject: [PATCH 21/40] HIVE-28059 : nit, javadoc (again!);
---
.../hadoop/hive/metastore/PropertyServlet.java | 1 +
.../hadoop/hive/metastore/ServletSecurity.java | 14 ++++++--------
2 files changed, 7 insertions(+), 8 deletions(-)
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index 9633c2065d4b..9965b52dda6c 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -138,6 +138,7 @@ private void writeJson(HttpServletResponse response, Object value) throws IOExce
writer.flush();
}
+ @Override
public void init() throws ServletException {
super.init();
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index c851b09125f7..b0a226e116c1 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -52,27 +52,25 @@
* method invokes the executor through a {@link PrivilegedAction} in the expected {@link UserGroupInformation} context.
*
* A typical usage in a servlet is the following:
- *
* As a convenience, instead of embedding the security instance, one can wrap an existing servlet in a proxy that
* will ensure all its service methods are called with the expected {@link UserGroupInformation} .
- *
+ * <p>This implementation performs user extraction and eventual JWT validation to
From 9b7eb152021c0bd22eebf95c4d805a48e456e92d Mon Sep 17 00:00:00 2001
From: Henrib
Date: Sun, 16 Feb 2025 10:28:29 +0100
Subject: [PATCH 22/40] HIVE-28059 : javadoc (again!!);
---
.../hadoop/hive/metastore/ServletSecurity.java | 13 +++++++------
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index b0a226e116c1..21f39eb9f932 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -63,18 +63,19 @@
* }
*
*
- * As a convenience, instead of embedding the security instance, one can wrap an existing servlet in a proxy that
- * will ensure all its service methods are called with the expected {@link UserGroupInformation} .
- *
+ * As a convenience, instead of embedding the security instance, one can wrap an existing servlet in a proxy that
+ * will ensure all its service methods are called with the expected {@link UserGroupInformation} .
+ *
+ * This implementation performs user extraction and eventual JWT validation to
+ * execute (servlet service) methods within the context of the retrieved UserGroupInformation.
*
- *
- *
- * <p>This implementation performs user extraction and eventual JWT validation to
- * execute (servlet service) methods within the context of the retrieved UserGroupInformation.
*/
public class ServletSecurity {
private static final Logger LOG = LoggerFactory.getLogger(ServletSecurity.class);
From 4c41e9ce749852b1a3b1511b4713b984f495f2b8 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Sun, 16 Feb 2025 17:39:38 +0100
Subject: [PATCH 23/40] HIVE-28059 : fix regression on PropertyServlet & test;
---
.../java/org/apache/hadoop/hive/metastore/PropertyServlet.java | 2 +-
.../apache/hadoop/hive/metastore/properties/HMSTestBase.java | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index 9965b52dda6c..0fecc92f2514 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -332,7 +332,7 @@ public static Server startServer(Configuration conf) throws Exception {
return null;
}
String path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
- ServletSecurity security = new ServletSecurity(conf);
+ ServletSecurity security = new ServletSecurity(conf, PropertyServlet.isAuthJwt(conf));
Servlet servlet = security.proxy(new PropertyServlet(conf));
// HTTP Server
Server server = new Server();
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
index 30635e8bbeee..66fefbe0faf3 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
@@ -111,7 +111,7 @@ public void setUp() throws Exception {
NS = "hms" + RND.nextInt(100);
conf = MetastoreConf.newMetastoreConf();
MetaStoreTestUtils.setConfForStandloneMode(conf);
-
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_METASTORE_AUTHENTICATION, "jwt");
MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
// Events that get cleaned happen in batches of 1 to exercise batching code
MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.EVENT_CLEAN_MAX_EVENTS, 1L);
From 507dd6e8e937bd967691a6e64bd8ab5c39366481 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Mon, 17 Feb 2025 09:19:42 +0100
Subject: [PATCH 24/40] HIVE-28059 : fix regression on PropertyServlet test;
---
.../org/apache/hadoop/hive/metastore/properties/HMSTestBase.java | 1 -
1 file changed, 1 deletion(-)
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
index 66fefbe0faf3..4023076c04da 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
@@ -111,7 +111,6 @@ public void setUp() throws Exception {
NS = "hms" + RND.nextInt(100);
conf = MetastoreConf.newMetastoreConf();
MetaStoreTestUtils.setConfForStandloneMode(conf);
- MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_METASTORE_AUTHENTICATION, "jwt");
MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
// Events that get cleaned happen in batches of 1 to exercise batching code
MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.EVENT_CLEAN_MAX_EVENTS, 1L);
From edd71486f1b639f97f5904686571c2044f01e35c Mon Sep 17 00:00:00 2001
From: Henrib
Date: Tue, 18 Feb 2025 18:59:54 +0100
Subject: [PATCH 25/40] HIVE-28059 : fix nits; - Created a helper class that
 abstracts creating an embedded Jetty server serving one servlet on one port; - Added
 generic embedded Jetty configuration properties (thread pool); - Use
 ServletServerBuilder to create the PropertyServlet and HMSCatalogServlet servers;
---
.../iceberg/rest/HMSCatalogAdapter.java | 37 +++--
.../apache/iceberg/rest/HMSCatalogServer.java | 123 ++++++--------
.../hive/metastore/conf/MetastoreConf.java | 12 ++
.../hive/metastore/PropertyServlet.java | 72 ++++-----
.../hive/metastore/ServletServerBuilder.java | 151 ++++++++++++++++++
5 files changed, 267 insertions(+), 128 deletions(-)
create mode 100644 standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
index 2662b9f2d1be..776d53dbdb11 100644
--- a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
+++ b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
@@ -90,7 +90,7 @@ public class HMSCatalogAdapter implements RESTClient {
.put(NamespaceNotSupported.class, 400)
.put(IllegalArgumentException.class, 400)
.put(ValidationException.class, 400)
- .put(NamespaceNotEmptyException.class, 400) // TODO: should this be more specific?
+ .put(NamespaceNotEmptyException.class, 400)
.put(NotAuthorizedException.class, 401)
.put(ForbiddenException.class, 403)
.put(NoSuchNamespaceException.class, 404)
@@ -103,6 +103,15 @@ public class HMSCatalogAdapter implements RESTClient {
.put(CommitStateUnknownException.class, 500)
.buildOrThrow();
+ private static final String URN_OAUTH_TOKEN_EXCHANGE = "urn:ietf:params:oauth:grant-type:token-exchange";
+ private static final String URN_OAUTH_ACCESS_TOKEN = "urn:ietf:params:oauth:token-type:access_token";
+ private static final String GRANT_TYPE = "grant_type";
+ private static final String CLIENT_CREDENTIALS = "client_credentials";
+ private static final String BEARER = "Bearer";
+ private static final String CLIENT_ID = "client_id";
+ private static final String ACTOR_TOKEN = "actor_token";
+ private static final String SUBJECT_TOKEN = "subject_token";
+
private final Catalog catalog;
private final SupportsNamespaces asNamespaceCatalog;
private final ViewCatalog asViewCatalog;
@@ -288,24 +297,24 @@ private ConfigResponse config() {
private OAuthTokenResponse tokens(Object body) {
      Map<String, String> request = (Map<String, String>) castRequest(Map.class, body);
- String grantType = request.get("grant_type");
+ String grantType = request.get(GRANT_TYPE);
switch (grantType) {
- case "client_credentials":
+ case CLIENT_CREDENTIALS:
return OAuthTokenResponse.builder()
- .withToken("client-credentials-token:sub=" + request.get("client_id"))
- .withTokenType("Bearer")
+ .withToken("client-credentials-token:sub=" + request.get(CLIENT_ID))
+ .withTokenType(BEARER)
.build();
- case "urn:ietf:params:oauth:grant-type:token-exchange":
- String actor = request.get("actor_token");
+ case URN_OAUTH_TOKEN_EXCHANGE:
+ String actor = request.get(ACTOR_TOKEN);
String token =
String.format(
"token-exchange-token:sub=%s%s",
- request.get("subject_token"), actor != null ? ",act=" + actor : "");
+ request.get(SUBJECT_TOKEN), actor != null ? ",act=" + actor : "");
return OAuthTokenResponse.builder()
.withToken(token)
- .withIssuedTokenType("urn:ietf:params:oauth:token-type:access_token")
- .withTokenType("Bearer")
+ .withIssuedTokenType(URN_OAUTH_ACCESS_TOKEN)
+ .withTokenType(BEARER)
.build();
default:
@@ -315,13 +324,13 @@ private OAuthTokenResponse tokens(Object body) {
  private ListNamespacesResponse listNamespaces(Map<String, String> vars) {
if (asNamespaceCatalog != null) {
- Namespace ns;
+ Namespace namespace;
if (vars.containsKey("parent")) {
- ns = Namespace.of(RESTUtil.NAMESPACE_SPLITTER.splitToStream(vars.get("parent")).toArray(String[]::new));
+ namespace = Namespace.of(RESTUtil.NAMESPACE_SPLITTER.splitToStream(vars.get("parent")).toArray(String[]::new));
} else {
- ns = Namespace.empty();
+ namespace = Namespace.empty();
}
- return castResponse(ListNamespacesResponse.class, CatalogHandlers.listNamespaces(asNamespaceCatalog, ns));
+ return castResponse(ListNamespacesResponse.class, CatalogHandlers.listNamespaces(asNamespaceCatalog, namespace));
}
throw new NamespaceNotSupported(catalog.toString());
}
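
For reference, the tokens() handler above accepts exactly two grant types. A small sketch of the request bodies it expects and the token strings it would produce, based on the code shown in this hunk (the values are made up for illustration):

    import java.util.Map;

    class TokenRequestSketch {
      public static void main(String[] args) {
        // client_credentials: the returned token embeds the client id,
        // e.g. "client-credentials-token:sub=hive".
        Map<String, String> clientCredentials = Map.of(
            "grant_type", "client_credentials",
            "client_id", "hive");

        // token-exchange: subject_token is required, actor_token is optional,
        // e.g. "token-exchange-token:sub=subject-jwt,act=actor-jwt".
        Map<String, String> tokenExchange = Map.of(
            "grant_type", "urn:ietf:params:oauth:grant-type:token-exchange",
            "subject_token", "subject-jwt",
            "actor_token", "actor-jwt");

        System.out.println(clientCredentials);
        System.out.println(tokenExchange);
      }
    }
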
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
index 076dcbc5d278..76394861765c 100644
--- a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
+++ b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
@@ -16,7 +16,6 @@
* specific language governing permissions and limitations
* under the License.
*/
-
package org.apache.iceberg.rest;
import java.io.IOException;
@@ -28,49 +27,53 @@
import javax.servlet.http.HttpServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.hadoop.hive.metastore.ServletServerBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.hive.HiveCatalog;
-import org.eclipse.jetty.server.ConnectionFactory;
-import org.eclipse.jetty.server.Connector;
-import org.eclipse.jetty.server.HttpConfiguration;
-import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.server.handler.gzip.GzipHandler;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
/**
- * Iceberg Catalog server.
+ * Iceberg Catalog server creator.
*/
-public class HMSCatalogServer {
+public class HMSCatalogServer extends ServletServerBuilder {
private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogServer.class);
-  private static final AtomicReference<Reference<Catalog>> catalogRef = new AtomicReference<>();
+  protected static final AtomicReference<Reference<Catalog>> catalogRef = new AtomicReference<>();
public static Catalog getLastCatalog() {
    Reference<Catalog> soft = catalogRef.get();
- return soft != null ? soft.get() : null;
+ return soft != null ? soft.get() : null;
}
-
+
protected static void setLastCatalog(Catalog catalog) {
catalogRef.set(new SoftReference<>(catalog));
}
- private HMSCatalogServer() {
- // nothing
+ protected final int port;
+ protected final String path;
+ protected Catalog catalog;
+
+ protected HMSCatalogServer(Configuration conf, Catalog catalog) {
+ super(conf);
+ port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT);
+ path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH);
+ this.catalog = catalog;
}
- protected HttpServlet createServlet(ServletSecurity security, Catalog catalog) throws IOException {
- return security.proxy(new HMSCatalogServlet(new HMSCatalogAdapter(catalog)));
+ @Override
+ protected String getServletPath() {
+ return path;
}
- protected Catalog createCatalog(Configuration configuration) {
+ @Override
+ protected int getServerPort() {
+ return port;
+ }
+
+ protected Catalog createCatalog() {
    final Map<String, String> properties = new TreeMap<>();
final String configUri = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS);
if (configUri != null) {
@@ -89,81 +92,45 @@ protected Catalog createCatalog(Configuration configuration) {
final String catalogName = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.CATALOG_DEFAULT);
catalog.initialize(catalogName, properties);
long expiry = MetastoreConf.getLongVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_CACHE_EXPIRY);
- return expiry > 0? new HMSCachingCatalog(catalog, expiry) : catalog;
+ return expiry > 0 ? new HMSCachingCatalog(catalog, expiry) : catalog;
}
- protected HttpServlet createServlet(Configuration configuration, Catalog catalog) throws IOException {
+ protected HttpServlet createServlet(Catalog catalog) throws IOException {
ServletSecurity security = new ServletSecurity(configuration);
+ return security.proxy(new HMSCatalogServlet(new HMSCatalogAdapter(catalog)));
+ }
+
+ @Override
+ protected HttpServlet createServlet() throws IOException {
Catalog actualCatalog = catalog;
if (actualCatalog == null) {
MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, "");
- actualCatalog = createCatalog(configuration);
+ actualCatalog = catalog = createCatalog();
}
setLastCatalog(actualCatalog);
- return createServlet(security, actualCatalog);
+ return createServlet(actualCatalog);
}
-
- /**
- * Convenience method to start a http server that only serves this servlet.
- * @param conf the configuration
- * @param catalog the catalog instance to serve
- * @return the server instance
- * @throws Exception if servlet initialization fails
- */
- protected Server startServer(Configuration conf, HiveCatalog catalog) throws Exception {
- int port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT);
- if (port < 0) {
- return null;
- }
- final HttpServlet servlet = createServlet(conf, catalog);
- ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
- context.setContextPath("/");
- ServletHolder servletHolder = new ServletHolder(servlet);
- servletHolder.setInitParameter("javax.ws.rs.Application", "ServiceListPublic");
- final String cli = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH);
- context.addServlet(servletHolder, "/" + cli + "/*");
- context.setVirtualHosts(null);
- context.setGzipHandler(new GzipHandler());
- final Server httpServer = createHttpServer(conf, port);
- httpServer.setHandler(context);
- LOG.info("Starting HMS REST Catalog Server with context path:/{}/ on port:{}", cli, port);
- httpServer.start();
- return httpServer;
- }
-
- private static Server createHttpServer(Configuration conf, int port) throws IOException {
- final int maxThreads = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_MAX);
- final int minThreads = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_MIN);
- final int idleTimeout = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_IDLE);
+ @Override
+ protected Server createServer() {
+ final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_MAX);
+ final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_MIN);
+ final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_IDLE);
final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads, minThreads, idleTimeout);
- final Server httpServer = new Server(threadPool);
- httpServer.setStopAtShutdown(true);
- final SslContextFactory sslContextFactory = ServletSecurity.createSslContextFactory(conf);
- final ServerConnector connector = new ServerConnector(httpServer, sslContextFactory);
- connector.setPort(port);
- connector.setReuseAddress(true);
- httpServer.setConnectors(new Connector[] {connector});
- for (ConnectionFactory factory : connector.getConnectionFactories()) {
- if (factory instanceof HttpConnectionFactory) {
- HttpConnectionFactory httpFactory = (HttpConnectionFactory) factory;
- HttpConfiguration httpConf = httpFactory.getHttpConfiguration();
- httpConf.setSendServerVersion(false);
- httpConf.setSendXPoweredBy(false);
- }
- }
- return httpServer;
+ return new Server(threadPool);
}
-
/**
- * Convenience method to start a http server that only serves this servlet.
- * <p>This one is looked up through reflection to start from HMS.
+ * Convenience method to start a http server that only serves the Iceberg
+ * catalog servlet.
+ *
+ * This one is looked up through reflection to start from HMS.
+ *
* @param conf the configuration
* @return the server instance
* @throws Exception if servlet initialization fails
*/
public static Server startServer(Configuration conf) throws Exception {
- return new HMSCatalogServer().startServer(conf, null);
+ return new HMSCatalogServer(conf, null).startServer();
}
}
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index 1745e8e1a3d2..eb0c6cce90d5 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -1856,6 +1856,18 @@ public enum ConfVars {
"hive.metastore.catalog.cache.expiry", 60_000L,
"HMS Iceberg Catalog cache expiry."
),
+ EMBEDDED_JETTY_THREADPOOL_MIN("hive.metastore.embedded.jetty.threadpool.min",
+ "hive.metastore.embedded.jetty.threadpool.min", 2,
+ "HMS embedded Jetty server(s) minimum number of threads."
+ ),
+ EMBEDDED_JETTY_THREADPOOL_MAX("hive.metastore.embedded.jetty.threadpool.max",
+ "hive.metastore.embedded.jetty.threadpool.max", 256,
+ "HMS embedded Jetty server(s) maximum number of threads."
+ ),
+ EMBEDDED_JETTY_THREADPOOL_IDLE("hive.metastore.embedded.jetty.threadpool.idle",
+ "hive.metastore.embedded.jetty.threadpool.idle", 60_000L,
+ "HMS embedded Jetty server(s) thread idle time."
+ ),
// Deprecated Hive values that we are keeping for backwards compatibility.
@Deprecated
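
For illustration only, the new thread-pool settings can be tuned through any Configuration handed to the metastore before its servlets start. The sketch below is not part of the patch; the class name and values are arbitrary, and it reads the settings back the same way the embedded server builder does with MetastoreConf.getIntVar:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

    /** Illustrative only: overriding the embedded Jetty thread pool before HMS starts its servlets. */
    public class EmbeddedJettyPoolExample {
      public static void main(String[] args) {
        Configuration conf = MetastoreConf.newMetastoreConf();
        conf.set("hive.metastore.embedded.jetty.threadpool.min", "4");
        conf.set("hive.metastore.embedded.jetty.threadpool.max", "64");
        conf.set("hive.metastore.embedded.jetty.threadpool.idle", "30000");
        // Read back the same way the embedded servlet server builder does.
        int maxThreads = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_MAX);
        System.out.println("embedded Jetty max threads = " + maxThreads);
      }
    }
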
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index 0fecc92f2514..8a2a7fe14aa8 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -29,15 +29,9 @@
import org.apache.hadoop.hive.metastore.properties.PropertyMap;
import org.apache.hadoop.hive.metastore.properties.PropertyStore;
import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.servlet.ServletHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.servlet.Source;
-import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServlet;
@@ -319,6 +313,35 @@ protected void doGet(HttpServletRequest request,
}
}
+ /**
+ * Single servlet creation helper.
+ */
+ private static class ServerBuilder extends ServletServerBuilder {
+ final int port;
+ final String path;
+ ServerBuilder(Configuration conf) {
+ super(conf);
+ port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT);
+ path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
+ }
+
+ @Override
+ protected String getServletPath() {
+ return path;
+ }
+
+ @Override
+ protected int getServerPort() {
+ return port;
+ }
+
+ @Override
+ protected HttpServlet createServlet() throws IOException {
+ ServletSecurity security = new ServletSecurity(configuration, PropertyServlet.isAuthJwt(configuration));
+ return security.proxy(new PropertyServlet(configuration));
+ }
+ }
+
/**
* Convenience method to start a http server that only serves this servlet.
* @param conf the configuration
@@ -326,36 +349,13 @@ protected void doGet(HttpServletRequest request,
* @throws Exception if servlet initialization fails
*/
public static Server startServer(Configuration conf) throws Exception {
- // no port, no server
- int port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT);
- if (port < 0) {
- return null;
- }
- String path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
- ServletSecurity security = new ServletSecurity(conf, PropertyServlet.isAuthJwt(conf));
- Servlet servlet = security.proxy(new PropertyServlet(conf));
- // HTTP Server
- Server server = new Server();
- server.setStopAtShutdown(true);
-
- // Optional SSL
- final SslContextFactory sslContextFactory = ServletSecurity.createSslContextFactory(conf);
- final ServerConnector connector = new ServerConnector(server, sslContextFactory);
- connector.setPort(port);
- connector.setReuseAddress(true);
- server.addConnector(connector);
-
- // Hook the servlet
- ServletHandler handler = new ServletHandler();
- server.setHandler(handler);
- ServletHolder holder = handler.newServletHolder(Source.EMBEDDED);
- holder.setServlet(servlet); //
- handler.addServletWithMapping(holder, "/"+path+"/*");
- server.start();
- if (!server.isStarted()) {
- LOGGER.error("unable to start property-maps servlet server, path {}, port {}", path, port);
- } else {
- LOGGER.info("started property-maps servlet server on {}", server.getURI());
+ Server server = new ServerBuilder(conf).startServer();
+ if (server != null) {
+ if (!server.isStarted()) {
+ LOGGER.error("Unable to start property-maps servlet server on {}", server.getURI());
+ } else {
+ LOGGER.info("Started property-maps servlet server on {}", server.getURI());
+ }
}
return server;
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
new file mode 100644
index 000000000000..dd2f355cfc32
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import java.io.IOException;
+import javax.servlet.http.HttpServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+
+/**
+ * Helper class to ease creation of embedded Jetty serving one servlet on a given port.
+ * <p>When using Jetty, the easiest way - and maybe the only one - to serve different servlets
+ * on different ports is to create 2 separate Jetty instances; this helper eases creation
+ * of such a dedicated server.
+ */
+public abstract class ServletServerBuilder {
+ /**
+ * The configuration instance.
+ */
+ protected final Configuration configuration;
+
+ /**
+ * Creates a builder instance.
+ * @param conf the configuration
+ */
+ protected ServletServerBuilder(Configuration conf) {
+ this.configuration = conf;
+ }
+ /**
+ * Gets the servlet path.
+ * @return the path
+ */
+ protected abstract String getServletPath();
+
+ /**
+ * Gets the server port.
+ * @return the port
+ */
+ protected abstract int getServerPort();
+
+ /**
+ * Creates the servlet instance.
+ * <p>It is often advisable to use {@link ServletSecurity} to proxy the actual servlet instance.
+ * @return the servlet instance
+ * @throws IOException if servlet creation fails
+ */
+ protected abstract HttpServlet createServlet() throws IOException;
+
+ /**
+ * Creates the servlet context.
+ * @param servlet the servlet
+ * @return a context instance
+ */
+ protected ServletContextHandler createContext(HttpServlet servlet) {
+ // hook the servlet
+ ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
+ context.setContextPath("/");
+ ServletHolder servletHolder = new ServletHolder(servlet);
+ servletHolder.setInitParameter("javax.ws.rs.Application", "ServiceListPublic");
+ final String path = getServletPath();
+ context.addServlet(servletHolder, "/" + path + "/*");
+ context.setVirtualHosts(null);
+ context.setGzipHandler(new GzipHandler());
+ return context;
+ }
+
+ /**
+ * Creates a server instance.
+ * <p>Uses the configuration to determine the thread pool parameters by default.
+ * @return the server instance
+ * @throws IOException if server creation fails
+ */
+ protected Server createServer() throws IOException {
+ final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_MAX);
+ final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_MIN);
+ final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_IDLE);
+ final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads, minThreads, idleTimeout);
+ return new Server(threadPool);
+ }
+
+ /**
+ * Creates a server instance and a connector on a given port.
+ * @param port the port
+ * @return the server instance listening to the port
+ * @throws IOException if server creation fails
+ */
+ protected Server createServer(int port) throws IOException {
+ final Server server = createServer();
+ server.setStopAtShutdown(true);
+ final SslContextFactory sslContextFactory = ServletSecurity.createSslContextFactory(configuration);
+ final ServerConnector connector = new ServerConnector(server, sslContextFactory);
+ connector.setPort(port);
+ connector.setReuseAddress(true);
+ server.addConnector(connector);
+ HttpConnectionFactory httpFactory = connector.getConnectionFactory(HttpConnectionFactory.class);
+ // do not leak information
+ if (httpFactory != null) {
+ HttpConfiguration httpConf = httpFactory.getHttpConfiguration();
+ httpConf.setSendServerVersion(false);
+ httpConf.setSendXPoweredBy(false);
+ }
+ return server;
+ }
+
+ /**
+ * Convenience method to start a http server that only serves this servlet.
+ * @return the server instance or null if port < 0
+ * @throws Exception if servlet initialization fails
+ */
+ public Server startServer() throws Exception {
+ int port = getServerPort();
+ if (port < 0) {
+ return null;
+ }
+ // create the servlet
+ final HttpServlet servlet = createServlet();
+ // hook the servlet
+ ServletContextHandler context = createContext(servlet);
+ // Http server
+ final Server httpServer = createServer(port);
+ httpServer.setHandler(context);
+ httpServer.start();
+ return httpServer;
+ }
+
+}
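
To make the contract concrete, here is a hedged sketch of how a service outside this patch might subclass the builder. ExampleServletServer, its servlet path and its port are hypothetical; only the abstract methods, ServletSecurity.proxy and startServer come from the class introduced above:

    import java.io.IOException;
    import javax.servlet.http.HttpServlet;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.ServletSecurity;
    import org.apache.hadoop.hive.metastore.ServletServerBuilder;
    import org.eclipse.jetty.server.Server;

    /** Hypothetical builder serving one servlet on its own port. */
    public class ExampleServletServer extends ServletServerBuilder {
      private final HttpServlet servlet;

      public ExampleServletServer(Configuration conf, HttpServlet servlet) {
        super(conf);
        this.servlet = servlet;
      }

      @Override
      protected String getServletPath() {
        return "example"; // endpoint becomes /example/*
      }

      @Override
      protected int getServerPort() {
        return 8090; // a negative port disables the server (startServer() returns null)
      }

      @Override
      protected HttpServlet createServlet() throws IOException {
        // As the javadoc above suggests, wrap the real servlet behind ServletSecurity.
        return new ServletSecurity(configuration).proxy(servlet);
      }

      public static Server start(Configuration conf, HttpServlet servlet) throws Exception {
        return new ExampleServletServer(conf, servlet).startServer();
      }
    }
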
From 0a6381cfcde34d9206e54f86c3c7437712c1f5b9 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Wed, 19 Feb 2025 12:38:11 +0100
Subject: [PATCH 26/40] HIVE-28059 : fixing more nits; - moving to
metastore-iceberg-catalog; - removing never thrown exceptions from
signatures; - fixing javadoc;
---
.../pom.xml | 17 ++++++++++++++++-
.../apache/iceberg/rest/HMSCachingCatalog.java | 0
.../apache/iceberg/rest/HMSCatalogAdapter.java | 8 +++-----
.../apache/iceberg/rest/HMSCatalogServer.java | 10 +++++++++-
.../apache/iceberg/rest/HMSCatalogServlet.java | 5 ++---
.../apache/iceberg/hive/IcebergTestHelper.java | 0
.../org/apache/iceberg/rest/HMSTestBase.java | 11 ++---------
.../org/apache/iceberg/rest/TestHMSCatalog.java | 0
.../resources/auth/jwt/jwt-authorized-key.json | 0
.../auth/jwt/jwt-unauthorized-key.json | 0
.../auth/jwt/jwt-verification-jwks.json | 0
.../src/test/resources/hive-log4j2.properties | 0
.../hadoop/hive/metastore/PropertyServlet.java | 2 +-
.../hadoop/hive/metastore/ServletSecurity.java | 2 +-
standalone-metastore/pom.xml | 2 +-
15 files changed, 35 insertions(+), 22 deletions(-)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/pom.xml (93%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java (98%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java (93%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java (96%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/java/org/apache/iceberg/rest/HMSTestBase.java (98%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/resources/auth/jwt/jwt-authorized-key.json (100%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/resources/auth/jwt/jwt-unauthorized-key.json (100%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/resources/auth/jwt/jwt-verification-jwks.json (100%)
rename standalone-metastore/{metastore-catalog => metastore-iceberg-catalog}/src/test/resources/hive-log4j2.properties (100%)
diff --git a/standalone-metastore/metastore-catalog/pom.xml b/standalone-metastore/metastore-iceberg-catalog/pom.xml
similarity index 93%
rename from standalone-metastore/metastore-catalog/pom.xml
rename to standalone-metastore/metastore-iceberg-catalog/pom.xml
index dcf552755bd7..812d0b36bdfb 100644
--- a/standalone-metastore/metastore-catalog/pom.xml
+++ b/standalone-metastore/metastore-iceberg-catalog/pom.xml
@@ -17,7 +17,7 @@
<version>4.1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

- <artifactId>hive-standalone-metastore-icecat</artifactId>
+ <artifactId>hive-standalone-metastore-iceberg-catalog</artifactId>
<name>Hive Metastore Iceberg Catalog</name>
<description>..</description>
@@ -59,6 +59,21 @@
iceberg-bundled-guava${iceberg.version}
+
+
org.apache.hivehive-standalone-metastore-common
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
rename to standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
similarity index 98%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
rename to standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
index 776d53dbdb11..80652a2a7ec6 100644
--- a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
+++ b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
@@ -77,7 +77,7 @@
import org.apache.iceberg.util.PropertyUtil;
/**
- * Original @ https://github.com/apache/iceberg/blob/main/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java
+ * Original @ https://github.com/apache/iceberg/blob/1.6.x/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java
* Adaptor class to translate REST requests into {@link Catalog} API calls.
*/
public class HMSCatalogAdapter implements RESTClient {
@@ -696,10 +696,8 @@ public <T extends RESTResponse> T postForm(
}
@Override
- public void close() throws IOException {
- // The calling test is responsible for closing the underlying catalog backing this REST catalog
- // so that the underlying backend catalog is not closed and reopened during the REST catalog's
- // initialize method when fetching the server configuration.
+ public void close() {
+ // The caller is responsible for closing the underlying catalog backing this REST catalog.
}
private static class NamespaceNotSupported extends RuntimeException {
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
similarity index 93%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
rename to standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
index 76394861765c..6e24ffbddc72 100644
--- a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
+++ b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
@@ -131,6 +131,14 @@ protected Server createServer() {
* @throws Exception if servlet initialization fails
*/
public static Server startServer(Configuration conf) throws Exception {
- return new HMSCatalogServer(conf, null).startServer();
+ Server server = new HMSCatalogServer(conf, null).startServer();
+ if (server != null) {
+ if (!server.isStarted()) {
+ LOG.error("Unable to start property-maps servlet server on {}", server.getURI());
+ } else {
+ LOG.info("Started property-maps servlet server on {}", server.getURI());
+ }
+ }
+ return server;
}
}
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
similarity index 96%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
rename to standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
index 22acfd190214..bc581fc26b9c 100644
--- a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
+++ b/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
@@ -29,7 +29,6 @@
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
-import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -45,7 +44,7 @@
import org.slf4j.LoggerFactory;
/**
- * Original @ https://github.com/apache/iceberg/blob/main/core/src/test/java/org/apache/iceberg/rest/RESTCatalogServlet.java
+ * Original @ https://github.com/apache/iceberg/blob/1.6.x/core/src/test/java/org/apache/iceberg/rest/RESTCatalogServlet.java
* The RESTCatalogServlet provides a servlet implementation used in combination with a
* RESTCatalogAdaptor to proxy the REST Spec to any Catalog implementation.
*/
@@ -60,7 +59,7 @@ public HMSCatalogServlet(HMSCatalogAdapter restCatalogAdapter) {
}
@Override
- protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+ protected void service(HttpServletRequest request, HttpServletResponse response) {
try {
ServletRequestContext context = ServletRequestContext.from(request);
response.setStatus(HttpServletResponse.SC_OK);
diff --git a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java b/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
rename to standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
diff --git a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java b/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
similarity index 98%
rename from standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
rename to standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
index 045964bfef64..989aeaea5682 100644
--- a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
+++ b/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
@@ -37,6 +37,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.io.Reader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
@@ -433,15 +434,7 @@ static <T> String serialize(T object) {
}
}
- static <T> T deserialize(String s) {
- try {
- return MAPPER.readValue(s, new TypeReference<T>() {});
- } catch (JsonProcessingException xany) {
- throw new RuntimeException(xany);
- }
- }
-
- static <T> T deserialize(BufferedReader s) {
+ static <T> T deserialize(Reader s) {
try {
return MAPPER.readValue(s, new TypeReference<T>() {});
} catch (IOException xany) {
diff --git a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java b/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
rename to standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json b/standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
rename to standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json b/standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
rename to standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json b/standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
rename to standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/hive-log4j2.properties b/standalone-metastore/metastore-iceberg-catalog/src/test/resources/hive-log4j2.properties
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/hive-log4j2.properties
rename to standalone-metastore/metastore-iceberg-catalog/src/test/resources/hive-log4j2.properties
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index 8a2a7fe14aa8..f30f7b6563a9 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -336,7 +336,7 @@ protected int getServerPort() {
}
@Override
- protected HttpServlet createServlet() throws IOException {
+ protected HttpServlet createServlet() {
ServletSecurity security = new ServletSecurity(configuration, PropertyServlet.isAuthJwt(configuration));
return security.proxy(new PropertyServlet(configuration));
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index 21f39eb9f932..71c9b20fb80e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -53,7 +53,7 @@
*
* A typical usage in a servlet is the following:
*
- * SecureServletCaller security; // ...
+ * ServletSecurity security; // ...
* {@literal @}Override protected void doPost(HttpServletRequest request, HttpServletResponse response)
* throws ServletException, IOException {
* security.execute(request, response, this::runPost);
diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml
index 1dcfd8127654..6e55634f2fdd 100644
--- a/standalone-metastore/pom.xml
+++ b/standalone-metastore/pom.xml
@@ -29,7 +29,7 @@
<module>metastore-common</module>
<module>metastore-server</module>
<module>metastore-tools</module>
- <module>metastore-catalog</module>
+ <module>metastore-iceberg-catalog</module>
</modules>
From 2be93277d0b4768c4fa25e60d635bf263c6d760b Mon Sep 17 00:00:00 2001
From: Henrib
Date: Wed, 19 Feb 2025 21:18:52 +0100
Subject: [PATCH 27/40] Javadoc nit;
---
.../org/apache/hadoop/hive/metastore/ServletServerBuilder.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index dd2f355cfc32..e2cb5c971900 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -129,7 +129,7 @@ protected Server createServer(int port) throws IOException {
/**
* Convenience method to start a http server that only serves this servlet.
- * @return the server instance or null if port < 0
+ * @return the server instance or null if port &lt; 0
* @throws Exception if servlet initialization fails
*/
public Server startServer() throws Exception {
From 6c2b9308344c26ea829b3eca6845c643a2937d47 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 21 Feb 2025 12:39:12 +0100
Subject: [PATCH 28/40] HIVE-28059 : create only one Jetty instance for HMS
property & Iceberg Catalog servlets; - tests servlet builder including port
sharing or not; - moving module to metastore-catalog; - fixing nits;
---
.../pom.xml | 2 +-
.../iceberg/rest/HMSCachingCatalog.java | 0
.../iceberg/rest/HMSCatalogAdapter.java | 1 -
.../apache/iceberg/rest/HMSCatalogServer.java | 78 +++--
.../iceberg/rest/HMSCatalogServlet.java | 8 +-
.../iceberg/hive/IcebergTestHelper.java | 0
.../org/apache/iceberg/rest/HMSTestBase.java | 6 +-
.../apache/iceberg/rest/TestHMSCatalog.java | 2 +-
.../auth/jwt/jwt-authorized-key.json | 0
.../auth/jwt/jwt-unauthorized-key.json | 0
.../auth/jwt/jwt-verification-jwks.json | 0
.../src/test/resources/log4j2.properties} | 25 +-
.../hive/metastore/conf/MetastoreConf.java | 19 +-
.../hadoop/hive/metastore/HiveMetaStore.java | 44 ++-
.../hive/metastore/PropertyServlet.java | 58 ++--
.../hive/metastore/ServletServerBuilder.java | 270 ++++++++++++++----
.../metastore/TestServletServerBuilder.java | 233 +++++++++++++++
standalone-metastore/pom.xml | 2 +-
18 files changed, 558 insertions(+), 190 deletions(-)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/pom.xml (99%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java (100%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java (99%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java (69%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java (97%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java (100%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/test/java/org/apache/iceberg/rest/HMSTestBase.java (98%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java (99%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/test/resources/auth/jwt/jwt-authorized-key.json (100%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/test/resources/auth/jwt/jwt-unauthorized-key.json (100%)
rename standalone-metastore/{metastore-iceberg-catalog => metastore-catalog}/src/test/resources/auth/jwt/jwt-verification-jwks.json (100%)
rename standalone-metastore/{metastore-iceberg-catalog/src/test/resources/hive-log4j2.properties => metastore-catalog/src/test/resources/log4j2.properties} (71%)
create mode 100644 standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java
diff --git a/standalone-metastore/metastore-iceberg-catalog/pom.xml b/standalone-metastore/metastore-catalog/pom.xml
similarity index 99%
rename from standalone-metastore/metastore-iceberg-catalog/pom.xml
rename to standalone-metastore/metastore-catalog/pom.xml
index 812d0b36bdfb..865c5e8b6f02 100644
--- a/standalone-metastore/metastore-iceberg-catalog/pom.xml
+++ b/standalone-metastore/metastore-catalog/pom.xml
@@ -17,7 +17,7 @@
<version>4.1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>

- <artifactId>hive-standalone-metastore-iceberg-catalog</artifactId>
+ <artifactId>hive-standalone-metastore-catalog</artifactId>
<name>Hive Metastore Iceberg Catalog</name>
<description>..</description>
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
similarity index 100%
rename from standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
rename to standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
similarity index 99%
rename from standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
rename to standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
index 80652a2a7ec6..063dbb59e311 100644
--- a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
+++ b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
@@ -21,7 +21,6 @@
import com.codahale.metrics.Counter;
import org.apache.hadoop.hive.metastore.metrics.Metrics;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
similarity index 69%
rename from standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
rename to standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
index 6e24ffbddc72..eb5886972eaf 100644
--- a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
+++ b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
@@ -32,14 +32,13 @@
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.hive.HiveCatalog;
import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Iceberg Catalog server creator.
*/
-public class HMSCatalogServer extends ServletServerBuilder {
+public class HMSCatalogServer {
private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogServer.class);
protected static final AtomicReference<Reference<Catalog>> catalogRef = new AtomicReference<>();
@@ -52,29 +51,33 @@ protected static void setLastCatalog(Catalog catalog) {
catalogRef.set(new SoftReference<>(catalog));
}
+ protected final Configuration configuration;
protected final int port;
protected final String path;
protected Catalog catalog;
protected HMSCatalogServer(Configuration conf, Catalog catalog) {
- super(conf);
port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT);
path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH);
+ this.configuration = conf;
this.catalog = catalog;
}
-
- @Override
- protected String getServletPath() {
+
+ public int getPort() {
+ return port;
+ }
+
+ public String getPath() {
return path;
}
-
- @Override
- protected int getServerPort() {
- return port;
+
+ public Catalog getCatalog() {
+ return catalog;
}
protected Catalog createCatalog() {
final Map<String, String> properties = new TreeMap<>();
+ MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, "");
final String configUri = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS);
if (configUri != null) {
properties.put("uri", configUri);
@@ -100,45 +103,40 @@ protected HttpServlet createServlet(Catalog catalog) throws IOException {
return security.proxy(new HMSCatalogServlet(new HMSCatalogAdapter(catalog)));
}
- @Override
protected HttpServlet createServlet() throws IOException {
- Catalog actualCatalog = catalog;
- if (actualCatalog == null) {
- MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, "");
- actualCatalog = catalog = createCatalog();
+ if (port >= 0 && path != null && !path.isEmpty()) {
+ Catalog actualCatalog = catalog;
+ if (actualCatalog == null) {
+ actualCatalog = catalog = createCatalog();
+ }
+ setLastCatalog(actualCatalog);
+ return createServlet(actualCatalog);
}
- setLastCatalog(actualCatalog);
- return createServlet(actualCatalog);
- }
-
- @Override
- protected Server createServer() {
- final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_MAX);
- final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_MIN);
- final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_JETTY_THREADPOOL_IDLE);
- final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads, minThreads, idleTimeout);
- return new Server(threadPool);
+ return null;
}
+
/**
- * Convenience method to start a http server that only serves the Iceberg
- * catalog servlet.
- *
- * This one is looked up through reflection to start from HMS.
+ * Factory method to describe Iceberg servlet.
+ * <p>This one is looked up through reflection to start from HMS.
*
* @param conf the configuration
- * @return the server instance
- * @throws Exception if servlet initialization fails
+ * @return the servlet descriptor instance
*/
- public static Server startServer(Configuration conf) throws Exception {
- Server server = new HMSCatalogServer(conf, null).startServer();
- if (server != null) {
- if (!server.isStarted()) {
- LOG.error("Unable to start property-maps servlet server on {}", server.getURI());
- } else {
- LOG.info("Started property-maps servlet server on {}", server.getURI());
+ public static ServletServerBuilder.Descriptor createServlet(Configuration configuration) {
+ try {
+ HMSCatalogServer hms = new HMSCatalogServer(configuration, null);
+ HttpServlet servlet = hms.createServlet();
+ if (servlet != null) {
+ return new ServletServerBuilder.Descriptor(hms.getPort(), hms.getPath(), servlet) {
+ @Override public String toString() {
+ return "Iceberg REST Catalog";
+ }
+ };
}
+ } catch (Exception exception) {
+ LOG.error("failed to create servlet ", exception);
}
- return server;
+ return null;
}
}
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
similarity index 97%
rename from standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
rename to standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
index bc581fc26b9c..4b0e6a47080c 100644
--- a/standalone-metastore/metastore-iceberg-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
+++ b/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
@@ -32,8 +32,6 @@
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import org.apache.hc.core5.http.ContentType;
-import org.apache.hc.core5.http.HttpHeaders;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.io.CharStreams;
import org.apache.iceberg.rest.HMSCatalogAdapter.HTTPMethod;
@@ -50,9 +48,12 @@
*/
public class HMSCatalogServlet extends HttpServlet {
private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogServlet.class);
+ private static final String CONTENT_TYPE = "Content-Type";
+ private static final String APPLICATION_JSON = "application/json";
+
private final HMSCatalogAdapter restCatalogAdapter;
private final Map<String, String> responseHeaders =
- ImmutableMap.of(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.getMimeType());
+ ImmutableMap.of(CONTENT_TYPE, APPLICATION_JSON);
public HMSCatalogServlet(HMSCatalogAdapter restCatalogAdapter) {
this.restCatalogAdapter = restCatalogAdapter;
@@ -64,7 +65,6 @@ protected void service(HttpServletRequest request, HttpServletResponse response)
ServletRequestContext context = ServletRequestContext.from(request);
response.setStatus(HttpServletResponse.SC_OK);
responseHeaders.forEach(response::setHeader);
-
final Optional<ErrorResponse> error = context.error();
if (error.isPresent()) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java b/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
similarity index 100%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
rename to standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java b/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
similarity index 98%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
rename to standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
index 989aeaea5682..abcce301ca22 100644
--- a/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
+++ b/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
@@ -68,7 +68,6 @@
import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
import org.apache.hadoop.hive.metastore.ObjectStore;
-import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.metrics.Metrics;
@@ -194,11 +193,10 @@ public void setUp() throws Exception {
// The manager decl
PropertyManager.declare(NS, HMSPropertyManager.class);
// The client
- HiveMetaStoreClient client = createClient(conf, port);
+ HiveMetaStoreClient client = createClient(conf);
Assert.assertNotNull("Unable to connect to the MetaStore server", client);
// create a managed root
- Warehouse wh = new Warehouse(conf);
String location = temp.newFolder("hivedb2023").getAbsolutePath();
Database db = new Database(DB_NAME, "catalog test", location, Collections.emptyMap());
client.createDatabase(db);
@@ -242,7 +240,7 @@ private static Catalog acquireServer(int[] port) throws InterruptedException {
}
}
- protected HiveMetaStoreClient createClient(Configuration conf, int port) throws Exception {
+ protected HiveMetaStoreClient createClient(Configuration conf) throws Exception {
MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS, "");
MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.EXECUTE_SET_UGI, false);
return new HiveMetaStoreClient(conf);
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java b/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
similarity index 99%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
rename to standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
index 1dd2034cb417..8b5a795e2c2d 100644
--- a/standalone-metastore/metastore-iceberg-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
+++ b/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
@@ -78,7 +78,7 @@ public void testCreateNamespaceHttp() throws Exception {
"\"properties\":{ \"owner\": \"apache\", \"group\" : \"iceberg\" }"
+"}");
Assert.assertNotNull(response);
- HiveMetaStoreClient client = createClient(conf, port);
+ HiveMetaStoreClient client = createClient(conf);
Database database1 = client.getDatabase(ns);
Assert.assertEquals("apache", database1.getParameters().get("owner"));
Assert.assertEquals("iceberg", database1.getParameters().get("group"));
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json b/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
similarity index 100%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
rename to standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json b/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
similarity index 100%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
rename to standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json b/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
similarity index 100%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
rename to standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
diff --git a/standalone-metastore/metastore-iceberg-catalog/src/test/resources/hive-log4j2.properties b/standalone-metastore/metastore-catalog/src/test/resources/log4j2.properties
similarity index 71%
rename from standalone-metastore/metastore-iceberg-catalog/src/test/resources/hive-log4j2.properties
rename to standalone-metastore/metastore-catalog/src/test/resources/log4j2.properties
index 36aaa3ef9ccf..7d592ef2df94 100644
--- a/standalone-metastore/metastore-iceberg-catalog/src/test/resources/hive-log4j2.properties
+++ b/standalone-metastore/metastore-catalog/src/test/resources/log4j2.properties
@@ -17,23 +17,22 @@
name=PropertiesConfig
property.filename = logs
-appenders = console,captured
+appenders = console
appender.console.type = Console
appender.console.name = STDOUT
appender.console.layout.type = PatternLayout
-appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %c{1} - %msg%n
+appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %c{5} - %msg%n
-appender.captured.type = CapturingLogAppender
-appender.captured.name = CAPTURED
+rootLogger.level = INFO
+rootLogger.appenderRefs = stdout
+rootLogger.appenderRef.stdout.ref = STDOUT
-loggers=file
-logger.file.name=guru.springframework.blog.log4j2properties
-logger.file.level = debug
-logger.file.appenderRefs = file
-logger.file.appenderRef.file.ref = LOGFILE
+loggers = HttpClient, JettyHttpServer
+
+logger.HttpClient.name = org.apache.http.client
+logger.HttpClient.level = INFO
+
+logger.JettyHttpServer.name = org.eclipse.jetty.server
+logger.JettyHttpServer.level = INFO
-rootLogger.level = info
-rootLogger.appenderRefs = stdout,captured
-rootLogger.appenderRef.stdout.ref = STDOUT
-rootLogger.appenderRef.captured.ref = CAPTURED
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index eb0c6cce90d5..ce91867922fe 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -1826,6 +1826,13 @@ public enum ConfVars {
new StringSetValidator("simple", "jwt"),
"Property-maps servlet authentication method (simple or jwt)."
),
+ ICEBERG_CATALOG_SERVLET_FACTORY("hive.metastore.catalog.servlet.factory",
+ "hive.metastore.catalog.servlet.factory",
+ "org.apache.iceberg.rest.HMSCatalogServer",
+ "HMS Iceberg Catalog servlet factory class name."
+ + "The factory needs to expose a method: "
+ + "public static HttpServlet createServlet(Configuration configuration);"
+ ),
ICEBERG_CATALOG_SERVLET_PATH("hive.metastore.catalog.servlet.path",
"hive.metastore.catalog.servlet.path", "iceberg",
"HMS Iceberg Catalog servlet path component of URL endpoint."
@@ -1840,18 +1847,6 @@ public enum ConfVars {
"hive.metastore.catalog.servlet.auth", "jwt",
"HMS Iceberg Catalog servlet authentication method (simple or jwt)."
),
- ICEBERG_CATALOG_JETTY_THREADPOOL_MIN("hive.metastore.catalog.jetty.threadpool.min",
- "hive.metastore.catalog.jetty.threadpool.min", 8,
- "HMS Iceberg Catalog embedded Jetty minimum number of threads."
- ),
- ICEBERG_CATALOG_JETTY_THREADPOOL_MAX("hive.metastore.catalog.jetty.threadpool.max",
- "hive.metastore.catalog.jetty.threadpool.max", 256,
- "HMS Iceberg Catalog embedded Jetty maximum number of threads."
- ),
- ICEBERG_CATALOG_JETTY_THREADPOOL_IDLE("hive.metastore.catalog.jetty.threadpool.idle",
- "hive.metastore.catalog.jetty.threadpool.idle", 60_000L,
- "HMS Iceberg Catalog embedded Jetty thread idle time."
- ),
ICEBERG_CATALOG_CACHE_EXPIRY("hive.metastore.catalog.cache.expiry",
"hive.metastore.catalog.cache.expiry", 60_000L,
"HMS Iceberg Catalog cache expiry."
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 13463355a284..1576cb6a2933 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -122,16 +122,15 @@ public class HiveMetaStore extends ThriftHiveMetastore {
private static ZooKeeperHiveHelper zooKeeperHelper = null;
private static String msHost = null;
private static ThriftServer thriftServer;
- private static Server propertyServer = null;
- private static Server icebergServer = null;
+ private static Server servletServer = null;
public static Server getPropertyServer() {
- return propertyServer;
+ return servletServer;
}
public static Server getIcebergServer() {
- return icebergServer;
+ return servletServer;
}
public static boolean isRenameAllowed(Database srcDB, Database destDB) {
@@ -317,22 +316,14 @@ public static void main(String[] args) throws Throwable {
if (isCliVerbose) {
System.err.println(shutdownMsg);
}
- // property server
- if (propertyServer != null) {
+ // servlet server
+ if (servletServer != null) {
try {
- propertyServer.stop();
+ servletServer.stop();
} catch (Exception e) {
LOG.error("Error stopping Property Map server.", e);
}
}
- // iceberg server
- if (icebergServer != null) {
- try {
- icebergServer.stop();
- } catch (Exception e) {
- LOG.error("Error stopping Iceberg API server.", e);
- }
- }
// metrics
if (MetastoreConf.getBoolVar(conf, ConfVars.METRICS_ENABLED)) {
try {
@@ -756,19 +747,24 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
throw e;
}
}
- // optionally create and start the property server and servlet
- propertyServer = PropertyServlet.startServer(conf);
- // optionally create and start the Iceberg REST server and servlet
- icebergServer = startIcebergCatalog(conf);
-
+ // optionally create and start the property and Iceberg REST server
+ servletServer = ServletServerBuilder.startServer(LOG, conf,
+ PropertyServlet::createServlet,
+ HiveMetaStore::createIcebergServlet);
thriftServer.start();
}
- static Server startIcebergCatalog(Configuration configuration) {
+ /**
+ * Creates the Iceberg REST catalog servlet descriptor.
+ * @param configuration the configuration
+ * @return the servlet descriptor (can be null)
+ */
+ static ServletServerBuilder.Descriptor createIcebergServlet(Configuration configuration) {
try {
- Class<?> iceClazz = Class.forName("org.apache.iceberg.rest.HMSCatalogServer");
- Method iceStart = iceClazz.getMethod("startServer", Configuration.class);
- return (Server) iceStart.invoke(null, configuration);
+ String className = MetastoreConf.getVar(configuration, ConfVars.ICEBERG_CATALOG_SERVLET_FACTORY);
+ Class<?> iceClazz = Class.forName(className);
+ Method iceStart = iceClazz.getMethod("createServlet", Configuration.class);
+ return (ServletServerBuilder.Descriptor) iceStart.invoke(null, configuration);
} catch (ClassNotFoundException xnf) {
LOG.warn("unable to start Iceberg REST Catalog server, missing jar?", xnf);
return null;
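
For context, the same entry point can be exercised on its own. The sketch below is illustrative only: the class and logger names are arbitrary, and it assumes the startServer(Logger, Configuration, factories...) overload used above is publicly accessible:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.PropertyServlet;
    import org.apache.hadoop.hive.metastore.ServletServerBuilder;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
    import org.eclipse.jetty.server.Server;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    /** Illustrative only: starting the HMS property servlet through the shared builder. */
    public class ServletStartupExample {
      private static final Logger LOG = LoggerFactory.getLogger(ServletStartupExample.class);

      public static void main(String[] args) throws Exception {
        Configuration conf = MetastoreConf.newMetastoreConf();
        // A single Jetty instance hosts every descriptor; null is returned when no servlet is configured.
        Server server = ServletServerBuilder.startServer(LOG, conf, PropertyServlet::createServlet);
        if (server != null) {
          LOG.info("Servlets listening at {}", server.getURI());
          server.stop();
        }
      }
    }
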
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index f30f7b6563a9..c5893ca4f1f8 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -69,6 +69,10 @@ static boolean isAuthJwt(Configuration configuration) {
this.configuration = configuration;
}
+ @Override public String getServletName() {
+ return "HMS property";
+ }
+
private String strError(String msg, Object...args) {
return String.format(PTYERROR + msg, args);
}
@@ -313,50 +317,34 @@ protected void doGet(HttpServletRequest request,
}
}
- /**
- * Single servlet creation helper.
- */
- private static class ServerBuilder extends ServletServerBuilder {
- final int port;
- final String path;
- ServerBuilder(Configuration conf) {
- super(conf);
- port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT);
- path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
- }
-
- @Override
- protected String getServletPath() {
- return path;
- }
-
- @Override
- protected int getServerPort() {
- return port;
- }
-
- @Override
- protected HttpServlet createServlet() {
- ServletSecurity security = new ServletSecurity(configuration, PropertyServlet.isAuthJwt(configuration));
- return security.proxy(new PropertyServlet(configuration));
+ public static ServletServerBuilder.Descriptor createServlet(Configuration configuration) {
+ try {
+ int port = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT);
+ String path = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
+ if (port >= 0 && path != null && !path.isEmpty()) {
+ ServletSecurity security = new ServletSecurity(configuration, PropertyServlet.isAuthJwt(configuration));
+ HttpServlet servlet = security.proxy(new PropertyServlet(configuration));
+ return new ServletServerBuilder.Descriptor(port, path, servlet) {
+ @Override public String toString() {
+ return "HMS property";
+ }
+ };
+ }
+ } catch (Exception io) {
+ LOGGER.error("failed to create servlet ", io);
}
+ return null;
}
/**
* Convenience method to start a http server that only serves this servlet.
+ *
* @param conf the configuration
* @return the server instance
* @throws Exception if servlet initialization fails
*/
public static Server startServer(Configuration conf) throws Exception {
- Server server = new ServerBuilder(conf).startServer();
- if (server != null) {
- if (!server.isStarted()) {
- LOGGER.error("Unable to start property-maps servlet server on {}", server.getURI());
- } else {
- LOGGER.info("Started property-maps servlet server on {}", server.getURI());
- }
- }
- return server;
+ return ServletServerBuilder.startServer(LOGGER, conf, PropertyServlet::createServlet);
}
+
}
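As a hedged illustration of the guard in createServlet above (the servlet is only built when the configured port is non-negative and the path is non-empty), an embedded caller could start just the property servlet roughly as follows; the setter calls are assumptions, not part of the patch.

    Configuration conf = MetastoreConf.newMetastoreConf();
    // 0 lets the system pick the port; a negative value disables the servlet
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT, 0);
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH, "hmscli");
    Server server = PropertyServlet.startServer(conf); // null when nothing is configured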
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index e2cb5c971900..54d77ba46e3f 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -19,6 +19,15 @@
package org.apache.hadoop.hive.metastore;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import javax.servlet.Servlet;
import javax.servlet.http.HttpServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
@@ -26,19 +35,24 @@
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.ContextHandlerCollection;
+import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.slf4j.Logger;
/**
- * Helper class to ease creation of embedded Jetty serving one servlet on a given port.
- * <p>When using Jetty, the easiest way - and may be only - to serve different servlets
- * on different ports is to create 2 separate Jetty instances; this helper eases creation
- * of such a dedicated server.
+ * Helper class to ease creation of embedded Jetty serving servlets on
+ * different ports.
*/
-public abstract class ServletServerBuilder {
+public class ServletServerBuilder {
+ /**
+ * Keeping track of descriptors.
+ */
+ private Map<Servlet, Descriptor> descriptorsMap = new IdentityHashMap<>();
/**
* The configuration instance.
*/
@@ -46,52 +60,86 @@ public abstract class ServletServerBuilder {
/**
* Creates a builder instance.
+ *
* @param conf the configuration
*/
protected ServletServerBuilder(Configuration conf) {
this.configuration = conf;
}
- /**
- * Gets the servlet path.
- * @return the path
- */
- protected abstract String getServletPath();
+
+ public Configuration getConfiguration() {
+ return configuration;
+ }
/**
- * Gets the server port.
- * @return the port
+ * A descriptor of a servlet.
+ * <p>After the server is started, an unspecified port will be updated to reflect
+ * what the system allocated.
*/
- protected abstract int getServerPort();
+ public static class Descriptor {
+ private int port;
+ private final String path;
+ private final HttpServlet servlet;
+
+ /**
+ * Create a servlet descriptor.
+ * @param port the servlet port (or 0 if system allocated)
+ * @param path the servlet path
+ * @param servlet the servlet instance
+ */
+ public Descriptor(int port, String path, HttpServlet servlet) {
+ this.port = port;
+ this.path = path;
+ this.servlet = servlet;
+ }
+
+ public String toString() {
+ return servlet.getClass().getSimpleName() + ":" + port + "/" + path;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public HttpServlet getServlet() {
+ return servlet;
+ }
+ }
/**
- * Creates the servlet instance.
- * <p>It is often advisable to use {@link ServletSecurity} to proxy the actual servlet instance.
- * @return the servlet instance
- * @throws IOException if servlet creation fails
+ * Adds a servlet instance.
+ * <p>The servlet port can be shared between servlets; if 0, the system will provide
+ * a port. If the port is < 0, the system will provide a dedicated (i.e. non-shared)
+ * port for the servlet.
+ * @param port the servlet port
+ * @param path the servlet path
+ * @param servlet a servlet instance
+ * @return a descriptor
*/
- protected abstract HttpServlet createServlet() throws IOException;
+ public Descriptor addServlet(int port, String path, HttpServlet servlet){
+ Descriptor descriptor = new Descriptor(port, path, servlet);
+ return addServlet(descriptor);
+ }
/**
- * Creates the servlet context.
- * @param servlet the servlet
- * @return a context instance
+ * Adds a servlet instance.
+ *
+ * @param descriptor a descriptor
+ * @return the descriptor
*/
- protected ServletContextHandler createContext(HttpServlet servlet) {
- // hook the servlet
- ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
- context.setContextPath("/");
- ServletHolder servletHolder = new ServletHolder(servlet);
- servletHolder.setInitParameter("javax.ws.rs.Application", "ServiceListPublic");
- final String path = getServletPath();
- context.addServlet(servletHolder, "/" + path + "/*");
- context.setVirtualHosts(null);
- context.setGzipHandler(new GzipHandler());
- return context;
+ public Descriptor addServlet(Descriptor descriptor){
+ descriptorsMap.put(descriptor.getServlet(), descriptor);
+ return descriptor;
}
/**
* Creates a server instance.
- * <p>Default use configuration to determine threadpool constants?
+ * <p>Default use configuration to determine thread-pool constants?
+ *
* @return the server instance
* @throws IOException if server creation fails
*/
@@ -100,23 +148,24 @@ protected Server createServer() throws IOException {
final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_MIN);
final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_IDLE);
final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads, minThreads, idleTimeout);
- return new Server(threadPool);
+ Server server = new Server(threadPool);
+ server.setStopAtShutdown(true);
+ return server;
}
/**
* Creates a server instance and a connector on a given port.
+ *
+ * @param server the server instance
+ * @param sslContextFactory the ssl factory
* @param port the port
- * @return the server instance listening to the port
+ * @return the server connector listening to the port
* @throws IOException if server creation fails
*/
- protected Server createServer(int port) throws IOException {
- final Server server = createServer();
- server.setStopAtShutdown(true);
- final SslContextFactory sslContextFactory = ServletSecurity.createSslContextFactory(configuration);
+ protected ServerConnector createConnector(Server server, SslContextFactory sslContextFactory, int port) throws IOException {
final ServerConnector connector = new ServerConnector(server, sslContextFactory);
connector.setPort(port);
connector.setReuseAddress(true);
- server.addConnector(connector);
HttpConnectionFactory httpFactory = connector.getConnectionFactory(HttpConnectionFactory.class);
// do not leak information
if (httpFactory != null) {
@@ -124,28 +173,141 @@ protected Server createServer(int port) throws IOException {
httpConf.setSendServerVersion(false);
httpConf.setSendXPoweredBy(false);
}
- return server;
+ return connector;
+ }
+
+ /**
+ * Adds a servlet to its intended servlet context handler.
+ * @param handlersMap the map of port to handlers
+ * @param descriptor the servlet descriptor
+ * @throws IOException
+ */
+ protected void addServlet(Map<Integer, ServletContextHandler> handlersMap, Descriptor descriptor) throws IOException {
+ final int port = descriptor.getPort();
+ final String path = descriptor.getPath();
+ final HttpServlet servlet = descriptor.getServlet();
+ // if port is < 0, use one for this servlet only
+ int key = port < 0 ? -1 - handlersMap.size() : port;
+ ServletContextHandler handler = handlersMap.computeIfAbsent(key, p -> {
+ ServletContextHandler servletHandler = new ServletContextHandler(ServletContextHandler.NO_SESSIONS);
+ servletHandler.setContextPath("/");
+ servletHandler.setGzipHandler(new GzipHandler());
+ return servletHandler;
+ });
+ ServletHolder servletHolder = new ServletHolder(servlet);
+ servletHolder.setInitParameter("javax.ws.rs.Application", "ServiceListPublic");
+ handler.addServlet(servletHolder, "/" + path + "/*");
}
/**
- * Convenience method to start a http server that only serves this servlet.
- * @return the server instance or null if port < 0
+ * Convenience method to start a http server that serves all configured
+ * servlets.
+ *
+ * @return the server instance or null if no servlet was configured
* @throws Exception if servlet initialization fails
*/
public Server startServer() throws Exception {
- int port = getServerPort();
- if (port < 0) {
+ // add all servlets
+ Map<Integer, ServletContextHandler> handlersMap = new HashMap<>();
+ for(Descriptor descriptor : descriptorsMap.values()) {
+ addServlet(handlersMap, descriptor);
+ }
+ final int size = handlersMap.size();
+ if (size == 0) {
return null;
}
- // create the servlet
- final HttpServlet servlet = createServlet();
- // hook the servlet
- ServletContextHandler context = createContext(servlet);
- // Http server
- final Server httpServer = createServer(port);
- httpServer.setHandler(context);
- httpServer.start();
- return httpServer;
+ final Server server = createServer();
+ // create the connectors
+ final SslContextFactory sslFactory = ServletSecurity.createSslContextFactory(configuration);
+ final int[] keys = new int[size];
+ final ServerConnector[] connectors = new ServerConnector[size];
+ final ServletContextHandler[] handlers = new ServletContextHandler[size];
+ Iterator<Map.Entry<Integer, ServletContextHandler>> it = handlersMap.entrySet().iterator();
+ for (int c = 0; it.hasNext(); ++c) {
+ Map.Entry<Integer, ServletContextHandler> entry = it.next();
+ int key = entry.getKey();
+ keys[c] = key;
+ int port = key < 0? 0 : key;
+ ServerConnector connector = createConnector(server, sslFactory, port);
+ connectors[c] = connector;
+ ServletContextHandler handler = entry.getValue();
+ handlers[c] = handler;
+ // make each servlet context be served only by its dedicated connector
+ String host = "hms" + Integer.toString(c);
+ connector.setName(host);
+ handler.setVirtualHosts(new String[]{"@"+host});
+ }
+ // hook the connectors and the handlers
+ server.setConnectors(connectors);
+ HandlerCollection portHandler = new ContextHandlerCollection();
+ portHandler.setHandlers(handlers);
+ server.setHandler(portHandler);
+ // start the server
+ server.start();
+ // collect auto ports
+ for (int i = 0; i < connectors.length; ++i) {
+ int port = connectors[i].getLocalPort();
+ ServletContextHandler handler = handlers[i];
+ ServletHolder[] holders = handler.getServletHandler().getServlets();
+ for(ServletHolder holder : holders) {
+ Servlet servlet = holder.getServletInstance();
+ if (servlet != null) {
+ Descriptor descriptor = descriptorsMap.get(servlet);
+ if (descriptor != null) {
+ descriptor.port = port;
+ }
+ }
+ }
+ }
+ return server;
}
+ /**
+ * Helper for generic use case.
+ * @param logger the logger
+ * @param conf the configuration
+ * @param describe the functions to create descriptors
+ * @return a server instance
+ */
+ @SafeVarargs
+ public static Server startServer(
+ Logger logger,
+ Configuration conf,
+ Function<Configuration, Descriptor>... describe) {
+ List<Descriptor> descriptors = new ArrayList<>();
+ Arrays.asList(describe).forEach(functor -> {
+ ServletServerBuilder.Descriptor descriptor = functor.apply(conf);
+ if (descriptor != null) {
+ descriptors.add(descriptor);
+ };
+ });
+ if (!descriptors.isEmpty()) {
+ ServletServerBuilder builder = new ServletServerBuilder(conf);
+ descriptors.forEach(d -> builder.addServlet(d));
+ try {
+ Server server = builder.startServer();
+ if (server != null) {
+ if (!server.isStarted()) {
+ logger.error("Unable to start property-maps servlet server on {}", server.getURI());
+ } else {
+ descriptors.forEach(descriptor -> {
+ logger.info("Started {} servlet on {}:{}",
+ descriptor.toString(),
+ descriptor.getPort(),
+ descriptor.getPath());
+ });
+ }
+ }
+ return server;
+ } catch(Exception exception) {
+ logger.error("Unable to start servlet server", exception);
+ return null;
+ } catch(Throwable throwable) {
+ logger.error("Unable to start servlet server", throwable);
+ return null;
+ }
+ }
+ return null;
+ }
}
+
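To make the port semantics of addServlet concrete, a small hedged sketch (conf and the servlet instances are assumed to exist): two descriptors sharing one system-assigned connector and a third on a dedicated connector.

    ServletServerBuilder builder = new ServletServerBuilder(conf);
    ServletServerBuilder.Descriptor a = builder.addServlet(0, "a", servletA);  // shared, system-assigned port
    ServletServerBuilder.Descriptor b = builder.addServlet(0, "b", servletB);  // same connector as "a"
    ServletServerBuilder.Descriptor c = builder.addServlet(-1, "c", servletC); // dedicated connector
    Server server = builder.startServer();
    // after start(): a.getPort() == b.getPort(), while c.getPort() differs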
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java
new file mode 100644
index 000000000000..e752ef592e26
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestServletServerBuilder.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import com.google.gson.Gson;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.net.ServerSocket;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.Map;
+import java.util.function.Function;
+import javax.servlet.Servlet;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.eclipse.jetty.server.Server;
+import org.junit.experimental.categories.Category;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import static org.apache.hadoop.hive.metastore.ServletServerBuilder.Descriptor;
+
+@Category(MetastoreUnitTest.class)
+public class TestServletServerBuilder {
+
+ private static final Logger LOG = LoggerFactory.getLogger(TestServletServerBuilder.class);
+
+ private static Function<Configuration, Descriptor> describeServlet(final Map<String, Descriptor> descriptors, int port, String greeting) {
+ return configuration -> {
+ String name = greeting.toLowerCase();
+ HttpServlet s1 = new HelloServlet(greeting) {
+ @Override
+ public String getServletName() {
+ return name + "()";
+ }
+ };
+ Descriptor descriptor = new Descriptor(port, name, s1);
+ descriptors.put(s1.getServletName(), descriptor);
+ return descriptor;
+ };
+ }
+
+ @Test
+ public void testOne() throws Exception {
+ Configuration conf = new Configuration();
+ // keeping track of what is built
+ final Map<String, Descriptor> descriptors = new HashMap<>();
+ Function<Configuration, Descriptor> fd1 = describeServlet(descriptors, 0, "ONE");
+ Function<Configuration, Descriptor> fd2 = describeServlet(descriptors, 0, "TWO");
+ // the 'conventional' way of starting the server
+ Server server = ServletServerBuilder.startServer(LOG, conf, fd1, fd2);
+
+ Descriptor d1 = descriptors.get("one()");
+ Descriptor d2 = descriptors.get("two()");
+ // same port for both servlets
+ Assert.assertTrue(d1.getPort() > 0);
+ Assert.assertEquals(d1.getPort(), d2.getPort());
+ // check
+ URI uri = URI.create("http://localhost:" + d1.getPort());
+ Object one = clientCall(uri.resolve("/one").toURL());
+ Assert.assertEquals("ONE", one);
+ uri = URI.create("http://localhost:" + d2.getPort());
+ Object two = clientCall(uri.resolve("/two").toURL());
+ Assert.assertEquals("TWO", two);
+ server.stop();
+ }
+
+ @Test
+ public void testOnePort() throws Exception {
+ int port;
+ try (ServerSocket server0 = new ServerSocket(0)) {
+ port = server0.getLocalPort();
+ } catch (IOException xio) {
+ // can't run the test if a free port cannot be obtained
+ return;
+ }
+ onePort(port);
+ }
+
+ @Test
+ public void testOnePortAuto() throws Exception {
+ onePort(0);
+ }
+
+ void onePort(int port) throws Exception {
+ Configuration conf = new Configuration();
+ ServletServerBuilder ssb = new ServletServerBuilder(conf);
+ HttpServlet s1 = new HelloServlet("ONE");
+ HttpServlet s2 = new HelloServlet("TWO");
+ Descriptor d1 = ssb.addServlet(port, "one", s1);
+ Descriptor d2 = ssb.addServlet(port, "two", s2);
+ Server server = ssb.startServer();
+ // same port for both servlets
+ Assert.assertTrue(d1.getPort() > 0);
+ Assert.assertEquals(d1.getPort(), d2.getPort());
+ // check
+ URI uri = URI.create("http://localhost:" + d1.getPort());
+ Object one = clientCall(uri.resolve("/one").toURL());
+ Assert.assertEquals("ONE", one);
+ uri = URI.create("http://localhost:" + d2.getPort());
+ Object two = clientCall(uri.resolve("/two").toURL());
+ Assert.assertEquals("TWO", two);
+ server.stop();
+ }
+
+ @Test
+ public void testTwoPorts() throws Exception {
+ runTwoPorts(-1, -2);
+ }
+
+ @Test
+ public void testTwoPortsAuto() throws Exception {
+ int p0, p1;
+ try (ServerSocket server0 = new ServerSocket(0); ServerSocket server1 = new ServerSocket(0)) {
+ p0 = server0.getLocalPort();
+ p1 = server1.getLocalPort();
+ } catch (IOException xio) {
+ // can't run the test if free ports cannot be obtained
+ return;
+ }
+ runTwoPorts(p0, p1);
+ }
+
+ void runTwoPorts(int p1, int p2) throws Exception {
+ Configuration conf = new Configuration();
+ ServletServerBuilder ssb = new ServletServerBuilder(conf);
+ HttpServlet s1 = new HelloServlet("ONE");
+ HttpServlet s2 = new HelloServlet("TWO");
+ Descriptor d1 = ssb.addServlet(p1, "one", s1);
+ Descriptor d2 = ssb.addServlet(p2, "two", s2);
+ Map mappings = new IdentityHashMap<>();
+ Server server = ssb.startServer();
+ // different port for both servlets
+ Assert.assertNotEquals(d1.getPort(), d2.getPort());
+
+ URI uri = URI.create("http://localhost:" + d1.getPort());
+ Object one = clientCall(uri.resolve("/one").toURL());
+ Assert.assertEquals("ONE", one);
+ // fail, not found
+ Object o404 = clientCall(uri.resolve("/two").toURL());
+ Assert.assertEquals(404, o404);
+ uri = URI.create("http://localhost:" + d2.getPort());
+ Object two = clientCall(uri.resolve("/two").toURL());
+ Assert.assertEquals("TWO", two);
+ // fail, not found
+ o404 = clientCall(uri.resolve("/one").toURL());
+ Assert.assertEquals(404, o404);
+ server.stop();
+ }
+
+ static int findFreePort() throws IOException {
+ try (ServerSocket server0 = new ServerSocket(0)) {
+ return server0.getLocalPort();
+ }
+ }
+
+ static int[] find2FreePort() throws IOException {
+ try (ServerSocket socket0 = new ServerSocket(0); ServerSocket socket1 = new ServerSocket(0)) {
+ return new int[]{socket0.getLocalPort(), socket1.getLocalPort()};
+ }
+ }
+
+ /**
+ * Performs a Json client call.
+ *
+ * @param url the url
+ * @return the result the was returned through Json
+ * @throws IOException if marshalling the request/response fail
+ */
+ static Object clientCall(URL url) throws IOException {
+ HttpURLConnection con = (HttpURLConnection) url.openConnection();
+ con.setRequestMethod("GET");
+ con.setRequestProperty("Content-Type", "application/json");
+ con.setRequestProperty("Accept", "application/json");
+ con.setDoOutput(true);
+ int responseCode = con.getResponseCode();
+ if (responseCode == HttpServletResponse.SC_OK) {
+ try (Reader reader = new BufferedReader(
+ new InputStreamReader(con.getInputStream(), StandardCharsets.UTF_8))) {
+ return new Gson().fromJson(reader, Object.class);
+ }
+ }
+ return responseCode;
+ }
+
+}
+
+class HelloServlet extends HttpServlet {
+
+ final String greeting;
+
+ public HelloServlet() {
+ this("Hello");
+ }
+
+ public HelloServlet(String greeting) {
+ this.greeting = greeting;
+ }
+
+ @Override
+ protected void doGet(HttpServletRequest request,
+ HttpServletResponse response) throws ServletException, IOException {
+ response.setContentType("application/json");
+ response.setStatus(HttpServletResponse.SC_OK);
+ response.getWriter().println(greeting);
+ }
+}
diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml
index 6e55634f2fdd..1dcfd8127654 100644
--- a/standalone-metastore/pom.xml
+++ b/standalone-metastore/pom.xml
@@ -29,7 +29,7 @@
    <module>metastore-common</module>
    <module>metastore-server</module>
    <module>metastore-tools</module>
-   <module>metastore-iceberg-catalog</module>
+   <module>metastore-catalog</module>
  </modules>
  <version>4.1.0-SNAPSHOT</version>
From 11c97181ce76151d08a1f44be1d6b2351e8e3d45 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 21 Feb 2025 15:01:55 +0100
Subject: [PATCH 29/40] HIVE-28059 : - moving module to metastore-rest-catalog;
---
.../{metastore-catalog => metastore-rest-catalog}/pom.xml | 4 ++--
.../main/java/org/apache/iceberg/rest/HMSCachingCatalog.java | 0
.../main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java | 0
.../main/java/org/apache/iceberg/rest/HMSCatalogServer.java | 0
.../main/java/org/apache/iceberg/rest/HMSCatalogServlet.java | 0
.../test/java/org/apache/iceberg/hive/IcebergTestHelper.java | 0
.../src/test/java/org/apache/iceberg/rest/HMSTestBase.java | 0
.../src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java | 0
.../src/test/resources/auth/jwt/jwt-authorized-key.json | 0
.../src/test/resources/auth/jwt/jwt-unauthorized-key.json | 0
.../src/test/resources/auth/jwt/jwt-verification-jwks.json | 0
.../src/test/resources/log4j2.properties | 0
standalone-metastore/pom.xml | 2 +-
13 files changed, 3 insertions(+), 3 deletions(-)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/pom.xml (98%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/java/org/apache/iceberg/rest/HMSTestBase.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/resources/auth/jwt/jwt-authorized-key.json (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/resources/auth/jwt/jwt-unauthorized-key.json (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/resources/auth/jwt/jwt-verification-jwks.json (100%)
rename standalone-metastore/{metastore-catalog => metastore-rest-catalog}/src/test/resources/log4j2.properties (100%)
diff --git a/standalone-metastore/metastore-catalog/pom.xml b/standalone-metastore/metastore-rest-catalog/pom.xml
similarity index 98%
rename from standalone-metastore/metastore-catalog/pom.xml
rename to standalone-metastore/metastore-rest-catalog/pom.xml
index 865c5e8b6f02..8202741fdded 100644
--- a/standalone-metastore/metastore-catalog/pom.xml
+++ b/standalone-metastore/metastore-rest-catalog/pom.xml
@@ -17,8 +17,8 @@
    <version>4.1.0-SNAPSHOT</version>
  </parent>
  <modelVersion>4.0.0</modelVersion>
- <artifactId>hive-standalone-metastore-catalog</artifactId>
- <name>Hive Metastore Iceberg Catalog</name>
+ <artifactId>hive-standalone-metastore-rest-catalog</artifactId>
+ <name>Hive Metastore Iceberg REST Catalog</name>
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
rename to standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
rename to standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
rename to standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
diff --git a/standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
rename to standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
diff --git a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
rename to standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/hive/IcebergTestHelper.java
diff --git a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
rename to standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
diff --git a/standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
rename to standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
rename to standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-authorized-key.json
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
rename to standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-unauthorized-key.json
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json b/standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
rename to standalone-metastore/metastore-rest-catalog/src/test/resources/auth/jwt/jwt-verification-jwks.json
diff --git a/standalone-metastore/metastore-catalog/src/test/resources/log4j2.properties b/standalone-metastore/metastore-rest-catalog/src/test/resources/log4j2.properties
similarity index 100%
rename from standalone-metastore/metastore-catalog/src/test/resources/log4j2.properties
rename to standalone-metastore/metastore-rest-catalog/src/test/resources/log4j2.properties
diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml
index 1dcfd8127654..d712cf0fd495 100644
--- a/standalone-metastore/pom.xml
+++ b/standalone-metastore/pom.xml
@@ -29,7 +29,7 @@
    <module>metastore-common</module>
    <module>metastore-server</module>
    <module>metastore-tools</module>
-   <module>metastore-catalog</module>
+   <module>metastore-rest-catalog</module>
  </modules>
  <version>4.1.0-SNAPSHOT</version>
From 0b1add3dc31291efc9ee5b1d6802a500a5349842 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 21 Feb 2025 15:04:00 +0100
Subject: [PATCH 30/40] HIVE-28059: Update src.xml
---
packaging/src/main/assembly/src.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packaging/src/main/assembly/src.xml b/packaging/src/main/assembly/src.xml
index 654682d89926..9cdbed13776e 100644
--- a/packaging/src/main/assembly/src.xml
+++ b/packaging/src/main/assembly/src.xml
@@ -105,7 +105,7 @@
        <include>standalone-metastore/metastore-common/**/*</include>
        <include>standalone-metastore/metastore-server/**/*</include>
        <include>standalone-metastore/metastore-tools/**/*</include>
-       <include>standalone-metastore/metastore-catalog/**/*</include>
+       <include>standalone-metastore/metastore-rest-catalog/**/*</include>
        <include>standalone-metastore/src/assembly/src.xml</include>
        <include>standalone-metastore/pom.xml</include>
        <include>streaming/**/*</include>
From 670ab10677569e480be38edf1719336ee3158194 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 21 Feb 2025 21:04:03 +0100
Subject: [PATCH 31/40] HIVE-28059 : - latest remarks (naming, etc)
---
.../hive/metastore/conf/MetastoreConf.java | 20 ++--
.../metastore-rest-catalog/pom.xml | 2 +-
...alogServer.java => HMSCatalogFactory.java} | 13 ++-
.../org/apache/iceberg/rest/HMSTestBase.java | 14 ++-
.../hadoop/hive/metastore/HiveMetaStore.java | 53 +++++++++--
.../hive/metastore/ServletServerBuilder.java | 92 ++++++++++++-------
.../metastore/properties/HMSServletTest.java | 3 +-
.../properties/HMSServletTest1A.java | 4 +-
.../metastore/properties/HMSServletTestA.java | 4 +-
9 files changed, 129 insertions(+), 76 deletions(-)
rename standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/{HMSCatalogServer.java => HMSCatalogFactory.java} (94%)
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index ce91867922fe..e0571664193c 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -1828,7 +1828,7 @@ public enum ConfVars {
),
ICEBERG_CATALOG_SERVLET_FACTORY("hive.metastore.catalog.servlet.factory",
"hive.metastore.catalog.servlet.factory",
- "org.apache.iceberg.rest.HMSCatalogServer",
+ "org.apache.iceberg.rest.HMSCatalogFactory",
"HMS Iceberg Catalog servlet factory class name."
+ "The factory needs to expose a method: "
+ "public static HttpServlet createServlet(Configuration configuration);"
@@ -1851,17 +1851,17 @@ public enum ConfVars {
"hive.metastore.catalog.cache.expiry", 60_000L,
"HMS Iceberg Catalog cache expiry."
),
- EMBEDDED_JETTY_THREADPOOL_MIN("hive.metastore.embedded.jetty.threadpool.min",
- "hive.metastore.embedded.jetty.threadpool.min", 2,
- "HMS embedded Jetty server(s) minimum number of threads."
+ HTTPSERVER_THREADPOOL_MIN("hive.metastore.httpserver.threadpool.min",
+ "hive.metastore.httpserver.threadpool.min", 8,
+ "HMS embedded HTTP server minimum number of threads."
),
- EMBEDDED_JETTY_THREADPOOL_MAX("hive.metastore.embedded.jetty.threadpool.max",
- "hive.metastore.embedded.jetty.threadpool.max", 256,
- "HMS embedded Jetty server(s) maximum number of threads."
+ HTTPSERVER_THREADPOOL_MAX("hive.metastore.httpserver.threadpool.max",
+ "hive.metastore.httpserver.threadpool.max", 256,
+ "HMS embedded HTTP server maximum number of threads."
),
- EMBEDDED_JETTY_THREADPOOL_IDLE("hive.metastore.embedded.jetty.threadpool.idle",
- "hive.metastore.embedded.jetty.threadpool.idle", 60_000L,
- "HMS embedded Jetty server(s) thread idle time."
+ HTTPSERVER_THREADPOOL_IDLE("hive.metastore.httpserver.threadpool.idle",
+ "hive.metastore.httpserver.threadpool.idle", 60_000L,
+ "HMS embedded HTTP server thread idle time."
),
// Deprecated Hive values that we are keeping for backwards compatibility.
diff --git a/standalone-metastore/metastore-rest-catalog/pom.xml b/standalone-metastore/metastore-rest-catalog/pom.xml
index 8202741fdded..43871e6f456b 100644
--- a/standalone-metastore/metastore-rest-catalog/pom.xml
+++ b/standalone-metastore/metastore-rest-catalog/pom.xml
@@ -18,7 +18,7 @@
  <modelVersion>4.0.0</modelVersion>
  <artifactId>hive-standalone-metastore-rest-catalog</artifactId>
- <name>Hive Metastore Iceberg REST Catalog</name>
+ <name>Hive Standalone Metastore REST Catalog</name>
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
similarity index 94%
rename from standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
rename to standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
index eb5886972eaf..6c214f2a13f6 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
@@ -31,15 +31,14 @@
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.hive.HiveCatalog;
-import org.eclipse.jetty.server.Server;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Iceberg Catalog server creator.
*/
-public class HMSCatalogServer {
- private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogServer.class);
+public class HMSCatalogFactory {
+ private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogFactory.class);
protected static final AtomicReference<Reference<Catalog>> catalogRef = new AtomicReference<>();
public static Catalog getLastCatalog() {
@@ -56,7 +55,7 @@ protected static void setLastCatalog(Catalog catalog) {
protected final String path;
protected Catalog catalog;
- protected HMSCatalogServer(Configuration conf, Catalog catalog) {
+ protected HMSCatalogFactory(Configuration conf, Catalog catalog) {
port = MetastoreConf.getIntVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT);
path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH);
this.configuration = conf;
@@ -120,12 +119,12 @@ protected HttpServlet createServlet() throws IOException {
* Factory method to describe Iceberg servlet.
* <p>This one is looked up through reflection to start from HMS.
*
- * @param conf the configuration
+ * @param configuration the configuration
* @return the servlet descriptor instance
*/
public static ServletServerBuilder.Descriptor createServlet(Configuration configuration) {
try {
- HMSCatalogServer hms = new HMSCatalogServer(configuration, null);
+ HMSCatalogFactory hms = new HMSCatalogFactory(configuration, null);
HttpServlet servlet = hms.createServlet();
if (servlet != null) {
return new ServletServerBuilder.Descriptor(hms.getPort(), hms.getPath(), servlet) {
@@ -134,7 +133,7 @@ public static ServletServerBuilder.Descriptor createServlet(Configuration config
}
};
}
- } catch (Exception exception) {
+ } catch (IOException exception) {
LOG.error("failed to create servlet ", exception);
}
return null;
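For completeness, a hedged snippet selecting this factory through the configuration key changed earlier in the series (after the rename it is already the default, so setting it explicitly is optional); the setter call is an assumption.

    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_FACTORY,
        "org.apache.iceberg.rest.HMSCatalogFactory");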
diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
index abcce301ca22..8fbaae049706 100644
--- a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
+++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
@@ -201,27 +201,25 @@ public void setUp() throws Exception {
Database db = new Database(DB_NAME, "catalog test", location, Collections.emptyMap());
client.createDatabase(db);
- int[] aport = { -1 };
- Catalog ice = acquireServer(aport);
+ Catalog ice = acquireServer();
catalog = ice;
nsCatalog = catalog instanceof SupportsNamespaces? (SupportsNamespaces) catalog : null;
- catalogPort = aport[0];
+ catalogPort = HiveMetaStore.getCatalogServletPort();
}
private static String format(String format, Object... params) {
return org.slf4j.helpers.MessageFormatter.arrayFormat(format, params).getMessage();
}
- private static Catalog acquireServer(int[] port) throws InterruptedException {
+ private static Catalog acquireServer() throws InterruptedException {
final int wait = 200;
- Server iceServer = HiveMetaStore.getIcebergServer();
+ Server iceServer = HiveMetaStore.getServletServer();
int tries = WAIT_FOR_SERVER / wait;
while(iceServer == null && tries-- > 0) {
Thread.sleep(wait);
- iceServer = HiveMetaStore.getIcebergServer();
+ iceServer = HiveMetaStore.getServletServer();
}
if (iceServer != null) {
- port[0] = iceServer.getURI().getPort();
boolean starting;
tries = WAIT_FOR_SERVER / wait;
while((starting = iceServer.isStarting()) && tries-- > 0) {
@@ -230,7 +228,7 @@ private static Catalog acquireServer(int[] port) throws InterruptedException {
if (starting) {
LOG.warn("server still starting after {}ms", WAIT_FOR_SERVER);
}
- Catalog ice = HMSCatalogServer.getLastCatalog();
+ Catalog ice = HMSCatalogFactory.getLastCatalog();
if (ice == null) {
throw new NullPointerException(format("unable to acquire catalog after {}ms", WAIT_FOR_SERVER));
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 1576cb6a2933..adb9c78fb698 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.metastore;
-import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.SynchronousQueue;
@@ -122,17 +121,39 @@ public class HiveMetaStore extends ThriftHiveMetastore {
private static ZooKeeperHiveHelper zooKeeperHelper = null;
private static String msHost = null;
private static ThriftServer thriftServer;
+ /** the servlet server. */
private static Server servletServer = null;
+ /** the port and path of the property servlet. */
+ private static int propertyServletPort = -1;
+ /** the port and path of the catalog servlet. */
+ private static int catalogServletPort = -1;
-
- public static Server getPropertyServer() {
+ /**
+ * Gets the embedded servlet server.
+ * @return the server instance or null
+ */
+ public static Server getServletServer() {
return servletServer;
}
- public static Server getIcebergServer() {
- return servletServer;
+ /**
+ * Gets the property servlet connector port.
+ * <p>If the configured port is 0, the actual port is allocated by the system.
+ * @return the connector port or -1 if not configured
+ */
+ public static int getPropertyServletPort() {
+ return propertyServletPort;
}
-
+
+ /**
+ * Gets the catalog servlet connector port.
+ * <p>If the configured port is 0, the actual port is allocated by the system.
+ * @return the connector port or -1 if not configured
+ */
+ public static int getCatalogServletPort() {
+ return catalogServletPort;
+ }
+
public static boolean isRenameAllowed(Database srcDB, Database destDB) {
if (!srcDB.getName().equalsIgnoreCase(destDB.getName())) {
if (ReplChangeManager.isSourceOfReplication(srcDB) || ReplChangeManager.isSourceOfReplication(destDB)) {
@@ -748,12 +769,24 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
}
}
// optionally create and start the property and Iceberg REST server
- servletServer = ServletServerBuilder.startServer(LOG, conf,
- PropertyServlet::createServlet,
- HiveMetaStore::createIcebergServlet);
+ ServletServerBuilder.Descriptor properties = PropertyServlet.createServlet(conf);
+ ServletServerBuilder.Descriptor catalog = createIcebergServlet(conf);
+ ServletServerBuilder builder = new ServletServerBuilder(conf);
+ builder.addServlet(properties);
+ builder.addServlet(catalog);
+ servletServer = builder.start(LOG);
+ if (servletServer != null) {
+ if (properties != null) {
+ propertyServletPort = properties.getPort();
+ }
+ if (catalog != null) {
+ catalogServletPort = catalog.getPort();
+ }
+ }
+ // main server
thriftServer.start();
}
-
+
/**
* Creates the Iceberg REST catalog servlet descriptor.
* @param configuration the configuration
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index 54d77ba46e3f..80354a20678e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -52,7 +52,7 @@ public class ServletServerBuilder {
/**
* Keeping track of descriptors.
*/
- private Map<Servlet, Descriptor> descriptorsMap = new IdentityHashMap<>();
+ private final Map<Servlet, Descriptor> descriptorsMap = new IdentityHashMap<>();
/**
* The configuration instance.
*/
@@ -93,6 +93,7 @@ public Descriptor(int port, String path, HttpServlet servlet) {
this.servlet = servlet;
}
+ @Override
public String toString() {
return servlet.getClass().getSimpleName() + ":" + port + "/" + path;
}
@@ -131,8 +132,10 @@ public Descriptor addServlet(int port, String path, HttpServlet servlet){
* @param descriptor a descriptor
* @return the descriptor
*/
- public Descriptor addServlet(Descriptor descriptor){
- descriptorsMap.put(descriptor.getServlet(), descriptor);
+ public Descriptor addServlet(Descriptor descriptor) {
+ if (descriptor != null) {
+ descriptorsMap.put(descriptor.getServlet(), descriptor);
+ }
return descriptor;
}
@@ -144,9 +147,9 @@ public Descriptor addServlet(Descriptor descriptor){
* @throws IOException if server creation fails
*/
protected Server createServer() throws IOException {
- final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_MAX);
- final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_MIN);
- final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.EMBEDDED_JETTY_THREADPOOL_IDLE);
+ final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MAX);
+ final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MIN);
+ final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_IDLE);
final QueuedThreadPool threadPool = new QueuedThreadPool(maxThreads, minThreads, idleTimeout);
Server server = new Server(threadPool);
server.setStopAtShutdown(true);
@@ -219,14 +222,12 @@ public Server startServer() throws Exception {
final Server server = createServer();
// create the connectors
final SslContextFactory sslFactory = ServletSecurity.createSslContextFactory(configuration);
- final int[] keys = new int[size];
final ServerConnector[] connectors = new ServerConnector[size];
final ServletContextHandler[] handlers = new ServletContextHandler[size];
Iterator<Map.Entry<Integer, ServletContextHandler>> it = handlersMap.entrySet().iterator();
for (int c = 0; it.hasNext(); ++c) {
Map.Entry<Integer, ServletContextHandler> entry = it.next();
int key = entry.getKey();
- keys[c] = key;
int port = key < 0? 0 : key;
ServerConnector connector = createConnector(server, sslFactory, port);
connectors[c] = connector;
@@ -263,51 +264,72 @@ public Server startServer() throws Exception {
}
/**
- * Helper for generic use case.
- * @param logger the logger
+ * Creates a builder.
* @param conf the configuration
- * @param describe the functions to create descriptors
- * @return a server instance
+ * @param describe the functions to call that create servlet descriptors
+ * @return the builder or null if no descriptors
*/
@SafeVarargs
- public static Server startServer(
- Logger logger,
- Configuration conf,
+ public static ServletServerBuilder builder(Configuration conf,
Function<Configuration, Descriptor>... describe) {
List<Descriptor> descriptors = new ArrayList<>();
Arrays.asList(describe).forEach(functor -> {
ServletServerBuilder.Descriptor descriptor = functor.apply(conf);
if (descriptor != null) {
descriptors.add(descriptor);
- };
+ }
});
if (!descriptors.isEmpty()) {
ServletServerBuilder builder = new ServletServerBuilder(conf);
descriptors.forEach(d -> builder.addServlet(d));
- try {
- Server server = builder.startServer();
- if (server != null) {
- if (!server.isStarted()) {
- logger.error("Unable to start property-maps servlet server on {}", server.getURI());
- } else {
- descriptors.forEach(descriptor -> {
+ return builder;
+ }
+ return null;
+ }
+
+ /**
+ * Creates and starts the server.
+ * @param logger a logger to output info
+ * @return the server instance (or null if error)
+ */
+ public Server start(Logger logger) {
+ try {
+ Server server = startServer();
+ if (server != null) {
+ if (!server.isStarted()) {
+ logger.error("Unable to start servlet server on {}", server.getURI());
+ } else {
+ descriptorsMap.values().forEach(descriptor -> {
logger.info("Started {} servlet on {}:{}",
descriptor.toString(),
- descriptor.getPort(),
- descriptor.getPath());
- });
- }
+ descriptor.getPort(),
+ descriptor.getPath());
+ });
}
- return server;
- } catch(Exception exception) {
- logger.error("Unable to start servlet server", exception);
- return null;
- } catch(Throwable throwable) {
- logger.error("Unable to start servlet server", throwable);
- return null;
}
+ return server;
+ } catch (Exception exception) {
+ logger.error("Unable to start servlet server", exception);
+ return null;
+ } catch (Throwable throwable) {
+ logger.error("Unable to start servlet server", throwable);
+ return null;
}
- return null;
+ }
+
+ /**
+ * Helper for generic use case.
+ * @param logger the logger
+ * @param conf the configuration
+ * @param describe the functions to create descriptors
+ * @return a server instance
+ */
+ @SafeVarargs
+ public static Server startServer(
+ Logger logger,
+ Configuration conf,
+ Function<Configuration, Descriptor>... describe) {
+ ServletServerBuilder builder = builder(conf, describe);
+ return builder == null ? null : builder.start(logger);
}
}
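A hedged usage sketch of the split builder()/start() API; since builder() may return null when no descriptor is produced, callers should guard before calling start() (conf and LOG are assumed to exist).

    ServletServerBuilder builder = ServletServerBuilder.builder(conf,
        PropertyServlet::createServlet);
    Server server = builder == null ? null : builder.start(LOG);
    if (server != null) {
      LOG.info("servlet server bound at {}", server.getURI());
    }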
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
index 75f670409225..b7728a3385ab 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
@@ -51,6 +51,7 @@
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
public class HMSServletTest extends HMSTestBase {
protected static final String CLI = "hmscli";
@@ -64,7 +65,7 @@ public class HMSServletTest extends HMSTestBase {
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = servletServer.getURI().getPort();
+ sport = HiveMetaStore.getPropertyServletPort();
}
return sport;
}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
index 5ff45d90dd82..88f55f82ac23 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
@@ -45,11 +45,11 @@ protected int createServer(Configuration conf) throws Exception {
.willReturn(ok()
.withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath()))));
thriftPort = MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf);
- servletServer = HiveMetaStore.getPropertyServer();
+ servletServer = HiveMetaStore.getServletServer();
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = servletServer.getURI().getPort();
+ sport = HiveMetaStore.getPropertyServletPort();
return sport;
}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
index 10a54457ab12..22f245330b55 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
@@ -43,11 +43,11 @@ protected int createServer(Configuration conf) throws Exception {
.willReturn(ok()
.withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath()))));
thriftPort = MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), conf);
- servletServer = HiveMetaStore.getPropertyServer();
+ servletServer = HiveMetaStore.getServletServer();
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = servletServer.getURI().getPort();
+ sport = HiveMetaStore.getPropertyServletPort();
return sport;
}
From 77fb8672821b1d4525aeb0bdfbe15dcf56ca8258 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Sun, 23 Feb 2025 19:37:23 +0100
Subject: [PATCH 32/40] HIVE-28059 : fixing property servlet tests; - catalog &
property servlet ports shall not be confused; - some property tests were not
referring to the proper port; - no tests were executed since they lacked the
proper annotation (MetastoreUnitTest); - nits on ServletServerBuilder;
---
.../hive/metastore/ServletServerBuilder.java | 10 +++-
.../metastore/properties/HMSDirectTest.java | 21 +++----
.../metastore/properties/HMSServletTest.java | 58 +++++++++++--------
.../metastore/properties/HMSServletTest1.java | 26 +++++----
.../properties/HMSServletTest1A.java | 11 ++--
.../metastore/properties/HMSServletTestA.java | 10 ++--
.../metastore/properties/HMSTestBase.java | 45 +++++++-------
.../metastore/properties/HMSThriftTest.java | 17 +++---
.../properties/PropertyStoreTest.java | 6 +-
9 files changed, 111 insertions(+), 93 deletions(-)
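Tied to the note above about not confusing the two ports, a hedged sketch pinning each servlet to its own explicit port and reading the bound ports back through the new accessors; the setter calls are assumptions and the port numbers are arbitrary examples.

    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT, 9093);      // example value
    MetastoreConf.setLongVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT, 9094); // example value
    // after HMS startup:
    int propertyPort = HiveMetaStore.getPropertyServletPort();
    int catalogPort = HiveMetaStore.getCatalogServletPort();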
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index 80354a20678e..46baad72fdc1 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -109,6 +109,10 @@ public String getPath() {
public HttpServlet getServlet() {
return servlet;
}
+
+ void setPort(int port) {
+ this.port = port;
+ }
}
/**
@@ -180,7 +184,7 @@ protected ServerConnector createConnector(Server server, SslContextFactory sslCo
}
/**
- * Adds a servlet to its intended servlet context handler.
+ * Adds a servlet to its intended servlet context.
* @param handlersMap the map of port to handlers
* @param descriptor the servlet descriptor
* @throws IOException
@@ -245,7 +249,7 @@ public Server startServer() throws Exception {
server.setHandler(portHandler);
// start the server
server.start();
- // collect auto ports
+ // collect automatically assigned connector ports
for (int i = 0; i < connectors.length; ++i) {
int port = connectors[i].getLocalPort();
ServletContextHandler handler = handlers[i];
@@ -255,7 +259,7 @@ public Server startServer() throws Exception {
if (servlet != null) {
Descriptor descriptor = descriptorsMap.get(servlet);
if (descriptor != null) {
- descriptor.port = port;
+ descriptor.setPort(port);
}
}
}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java
index 7c3c77451649..3e7c0cd2e600 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSDirectTest.java
@@ -17,32 +17,33 @@
*/
package org.apache.hadoop.hive.metastore.properties;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HMSHandler;
import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.TreeMap;
-
import static org.apache.hadoop.hive.metastore.properties.PropertyType.DATETIME;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.DOUBLE;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.INTEGER;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.STRING;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
/**
* In-process property manager test.
*/
+@Category(MetastoreUnitTest.class)
public class HMSDirectTest extends HMSTestBase {
protected ObjectStore objectStore = null;
static Random RND = new Random(20230424);
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
index b7728a3385ab..51c4e26727d2 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
@@ -18,8 +18,25 @@
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.PropertyServlet;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
@@ -34,30 +51,21 @@
import org.apache.http.message.BasicNameValuePair;
import org.eclipse.jetty.server.Server;
import org.junit.Assert;
+import org.junit.Before;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
-import javax.servlet.http.HttpServletResponse;
-import java.io.BufferedReader;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.HttpURLConnection;
-import java.net.URI;
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import org.apache.hadoop.hive.metastore.HiveMetaStore;
-
+@Category(MetastoreUnitTest.class)
public class HMSServletTest extends HMSTestBase {
- protected static final String CLI = "hmscli";
+ String path = null;
Server servletServer = null;
int sport = -1;
-
+
+ @Before
+ public void setUp() throws Exception {
+ super.setUp();
+ path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
+ }
@Override protected int createServer(Configuration conf) throws Exception {
if (servletServer == null) {
@@ -65,7 +73,7 @@ public class HMSServletTest extends HMSTestBase {
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = HiveMetaStore.getPropertyServletPort();
+ sport = servletServer.getURI().getPort();
}
return sport;
}
@@ -82,9 +90,11 @@ public class HMSServletTest extends HMSTestBase {
}
}
+
@Override
protected PropertyClient createClient(Configuration conf, int sport) throws Exception {
- URL url = new URL("http://hive@localhost:" + sport + "/" + CLI + "/" + NS);
+ String path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
+ URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS);
String jwt = generateJWT();
return new JSonClient(jwt, url);
}
@@ -144,7 +154,7 @@ public Map getProperties(List selection) {
@Test
public void testServletEchoA() throws Exception {
- URL url = new URL("http://hive@localhost:" + sport + "/" + CLI + "/" + NS);
+ URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS);
Map json = Collections.singletonMap("method", "echo");
String jwt = generateJWT();
// succeed
@@ -177,7 +187,7 @@ public void testProperties0() throws Exception {
.setUserInfo("hive")
.setHost("localhost")
.setPort(sport)
- .setPath("/" + CLI + "/" + NS)
+ .setPath("/" + path + "/" + NS)
.setParameters(nvp)
.build();
HttpGet get = new HttpGet(uri);
@@ -293,7 +303,7 @@ public static Object clientCall(String jwt, URL url, String method, Object arg)
* @throws Exception
*/
private HttpPost createPost(String jwt, String msgBody) {
- HttpPost method = new HttpPost("http://hive@localhost:" + sport + "/" + CLI + "/" + NS);
+ HttpPost method = new HttpPost("http://hive@localhost:" + sport + "/" + path + "/" + NS);
method.addHeader("Authorization", "Bearer " + jwt);
method.addHeader("Content-Type", "application/json");
method.addHeader("Accept", "application/json");
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java
index db15d52e12d4..1a096e38f30c 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java
@@ -18,7 +18,18 @@
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
@@ -29,18 +40,9 @@
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.HttpClients;
+import org.junit.experimental.categories.Category;
-import javax.servlet.http.HttpServletResponse;
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
+@Category(MetastoreUnitTest.class)
public class HMSServletTest1 extends HMSServletTest {
@Override
public void tearDown() throws Exception {
@@ -52,7 +54,7 @@ public void tearDown() throws Exception {
@Override
protected PropertyClient createClient(Configuration conf, int sport) throws Exception {
- URL url = new URL("http://hive@localhost:" + sport + "/" + CLI + "/" + NS);
+ URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS);
String jwt = generateJWT();
return new JSonHttpClient(jwt, url.toString());
}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
index 88f55f82ac23..1cf4b3e4e26e 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
@@ -16,22 +16,23 @@
*/
package org.apache.hadoop.hive.metastore.properties;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import java.nio.file.Files;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.junit.Assert;
-
-import java.nio.file.Files;
-
-import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import org.junit.experimental.categories.Category;
/**
* Test using the servlet server created by the MetaStore and
* the client based on Apache HttpClient.
*/
+@Category(MetastoreUnitTest.class)
public class HMSServletTest1A extends HMSServletTest1 {
protected int thriftPort;
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
index 22f245330b55..41a2ba06233d 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
@@ -16,20 +16,22 @@
*/
package org.apache.hadoop.hive.metastore.properties;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import java.nio.file.Files;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.junit.Assert;
-import java.nio.file.Files;
-
-import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import org.junit.experimental.categories.Category;
/**
* Test using the servlet server created by the MetaStore.
*/
+@Category(MetastoreUnitTest.class)
public class HMSServletTestA extends HMSServletTest {
protected int thriftPort;
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
index 4023076c04da..60d239bcbcbf 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSTestBase.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hive.metastore.properties;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jose.JWSHeader;
@@ -25,25 +27,6 @@
import com.nimbusds.jose.jwk.RSAKey;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.SignedJWT;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.jexl3.JxltEngine;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
-import org.apache.hadoop.hive.metastore.ObjectStore;
-import org.apache.hadoop.hive.metastore.TestObjectStore;
-
-import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static com.github.tomakehurst.wiremock.client.WireMock.ok;
-import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpStatus;
-import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpType;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
@@ -58,16 +41,30 @@
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
-
-import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.JEXL;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.jexl3.JxltEngine;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.TestObjectStore;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MAINTENANCE_OPERATION;
import static org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MAINTENANCE_STATUS;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpStatus;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager.MaintenanceOpType;
+import static org.apache.hadoop.hive.metastore.properties.PropertyManager.JEXL;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.BOOLEAN;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.DATETIME;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.DOUBLE;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.INTEGER;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.JSON;
import static org.apache.hadoop.hive.metastore.properties.PropertyType.STRING;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public abstract class HMSTestBase {
protected static final String baseDir = System.getProperty("basedir");
@@ -87,12 +84,12 @@ public abstract class HMSTestBase {
/**
* Abstract the property client access on a given namespace.
*/
- interface PropertyClient {
+ protected interface PropertyClient {
boolean setProperties(Map properties);
Map> getProperties(String mapPrefix, String mapPredicate, String... selection) throws IOException;
}
- interface HttpPropertyClient extends PropertyClient {
+ protected interface HttpPropertyClient extends PropertyClient {
default Map getProperties(List selection) throws IOException {
throw new UnsupportedOperationException("not implemented in " + this.getClass());
}
@@ -202,6 +199,8 @@ protected void stopServer(int port) throws Exception {
/**
* Creates a client.
+ * @param conf the configuration
+ * @param port the servlet port
* @return the client instance
* @throws Exception
*/
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java
index 33354ad17b54..b7fa65d6d771 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSThriftTest.java
@@ -17,17 +17,19 @@
*/
package org.apache.hadoop.hive.metastore.properties;
+import java.io.IOException;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.thrift.TException;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
-import java.io.IOException;
-import java.util.Map;
-
+@Category(MetastoreUnitTest.class)
public class HMSThriftTest extends HMSTestBase {
/**
* A Thrift based property client.
@@ -67,16 +69,11 @@ public Map> getProperties(String mapPrefix, String m
MetaStoreTestUtils.close(port);
}
- /**
- * Creates a client.
- * @return the client instance
- * @throws Exception
- */
@Override protected PropertyClient createClient(Configuration conf, int port) throws Exception {
MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_URIS, "http://localhost:" + port);
MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.EXECUTE_SET_UGI, false);
- HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
- return new ThriftPropertyClient(NS, client);
+ HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(conf);
+ return new ThriftPropertyClient(NS, hiveClient);
}
@Test
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java
index 50ab770aaabf..1ef1c2119194 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/PropertyStoreTest.java
@@ -18,12 +18,14 @@
package org.apache.hadoop.hive.metastore.properties;
import com.google.common.base.Supplier;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HMSHandler;
import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
import org.apache.hadoop.hive.metastore.ObjectStore;
import org.apache.hadoop.hive.metastore.TestObjectStore;
import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.model.MMetastoreDBProperties;
@@ -31,11 +33,11 @@
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.nio.charset.StandardCharsets;
-
+@Category(MetastoreUnitTest.class)
public class PropertyStoreTest {
private ObjectStore objectStore = null;
private Configuration conf;
From 447f48d5b58575aacb5adfa8d03d4869595650ac Mon Sep 17 00:00:00 2001
From: Henrib
Date: Mon, 24 Feb 2025 08:36:18 +0100
Subject: [PATCH 33/40] HIVE-28059 : adding REST catalog to packaging;
---
packaging/pom.xml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/packaging/pom.xml b/packaging/pom.xml
index cfc37f087f90..f7c6ac7aee96 100644
--- a/packaging/pom.xml
+++ b/packaging/pom.xml
@@ -424,6 +424,11 @@
<artifactId>hive-webhcat-java-client</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-standalone-metastore-rest-catalog</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
From b99d91ae76706ccf302d329d69b3f0b7f145b579 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Wed, 26 Feb 2025 21:06:36 +0100
Subject: [PATCH 34/40] HIVE-28059 : addressing easiest pr comments;
---
.../metastore-rest-catalog/pom.xml | 14 +-
.../iceberg/rest/HMSCatalogAdapter.java | 16 +-
.../iceberg/rest/HMSCatalogFactory.java | 2 +-
.../org/apache/iceberg/rest/HMSTestBase.java | 15 +-
.../hadoop/hive/metastore/HiveMetaStore.java | 4 +-
.../hive/metastore/PropertyServlet.java | 97 ++++----
.../hive/metastore/ServletSecurity.java | 10 +-
.../hive/metastore/ServletServerBuilder.java | 220 +++++++++---------
.../metastore/properties/HMSTestBase.java | 24 +-
9 files changed, 195 insertions(+), 207 deletions(-)
diff --git a/standalone-metastore/metastore-rest-catalog/pom.xml b/standalone-metastore/metastore-rest-catalog/pom.xml
index 43871e6f456b..7b09557ef878 100644
--- a/standalone-metastore/metastore-rest-catalog/pom.xml
+++ b/standalone-metastore/metastore-rest-catalog/pom.xml
@@ -17,8 +17,8 @@
<version>4.1.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
- <artifactId>hive-standalone-metastore-rest-catalog</artifactId>
- <name>Hive Standalone Metastore REST Catalog</name>
+ <artifactId>hive-metastore-rest-catalog</artifactId>
+ <name>Hive Metastore REST Catalog</name>
@@ -39,16 +39,6 @@
<artifactId>hive-standalone-metastore-common</artifactId>
<version>${hive.version}</version>
</dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-iceberg-shading</artifactId>
- <version>${hive.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hive</groupId>
- <artifactId>hive-iceberg-handler</artifactId>
- <version>${hive.version}</version>
- </dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-iceberg-catalog</artifactId>
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
index 063dbb59e311..cc2738008f75 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
@@ -110,6 +110,8 @@ public class HMSCatalogAdapter implements RESTClient {
private static final String CLIENT_ID = "client_id";
private static final String ACTOR_TOKEN = "actor_token";
private static final String SUBJECT_TOKEN = "subject_token";
+ private static final String VIEWS_PATH = "v1/namespaces/{namespace}/views/{name}";
+ private static final String TABLES_PATH = "v1/namespaces/{namespace}/tables/{table}";
private final Catalog catalog;
private final SupportsNamespaces asNamespaceCatalog;
@@ -150,13 +152,13 @@ enum Route {
null, ListTablesResponse.class),
CREATE_TABLE(HTTPMethod.POST, "v1/namespaces/{namespace}/tables",
CreateTableRequest.class, LoadTableResponse.class),
- LOAD_TABLE(HTTPMethod.GET, "v1/namespaces/{namespace}/tables/{table}",
+ LOAD_TABLE(HTTPMethod.GET, TABLES_PATH,
null, LoadTableResponse.class),
REGISTER_TABLE(HTTPMethod.POST, "v1/namespaces/{namespace}/register",
RegisterTableRequest.class, LoadTableResponse.class),
- UPDATE_TABLE(HTTPMethod.POST, "v1/namespaces/{namespace}/tables/{table}",
+ UPDATE_TABLE(HTTPMethod.POST, TABLES_PATH,
UpdateTableRequest.class, LoadTableResponse.class),
- DROP_TABLE(HTTPMethod.DELETE, "v1/namespaces/{namespace}/tables/{table}"),
+ DROP_TABLE(HTTPMethod.DELETE, TABLES_PATH),
RENAME_TABLE(HTTPMethod.POST, "v1/tables/rename",
RenameTableRequest.class, null),
REPORT_METRICS(HTTPMethod.POST, "v1/namespaces/{namespace}/tables/{table}/metrics",
@@ -165,15 +167,15 @@ enum Route {
CommitTransactionRequest.class, null),
LIST_VIEWS(HTTPMethod.GET, "v1/namespaces/{namespace}/views",
null, ListTablesResponse.class),
- LOAD_VIEW(HTTPMethod.GET, "v1/namespaces/{namespace}/views/{name}",
+ LOAD_VIEW(HTTPMethod.GET, VIEWS_PATH,
null, LoadViewResponse.class),
CREATE_VIEW(HTTPMethod.POST, "v1/namespaces/{namespace}/views",
CreateViewRequest.class, LoadViewResponse.class),
- UPDATE_VIEW(HTTPMethod.POST, "v1/namespaces/{namespace}/views/{name}",
+ UPDATE_VIEW(HTTPMethod.POST, VIEWS_PATH,
UpdateTableRequest.class, LoadViewResponse.class),
RENAME_VIEW(HTTPMethod.POST, "v1/views/rename",
RenameTableRequest.class, null),
- DROP_VIEW(HTTPMethod.DELETE, "v1/namespaces/{namespace}/views/{name}");
+ DROP_VIEW(HTTPMethod.DELETE, VIEWS_PATH);
private final HTTPMethod method;
private final int requiredLength;
@@ -525,7 +527,7 @@ private static void commitTransaction(Catalog catalog, CommitTransactionRequest
transactions.forEach(Transaction::commitTransaction);
}
- @SuppressWarnings("MethodLength")
+ @SuppressWarnings({"MethodLength", "unchecked"})
private T handleRequest(
Route route, Map vars, Object body) {
// update HMS catalog route counter metric
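For context, each Route constant above pairs an HTTP method with a path template such as v1/namespaces/{namespace}/tables/{table}. A rough illustration of how such a template can be matched against a concrete path and its placeholders bound (the match helper below is illustrative only, not the adapter's actual routing code):

    import java.util.HashMap;
    import java.util.Map;

    public class PathTemplateExample {
      // Matches a path against a template, binding {placeholders}; returns null on mismatch.
      static Map<String, String> match(String template, String path) {
        String[] t = template.split("/");
        String[] p = path.split("/");
        if (t.length != p.length) {
          return null;
        }
        Map<String, String> vars = new HashMap<>();
        for (int i = 0; i < t.length; i++) {
          if (t[i].startsWith("{") && t[i].endsWith("}")) {
            vars.put(t[i].substring(1, t[i].length() - 1), p[i]);
          } else if (!t[i].equals(p[i])) {
            return null;
          }
        }
        return vars;
      }

      public static void main(String[] args) {
        // prints {namespace=hivedb, table=t1}
        System.out.println(match("v1/namespaces/{namespace}/tables/{table}", "v1/namespaces/hivedb/tables/t1"));
      }
    }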
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
index 6c214f2a13f6..1d6d575ac6f2 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
@@ -94,7 +94,7 @@ protected Catalog createCatalog() {
final String catalogName = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.CATALOG_DEFAULT);
catalog.initialize(catalogName, properties);
long expiry = MetastoreConf.getLongVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_CACHE_EXPIRY);
- return expiry > 0 ? new HMSCachingCatalog(catalog, expiry) : catalog;
+ return expiry > 0 ? new HMSCachingCatalog<>(catalog, expiry) : catalog;
}
protected HttpServlet createServlet(Catalog catalog) throws IOException {
diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
index 8fbaae049706..d6b48a84dec4 100644
--- a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
+++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java
@@ -369,15 +369,14 @@ public static Object clientCall(String jwt, URL url, String method, boolean json
con.setDoInput(true);
if (arg != null) {
con.setDoOutput(true);
- DataOutputStream wr = new DataOutputStream(con.getOutputStream());
- if (json) {
- String outjson = serialize(arg);
- wr.writeBytes(outjson);
- } else {
- wr.writeBytes(arg.toString());
+ try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
+ if (json) {
+ wr.writeBytes(serialize(arg));
+ } else {
+ wr.writeBytes(arg.toString());
+ }
+ wr.flush();
}
- wr.flush();
- wr.close();
}
// perform http method
return httpResponse(con);
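The rewrite above puts the connection's output stream in try-with-resources so it is flushed and closed even when serialization throws. A minimal stand-alone sketch of the same pattern against a plain HttpURLConnection (the post helper is hypothetical):

    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class PostBodyExample {
      // Writes a request body; try-with-resources closes the stream on success and on failure.
      static int post(URL url, String body) throws IOException {
        HttpURLConnection con = (HttpURLConnection) url.openConnection();
        con.setRequestMethod("POST");
        con.setDoOutput(true);
        try (DataOutputStream out = new DataOutputStream(con.getOutputStream())) {
          out.writeBytes(body);   // closed (and flushed) automatically
        }
        return con.getResponseCode();
      }
    }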
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index adb9c78fb698..503f15702c42 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -769,10 +769,10 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
}
}
// optionally create and start the property and Iceberg REST server
- ServletServerBuilder.Descriptor properties = PropertyServlet.createServlet(conf);
- ServletServerBuilder.Descriptor catalog = createIcebergServlet(conf);
ServletServerBuilder builder = new ServletServerBuilder(conf);
+ ServletServerBuilder.Descriptor properties = PropertyServlet.createServlet(conf);
builder.addServlet(properties);
+ ServletServerBuilder.Descriptor catalog = createIcebergServlet(conf);
builder.addServlet(catalog);
servletServer = builder.start(LOG);
if (servletServer != null) {
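Taken together, the reordering above creates the builder first and registers each optional descriptor as it is produced. A hedged sketch of that startup sequence, assuming the builder and factory signatures introduced in this patch series (the wrapper class and method names are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.PropertyServlet;
    import org.apache.hadoop.hive.metastore.ServletServerBuilder;
    import org.eclipse.jetty.server.Server;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ServletStartupSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ServletStartupSketch.class);

      // Mirrors the ordering above: build first, add descriptors as they are produced, start last.
      static Server startOptionalServlets(Configuration conf) {
        ServletServerBuilder builder = new ServletServerBuilder(conf);
        ServletServerBuilder.Descriptor properties = PropertyServlet.createServlet(conf);
        if (properties != null) {        // createServlet returns null when the servlet is disabled
          builder.addServlet(properties);
        }
        // HiveMetaStore also adds the Iceberg catalog descriptor here, resolved reflectively
        // because the rest-catalog module may not be on the classpath.
        return builder.start(LOG);       // may return null; HiveMetaStore null-checks the result
      }
    }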
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index c5893ca4f1f8..fab395df577a 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -60,16 +60,12 @@ public class PropertyServlet extends HttpServlet {
/** The configuration. */
private final Configuration configuration;
- static boolean isAuthJwt(Configuration configuration) {
- String auth = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_AUTH);
- return "jwt".equalsIgnoreCase(auth);
- }
-
PropertyServlet(Configuration configuration) {
this.configuration = configuration;
}
- @Override public String getServletName() {
+ @Override
+ public String getServletName() {
return "HMS property";
}
@@ -166,44 +162,12 @@ protected void doPost(HttpServletRequest request,
switch (method) {
// fetch a list of qualified keys by name
case "fetchProperties": {
- // one or many keys
- Object jsonKeys = call.get("keys");
- if (jsonKeys == null) {
- throw new IllegalArgumentException("null keys");
- }
- Iterable> keys = jsonKeys instanceof List>
- ? (List>) jsonKeys
- : Collections.singletonList(jsonKeys);
- Map properties = new TreeMap<>();
- for (Object okey : keys) {
- String key = okey.toString();
- String value = mgr.exportPropertyValue(key);
- if (value != null) {
- properties.put(key, value);
- }
- }
- reactions.add(properties);
+ fetchProperties(mgr, call, reactions);
break;
}
// select a list of qualified keys by prefix/predicate/selection
case "selectProperties": {
- String prefix = (String) call.get("prefix");
- if (prefix == null) {
- throw new IllegalArgumentException("null prefix");
- }
- String predicate = (String) call.get("predicate");
- // selection may be null, a sole property or a list
- Object selection = call.get("selection");
- @SuppressWarnings("unchecked") List project =
- selection == null
- ? null
- : selection instanceof List>
- ? (List) selection
- : Collections.singletonList(selection.toString());
- Map selected = mgr.selectProperties(prefix, predicate, project);
- Map> returned = new TreeMap<>();
- selected.forEach((k, v) -> returned.put(k, v.export(project == null)));
- reactions.add(returned);
+ selectProperties(mgr, call, reactions);
break;
}
case "script": {
@@ -237,18 +201,45 @@ protected void doPost(HttpServletRequest request,
}
}
-// A way to import values using files sent over http
-// private void importProperties(HttpServletRequest request) throws ServletException, IOException {
-// List fileParts = request.getParts().stream()
-// .filter(part -> "files".equals(part.getName()) && part.getSize() > 0)
-// .collect(Collectors.toList()); // Retrieves
-//
-// for (Part filePart : fileParts) {
-// String fileName = Paths.get(filePart.getSubmittedFileName()).getFileName().toString(); // MSIE fix.
-// InputStream fileContent = filePart.getInputStream();
-// // ... (do your job here)
-// }
-// }
+ private static void fetchProperties(PropertyManager mgr, Map call, List reactions) {
+ // one or many keys
+ Object jsonKeys = call.get("keys");
+ if (jsonKeys == null) {
+ throw new IllegalArgumentException("null keys");
+ }
+ Iterable> keys = jsonKeys instanceof List>
+ ? (List>) jsonKeys
+ : Collections.singletonList(jsonKeys);
+ Map properties = new TreeMap<>();
+ for (Object okey : keys) {
+ String key = okey.toString();
+ String value = mgr.exportPropertyValue(key);
+ if (value != null) {
+ properties.put(key, value);
+ }
+ }
+ reactions.add(properties);
+ }
+
+ private static void selectProperties(PropertyManager mgr, Map call, List reactions) {
+ String prefix = (String) call.get("prefix");
+ if (prefix == null) {
+ throw new IllegalArgumentException("null prefix");
+ }
+ String predicate = (String) call.get("predicate");
+ // selection may be null, a sole property or a list
+ Object selection = call.get("selection");
+ @SuppressWarnings("unchecked") List project =
+ selection == null
+ ? null
+ : selection instanceof List>
+ ? (List) selection
+ : Collections.singletonList(selection.toString());
+ Map selected = mgr.selectProperties(prefix, predicate, project);
+ Map> returned = new TreeMap<>();
+ selected.forEach((k, v) -> returned.put(k, v.export(project == null)));
+ reactions.add(returned);
+ }
@Override
protected void doPut(HttpServletRequest request,
@@ -322,7 +313,7 @@ public static ServletServerBuilder.Descriptor createServlet(Configuration config
int port = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PORT);
String path = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
if (port >= 0 && path != null && !path.isEmpty()) {
- ServletSecurity security = new ServletSecurity(configuration, PropertyServlet.isAuthJwt(configuration));
+ ServletSecurity security = new ServletSecurity(configuration);
HttpServlet servlet = security.proxy(new PropertyServlet(configuration));
return new ServletServerBuilder.Descriptor(port, path, servlet) {
@Override public String toString() {
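For reference, the two handlers extracted above read their arguments from the posted JSON map: fetchProperties expects "keys" (a single value or a list), selectProperties expects "prefix" plus an optional "predicate" and "selection". A small sketch of the maps a client could serialize and POST (the property keys and the predicate value are invented for illustration; only the field names come from the handlers above):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class PropertyCallSketch {
      // "fetchProperties" call body: only the "keys" field is read by the handler.
      static Map<String, Object> fetchCall() {
        Map<String, Object> call = new HashMap<>();
        call.put("method", "fetchProperties");
        call.put("keys", Arrays.asList("ns.table1.someProperty", "ns.table2.someProperty")); // hypothetical keys
        return call;
      }

      // "selectProperties" call body: prefix is required, predicate and selection are optional.
      static Map<String, Object> selectCall() {
        Map<String, Object> call = new HashMap<>();
        call.put("method", "selectProperties");
        call.put("prefix", "ns");
        call.put("predicate", "someProperty == 'someValue'"); // hypothetical JEXL-style predicate
        call.put("selection", Arrays.asList("someProperty"));
        return call;
      }
    }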
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index 71c9b20fb80e..197c56e057c2 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -86,8 +86,7 @@ public class ServletSecurity {
private JWTValidator jwtValidator = null;
public ServletSecurity(Configuration conf) {
- this(conf, MetastoreConf.getVar(conf,
- MetastoreConf.ConfVars.THRIFT_METASTORE_AUTHENTICATION).equalsIgnoreCase("jwt"));
+ this(conf, isAuthJwt(conf));
}
public ServletSecurity(Configuration conf, boolean jwt) {
@@ -96,6 +95,11 @@ public ServletSecurity(Configuration conf, boolean jwt) {
this.jwtAuthEnabled = jwt;
}
+ public static boolean isAuthJwt(Configuration configuration) {
+ String auth = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.PROPERTIES_SERVLET_AUTH);
+ return "jwt".equalsIgnoreCase(auth);
+ }
+
/**
* Should be called in Servlet.init()
* @throws ServletException if the jwt validator creation throws an exception
@@ -284,7 +288,7 @@ public static SslContextFactory createSslContextFactory(Configuration conf) thro
}
final String keyStorePath = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.SSL_KEYSTORE_PATH).trim();
if (keyStorePath.isEmpty()) {
- throw new IllegalArgumentException(MetastoreConf.ConfVars.SSL_KEYSTORE_PATH.toString()
+ throw new IllegalArgumentException(MetastoreConf.ConfVars.SSL_KEYSTORE_PATH
+ " Not configured for SSL connection");
}
final String keyStorePassword =
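As a side note, the constructor change above routes the no-argument case through isAuthJwt(conf), so these servlets are JWT-protected exactly when PROPERTIES_SERVLET_AUTH is set to "jwt". A trivial stand-alone restatement of that check (sketch only):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

    public class JwtToggleSketch {
      // Same decision as ServletSecurity.isAuthJwt(): case-insensitive match on the auth property.
      static boolean jwtEnabled(Configuration conf) {
        String auth = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_AUTH);
        return "jwt".equalsIgnoreCase(auth);
      }
    }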
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index 46baad72fdc1..9f6ac2b76661 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -18,17 +18,6 @@
*/
package org.apache.hadoop.hive.metastore;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.IdentityHashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Function;
-import javax.servlet.Servlet;
-import javax.servlet.http.HttpServlet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.eclipse.jetty.server.HttpConfiguration;
@@ -44,19 +33,31 @@
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.slf4j.Logger;
+import javax.servlet.Servlet;
+import javax.servlet.http.HttpServlet;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
/**
* Helper class to ease creation of embedded Jetty serving servlets on
* different ports.
*/
public class ServletServerBuilder {
- /**
- * Keeping track of descriptors.
- */
- private final Map descriptorsMap = new IdentityHashMap<>();
/**
* The configuration instance.
*/
protected final Configuration configuration;
+ /**
+ * Keeping track of descriptors.
+ */
+ private final Map descriptorsMap = new IdentityHashMap<>();
/**
* Creates a builder instance.
@@ -67,52 +68,49 @@ protected ServletServerBuilder(Configuration conf) {
this.configuration = conf;
}
- public Configuration getConfiguration() {
- return configuration;
- }
-
/**
- * A descriptor of a servlet.
- * <p>After server is started, unspecified port will be updated to reflect
- * what the system allocated.
+ * Creates a builder.
+ *
+ * @param conf the configuration
+ * @param describe the functions to call that create servlet descriptors
+ * @return the builder or null if no descriptors
*/
- public static class Descriptor {
- private int port;
- private final String path;
- private final HttpServlet servlet;
-
- /**
- * Create a servlet descriptor.
- * @param port the servlet port (or 0 if system allocated)
- * @param path the servlet path
- * @param servlet the servlet instance
- */
- public Descriptor(int port, String path, HttpServlet servlet) {
- this.port = port;
- this.path = path;
- this.servlet = servlet;
- }
-
- @Override
- public String toString() {
- return servlet.getClass().getSimpleName() + ":" + port+ "/"+ path ;
- }
-
- public int getPort() {
- return port;
- }
-
- public String getPath() {
- return path;
+ @SafeVarargs
+ public static ServletServerBuilder builder(Configuration conf,
+ Function... describe) {
+ List descriptors = new ArrayList();
+ Arrays.asList(describe).forEach(functor -> {
+ ServletServerBuilder.Descriptor descriptor = functor.apply(conf);
+ if (descriptor != null) {
+ descriptors.add(descriptor);
+ }
+ });
+ if (!descriptors.isEmpty()) {
+ ServletServerBuilder builder = new ServletServerBuilder(conf);
+ descriptors.forEach(d -> builder.addServlet(d));
+ return builder;
}
+ return null;
+ }
- public HttpServlet getServlet() {
- return servlet;
- }
+ /**
+ * Helper for generic use case.
+ *
+ * @param logger the logger
+ * @param conf the configuration
+ * @param describe the functions to create descriptors
+ * @return a server instance
+ */
+ @SafeVarargs
+ public static Server startServer(
+ Logger logger,
+ Configuration conf,
+ Function... describe) {
+ return builder(conf, describe).start(logger);
+ }
- void setPort(int port) {
- this.port = port;
- }
+ public Configuration getConfiguration() {
+ return configuration;
}
/**
@@ -120,13 +118,14 @@ void setPort(int port) {
* <p>The servlet port can be shared between servlets; if 0, the system will provide
* a port. If the port is < 0, the system will provide a port dedicated (ie non-shared)
* to the servlet.
- * @param port the servlet port
- * @param path the servlet path
+ *
+ * @param port the servlet port
+ * @param path the servlet path
* @param servlet a servlet instance
* @return a descriptor
*/
- public Descriptor addServlet(int port, String path, HttpServlet servlet){
- Descriptor descriptor = new Descriptor(port, path, servlet);
+ public Descriptor addServlet(int port, String path, HttpServlet servlet) {
+ Descriptor descriptor = new Descriptor(port, path, servlet);
return addServlet(descriptor);
}
@@ -163,9 +162,9 @@ protected Server createServer() throws IOException {
/**
* Creates a server instance and a connector on a given port.
*
- * @param server the server instance
+ * @param server the server instance
* @param sslContextFactory the ssl factory
- * @param port the port
+ * @param port the port
* @return the server connector listening to the port
* @throws IOException if server creation fails
*/
@@ -185,8 +184,9 @@ protected ServerConnector createConnector(Server server, SslContextFactory sslCo
/**
* Adds a servlet to its intended servlet context.
+ *
* @param handlersMap the map of port to handlers
- * @param descriptor the servlet descriptor
+ * @param descriptor the servlet descriptor
* @throws IOException
*/
protected void addServlet(Map handlersMap, Descriptor descriptor) throws IOException {
@@ -216,7 +216,7 @@ protected void addServlet(Map handlersMap, Descr
public Server startServer() throws Exception {
// add all servlets
Map handlersMap = new HashMap<>();
- for(Descriptor descriptor : descriptorsMap.values()) {
+ for (Descriptor descriptor : descriptorsMap.values()) {
addServlet(handlersMap, descriptor);
}
final int size = handlersMap.size();
@@ -232,15 +232,15 @@ public Server startServer() throws Exception {
for (int c = 0; it.hasNext(); ++c) {
Map.Entry entry = it.next();
int key = entry.getKey();
- int port = key < 0? 0 : key;
+ int port = Math.max(key, 0);
ServerConnector connector = createConnector(server, sslFactory, port);
connectors[c] = connector;
ServletContextHandler handler = entry.getValue();
handlers[c] = handler;
// make each servlet context be served only by its dedicated connector
- String host = "hms" + Integer.toString(c);
+ String host = "hms" + c;
connector.setName(host);
- handler.setVirtualHosts(new String[]{"@"+host});
+ handler.setVirtualHosts(new String[]{"@" + host});
}
// hook the connectors and the handlers
server.setConnectors(connectors);
@@ -254,7 +254,7 @@ public Server startServer() throws Exception {
int port = connectors[i].getLocalPort();
ServletContextHandler handler = handlers[i];
ServletHolder[] holders = handler.getServletHandler().getServlets();
- for(ServletHolder holder : holders) {
+ for (ServletHolder holder : holders) {
Servlet servlet = holder.getServletInstance();
if (servlet != null) {
Descriptor descriptor = descriptorsMap.get(servlet);
@@ -267,32 +267,9 @@ public Server startServer() throws Exception {
return server;
}
- /**
- * Creates a builder.
- * @param conf the configuration
- * @param describe the functions to call that create servlet descriptors
- * @return the builder or null if no descriptors
- */
- @SafeVarargs
- public static ServletServerBuilder builder(Configuration conf,
- Function... describe) {
- List descriptors = new ArrayList();
- Arrays.asList(describe).forEach(functor -> {
- ServletServerBuilder.Descriptor descriptor = functor.apply(conf);
- if (descriptor != null) {
- descriptors.add(descriptor);
- }
- });
- if (!descriptors.isEmpty()) {
- ServletServerBuilder builder = new ServletServerBuilder(conf);
- descriptors.forEach(d -> builder.addServlet(d));
- return builder;
- }
- return null;
- }
-
/**
* Creates and starts the server.
+ *
* @param logger a logger to output info
* @return the server instance (or null if error)
*/
@@ -312,28 +289,55 @@ public Server start(Logger logger) {
}
}
return server;
- } catch (Exception exception) {
- logger.error("Unable to start servlet server", exception);
- return null;
} catch (Throwable throwable) {
logger.error("Unable to start servlet server", throwable);
return null;
}
}
-
- /**
- * Helper for generic use case.
- * @param logger the logger
- * @param conf the configuration
- * @param describe the functions to create descriptors
- * @return a server instance
+
+ /**
+ * A descriptor of a servlet.
+ * <p>After server is started, unspecified port will be updated to reflect
+ * what the system allocated.
Called by the static method {@link HMSCatalogFactory#createServlet(Configuration)} that is
+ * declared in configuration and found through introspection.
This one is looked up through reflection to start from HMS.
+ * <p>This method name is found through configuration as {@link MetastoreConf.ConfVars#ICEBERG_CATALOG_SERVLET_FACTORY}
+ * and looked up through reflection to start from HMS.
*
* @param configuration the configuration
* @return the servlet descriptor instance
*/
+ @SuppressWarnings("unused")
public static ServletServerBuilder.Descriptor createServlet(Configuration configuration) {
try {
HMSCatalogFactory hms = new HMSCatalogFactory(configuration, null);
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
index 4b0e6a47080c..e6ec84c99118 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
@@ -58,7 +58,12 @@ public class HMSCatalogServlet extends HttpServlet {
public HMSCatalogServlet(HMSCatalogAdapter restCatalogAdapter) {
this.restCatalogAdapter = restCatalogAdapter;
}
-
+
+ @Override
+ public String getServletName() {
+ return "HMS Catalog";
+ }
+
@Override
protected void service(HttpServletRequest request, HttpServletResponse response) {
try {
diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
index 8b5a795e2c2d..7b05602fc123 100644
--- a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
+++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java
@@ -22,7 +22,7 @@
import java.io.IOException;
import java.net.URI;
import java.net.URL;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
@@ -71,8 +71,8 @@ public void testCreateNamespaceHttp() throws Exception {
Map nsrep = (Map) response;
List> nslist = (List>) nsrep.get("namespaces");
Assert.assertEquals(2, nslist.size());
- Assert.assertTrue((nslist.contains(Arrays.asList("default"))));
- Assert.assertTrue((nslist.contains(Arrays.asList("hivedb"))));
+ Assert.assertTrue((nslist.contains(Collections.singletonList("default"))));
+ Assert.assertTrue((nslist.contains(Collections.singletonList("hivedb"))));
// succeed
response = clientCall(jwt, url, "POST", false, "{ \"namespace\" : [ \""+ns+"\" ], "+
"\"properties\":{ \"owner\": \"apache\", \"group\" : \"iceberg\" }"
@@ -94,9 +94,7 @@ public void testCreateNamespaceHttp() throws Exception {
// quick check on metrics
Map counters = reportMetricCounters("list_namespaces", "list_tables");
- counters.entrySet().forEach(m->{
- Assert.assertTrue(m.getKey(), m.getValue() > 0);
- });
+ counters.forEach((key, value) -> Assert.assertTrue(key, value > 0));
}
private Schema getTestSchema() {
@@ -136,8 +134,8 @@ public void testCreateTableTxnBuilder() throws Exception {
Assert.assertEquals(200, (int) eval(response, "json -> json.status"));
List> nslist = (List>) eval(response, "json -> json.namespaces");
Assert.assertEquals(2, nslist.size());
- Assert.assertTrue((nslist.contains(Arrays.asList("default"))));
- Assert.assertTrue((nslist.contains(Arrays.asList("hivedb"))));
+ Assert.assertTrue((nslist.contains(Collections.singletonList("default"))));
+ Assert.assertTrue((nslist.contains(Collections.singletonList("hivedb"))));
// list tables in hivedb
url = iceUri.resolve("namespaces/" + DB_NAME + "/tables").toURL();
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 503f15702c42..77812cc2cffe 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -768,6 +768,7 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
throw e;
}
}
+
// optionally create and start the property and Iceberg REST server
ServletServerBuilder builder = new ServletServerBuilder(conf);
ServletServerBuilder.Descriptor properties = PropertyServlet.createServlet(conf);
@@ -783,6 +784,7 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
catalogServletPort = catalog.getPort();
}
}
+
// main server
thriftServer.start();
}
@@ -799,10 +801,10 @@ static ServletServerBuilder.Descriptor createIcebergServlet(Configuration config
Method iceStart = iceClazz.getMethod("createServlet", Configuration.class);
return (ServletServerBuilder.Descriptor) iceStart.invoke(null, configuration);
} catch (ClassNotFoundException xnf) {
- LOG.warn("unable to start Iceberg REST Catalog server, missing jar?", xnf);
+ LOG.warn("Unable to start Iceberg REST Catalog server, missing jar?", xnf);
return null;
} catch (Exception e) {
- LOG.error("unable to start Iceberg REST Catalog server", e);
+ LOG.error("Unable to start Iceberg REST Catalog server", e);
return null;
}
}
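The reflective lookup above keeps the Iceberg REST catalog optional: a missing class simply disables the servlet instead of failing the metastore. A minimal sketch of that lookup in isolation (loadDescriptor and its parameterized class name are illustrative; the real createIcebergServlet logs both failure cases before returning null):

    import java.lang.reflect.Method;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.ServletServerBuilder;

    public class ReflectiveFactorySketch {
      // Looks up a static createServlet(Configuration) factory by class name.
      static ServletServerBuilder.Descriptor loadDescriptor(Configuration conf, String factoryClassName) {
        try {
          Class<?> factory = Class.forName(factoryClassName);
          Method createServlet = factory.getMethod("createServlet", Configuration.class);
          return (ServletServerBuilder.Descriptor) createServlet.invoke(null, conf);
        } catch (ClassNotFoundException missingJar) {
          return null; // optional module not packaged; the metastore keeps running without it
        } catch (ReflectiveOperationException error) {
          return null; // the real code logs the error before giving up
        }
      }
    }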
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index fab395df577a..ffb1be3058ad 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -66,7 +66,7 @@ public class PropertyServlet extends HttpServlet {
@Override
public String getServletName() {
- return "HMS property";
+ return "HMS Property";
}
private String strError(String msg, Object...args) {
@@ -180,7 +180,7 @@ protected void doPost(HttpServletRequest request,
break;
}
default: {
- throw new IllegalArgumentException("bad argument type " + action.getClass());
+ throw new IllegalArgumentException("Bad argument type " + action.getClass());
}
}
}
@@ -317,12 +317,12 @@ public static ServletServerBuilder.Descriptor createServlet(Configuration config
HttpServlet servlet = security.proxy(new PropertyServlet(configuration));
return new ServletServerBuilder.Descriptor(port, path, servlet) {
@Override public String toString() {
- return "HMS property";
+ return "HMS Property";
}
};
}
} catch (Exception io) {
- LOGGER.error("failed to create servlet ", io);
+ LOGGER.error("Failed to create servlet ", io);
}
return null;
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index 197c56e057c2..aee8d37b77b2 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -125,12 +125,14 @@ public class ProxyServlet extends HttpServlet {
this.delegate = delegate;
}
- @Override public void init() throws ServletException {
+ @Override
+ public void init() throws ServletException {
ServletSecurity.this.init();
delegate.init();
}
- @Override public void service(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ @Override
+ public void service(HttpServletRequest request, HttpServletResponse response) throws IOException {
execute(request, response, delegate::service);
}
}
@@ -281,7 +283,7 @@ static void loginServerPrincipal(Configuration conf) throws IOException {
* @return null if no ssl in config, an instance otherwise
* @throws IOException if getting password fails
*/
- public static SslContextFactory createSslContextFactory(Configuration conf) throws IOException {
+ static SslContextFactory createSslContextFactory(Configuration conf) throws IOException {
final boolean useSsl = MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_SSL);
if (!useSsl) {
return null;
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index 9f6ac2b76661..066c0c6a31f8 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -35,7 +35,6 @@
import javax.servlet.Servlet;
import javax.servlet.http.HttpServlet;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -43,6 +42,7 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.function.Function;
/**
@@ -53,7 +53,7 @@ public class ServletServerBuilder {
/**
* The configuration instance.
*/
- protected final Configuration configuration;
+ private final Configuration configuration;
/**
* Keeping track of descriptors.
*/
@@ -64,7 +64,7 @@ public class ServletServerBuilder {
*
* @param conf the configuration
*/
- protected ServletServerBuilder(Configuration conf) {
+ public ServletServerBuilder(Configuration conf) {
this.configuration = conf;
}
@@ -78,7 +78,7 @@ protected ServletServerBuilder(Configuration conf) {
@SafeVarargs
public static ServletServerBuilder builder(Configuration conf,
Function... describe) {
- List descriptors = new ArrayList();
+ List descriptors = new ArrayList<>();
Arrays.asList(describe).forEach(functor -> {
ServletServerBuilder.Descriptor descriptor = functor.apply(conf);
if (descriptor != null) {
@@ -87,7 +87,7 @@ public static ServletServerBuilder builder(Configuration conf,
});
if (!descriptors.isEmpty()) {
ServletServerBuilder builder = new ServletServerBuilder(conf);
- descriptors.forEach(d -> builder.addServlet(d));
+ descriptors.forEach(builder::addServlet);
return builder;
}
return null;
@@ -106,7 +106,7 @@ public static Server startServer(
Logger logger,
Configuration conf,
Function... describe) {
- return builder(conf, describe).start(logger);
+ return Objects.requireNonNull(builder(conf, describe)).start(logger);
}
public Configuration getConfiguration() {
@@ -147,9 +147,8 @@ public Descriptor addServlet(Descriptor descriptor) {
* <p>Default use configuration to determine thread-pool constants?
*
* @return the server instance
- * @throws IOException if server creation fails
*/
- protected Server createServer() throws IOException {
+ private Server createServer() {
final int maxThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MAX);
final int minThreads = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_MIN);
final int idleTimeout = MetastoreConf.getIntVar(configuration, MetastoreConf.ConfVars.HTTPSERVER_THREADPOOL_IDLE);
@@ -166,9 +165,8 @@ protected Server createServer() throws IOException {
* @param sslContextFactory the ssl factory
* @param port the port
* @return the server connector listening to the port
- * @throws IOException if server creation fails
*/
- protected ServerConnector createConnector(Server server, SslContextFactory sslContextFactory, int port) throws IOException {
+ private ServerConnector createConnector(Server server, SslContextFactory sslContextFactory, int port) {
final ServerConnector connector = new ServerConnector(server, sslContextFactory);
connector.setPort(port);
connector.setReuseAddress(true);
@@ -187,9 +185,8 @@ protected ServerConnector createConnector(Server server, SslContextFactory sslCo
*
* @param handlersMap the map of port to handlers
* @param descriptor the servlet descriptor
- * @throws IOException
*/
- protected void addServlet(Map handlersMap, Descriptor descriptor) throws IOException {
+ private void addServlet(Map handlersMap, Descriptor descriptor) {
final int port = descriptor.getPort();
final String path = descriptor.getPath();
final HttpServlet servlet = descriptor.getServlet();
@@ -280,12 +277,10 @@ public Server start(Logger logger) {
if (!server.isStarted()) {
logger.error("Unable to start servlet server on {}", server.getURI());
} else {
- descriptorsMap.values().forEach(descriptor -> {
- logger.info("Started {} servlet on {}:{}",
- descriptor.toString(),
- descriptor.getPort(),
- descriptor.getPath());
- });
+ descriptorsMap.values().forEach(descriptor -> logger.info("Started {} servlet on {}:{}",
+ descriptor.toString(),
+ descriptor.getPort(),
+ descriptor.getPath()));
}
}
return server;
@@ -320,6 +315,7 @@ public Descriptor(int port, String path, HttpServlet servlet) {
@Override
public String toString() {
+ // can not use the servlet name since it is only valid after calling init()
return servlet.getClass().getSimpleName() + ":" + port + "/" + path;
}
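One detail worth calling out from startServer(): each connector is given a name and its context handler lists "@name" as a virtual host, which is how Jetty ties a servlet context to a single connector. A minimal sketch of that pairing (the pair helper is illustrative, not the builder's code):

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.servlet.ServletContextHandler;

    public class VirtualHostSketch {
      // Naming a connector and declaring "@name" as the context's virtual host restricts
      // that context to requests arriving on that connector.
      static void pair(Server server, ServerConnector connector, ServletContextHandler context, int index) {
        String name = "hms" + index;      // same naming scheme as the builder above
        connector.setName(name);
        context.setVirtualHosts(new String[]{"@" + name});
        server.addConnector(connector);
      }
    }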
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
index 51c4e26727d2..f196d66c33b3 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest.java
@@ -59,7 +59,7 @@
public class HMSServletTest extends HMSTestBase {
String path = null;
Server servletServer = null;
- int sport = -1;
+ int servletPort = -1;
@Before
public void setUp() throws Exception {
@@ -67,26 +67,28 @@ public void setUp() throws Exception {
path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
}
- @Override protected int createServer(Configuration conf) throws Exception {
+ @Override
+ protected int createServer(Configuration conf) throws Exception {
if (servletServer == null) {
servletServer = PropertyServlet.startServer(conf);
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = servletServer.getURI().getPort();
+ servletPort = servletServer.getURI().getPort();
}
- return sport;
+ return servletPort;
}
/**
* Stops the server.
* @param port the server port
*/
- @Override protected void stopServer(int port) throws Exception {
+ @Override
+ protected void stopServer(int port) throws Exception {
if (servletServer != null) {
servletServer.stop();
servletServer = null;
- sport = -1;
+ servletPort = -1;
}
}
@@ -154,7 +156,7 @@ public Map getProperties(List selection) {
@Test
public void testServletEchoA() throws Exception {
- URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS);
+ URL url = new URL("http://hive@localhost:" + servletPort + "/" + path + "/" + NS);
Map json = Collections.singletonMap("method", "echo");
String jwt = generateJWT();
// succeed
@@ -186,7 +188,7 @@ public void testProperties0() throws Exception {
.setScheme("http")
.setUserInfo("hive")
.setHost("localhost")
- .setPort(sport)
+ .setPort(servletPort)
.setPath("/" + path + "/" + NS)
.setParameters(nvp)
.build();
@@ -303,7 +305,7 @@ public static Object clientCall(String jwt, URL url, String method, Object arg)
* @throws Exception
*/
private HttpPost createPost(String jwt, String msgBody) {
- HttpPost method = new HttpPost("http://hive@localhost:" + sport + "/" + path + "/" + NS);
+ HttpPost method = new HttpPost("http://hive@localhost:" + servletPort + "/" + path + "/" + NS);
method.addHeader("Authorization", "Bearer " + jwt);
method.addHeader("Content-Type", "application/json");
method.addHeader("Accept", "application/json");
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java
index 1a096e38f30c..b1c8b803dff9 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1.java
@@ -30,6 +30,7 @@
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
@@ -48,12 +49,14 @@ public class HMSServletTest1 extends HMSServletTest {
public void tearDown() throws Exception {
if (client instanceof AutoCloseable) {
((AutoCloseable) client).close();
+ client = null;
}
super.tearDown();
}
@Override
protected PropertyClient createClient(Configuration conf, int sport) throws Exception {
+ String path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.PROPERTIES_SERVLET_PATH);
URL url = new URL("http://hive@localhost:" + sport + "/" + path + "/" + NS);
String jwt = generateJWT();
return new JSonHttpClient(jwt, url.toString());
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
index 1cf4b3e4e26e..fd58d53e1f19 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTest1A.java
@@ -50,8 +50,8 @@ protected int createServer(Configuration conf) throws Exception {
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = HiveMetaStore.getPropertyServletPort();
- return sport;
+ servletPort = HiveMetaStore.getPropertyServletPort();
+ return servletPort;
}
@Override
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
index 41a2ba06233d..3a8fb16028f0 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/properties/HMSServletTestA.java
@@ -49,8 +49,8 @@ protected int createServer(Configuration conf) throws Exception {
if (servletServer == null || !servletServer.isStarted()) {
Assert.fail("http server did not start");
}
- sport = HiveMetaStore.getPropertyServletPort();
- return sport;
+ servletPort = HiveMetaStore.getPropertyServletPort();
+ return servletPort;
}
@Override
From 370a0e00bf2e9773abecbeef3032c3954133000f Mon Sep 17 00:00:00 2001
From: Henrib
Date: Mon, 3 Mar 2025 09:26:18 +0100
Subject: [PATCH 38/40] HIVE-28059 : fixing javadoc;
---
.../org/apache/iceberg/rest/HMSCatalogFactory.java | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
index 76327d1fa80f..5add2e51564e 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
@@ -35,7 +35,7 @@
import org.slf4j.LoggerFactory;
/**
- * Catalog & servlet factory.
+ * Catalog &amp; servlet factory.
*/
public class HMSCatalogFactory {
private static final Logger LOG = LoggerFactory.getLogger(HMSCatalogFactory.class);
@@ -103,12 +103,12 @@ protected Catalog createCatalog() {
if (configExtWarehouse != null) {
properties.put("external-warehouse", configExtWarehouse);
}
- final HiveCatalog catalog = new org.apache.iceberg.hive.HiveCatalog();
- catalog.setConf(configuration);
+ final HiveCatalog hiveCatalog = new org.apache.iceberg.hive.HiveCatalog();
+ hiveCatalog.setConf(configuration);
final String catalogName = MetastoreConf.getVar(configuration, MetastoreConf.ConfVars.CATALOG_DEFAULT);
- catalog.initialize(catalogName, properties);
+ hiveCatalog.initialize(catalogName, properties);
long expiry = MetastoreConf.getLongVar(configuration, MetastoreConf.ConfVars.ICEBERG_CATALOG_CACHE_EXPIRY);
- return expiry > 0 ? new HMSCachingCatalog<>(catalog, expiry) : catalog;
+ return expiry > 0 ? new HMSCachingCatalog<>(hiveCatalog, expiry) : hiveCatalog;
}
/**
From d44bfe08c3d305a7cec7fc029ed759765ae09180 Mon Sep 17 00:00:00 2001
From: Henrib
Date: Thu, 6 Mar 2025 17:53:49 +0100
Subject: [PATCH 39/40] HIVE-28059 : latest nits (servlet names, etc); - fixing
test failures;
---
.../iceberg/rest/HMSCatalogAdapter.java | 2 ++
.../iceberg/rest/HMSCatalogFactory.java | 6 +----
.../iceberg/rest/HMSCatalogServlet.java | 2 +-
.../hadoop/hive/metastore/HiveMetaStore.java | 8 +++----
.../hive/metastore/PropertyServlet.java | 6 +----
.../hive/metastore/ServletSecurity.java | 22 ++++++++++++++++++-
.../hive/metastore/ServletServerBuilder.java | 12 ++++++++--
7 files changed, 39 insertions(+), 19 deletions(-)
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
index cc2738008f75..dbf280396f1e 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java
@@ -297,12 +297,14 @@ private ConfigResponse config() {
}
private OAuthTokenResponse tokens(Object body) {
+ @SuppressWarnings("unchecked")
Map<String, String> request = (Map<String, String>) castRequest(Map.class, body);
String grantType = request.get(GRANT_TYPE);
switch (grantType) {
case CLIENT_CREDENTIALS:
return OAuthTokenResponse.builder()
.withToken("client-credentials-token:sub=" + request.get(CLIENT_ID))
+ .withIssuedTokenType(URN_OAUTH_ACCESS_TOKEN)
.withTokenType(BEARER)
.build();
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
index 5add2e51564e..1bddb3e6842d 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
@@ -152,11 +152,7 @@ public static ServletServerBuilder.Descriptor createServlet(Configuration config
HMSCatalogFactory hms = new HMSCatalogFactory(configuration, null);
HttpServlet servlet = hms.createServlet();
if (servlet != null) {
- return new ServletServerBuilder.Descriptor(hms.getPort(), hms.getPath(), servlet) {
- @Override public String toString() {
- return "Iceberg REST Catalog";
- }
- };
+ return new ServletServerBuilder.Descriptor(hms.getPort(), hms.getPath(), servlet);
}
} catch (IOException exception) {
LOG.error("failed to create servlet ", exception);
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
index e6ec84c99118..b164709149b1 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java
@@ -61,7 +61,7 @@ public HMSCatalogServlet(HMSCatalogAdapter restCatalogAdapter) {
@Override
public String getServletName() {
- return "HMS Catalog";
+ return "Iceberg REST Catalog";
}
@Override
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 77812cc2cffe..edc91e7c51de 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -771,10 +771,8 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
// optionally create and start the property and Iceberg REST server
ServletServerBuilder builder = new ServletServerBuilder(conf);
- ServletServerBuilder.Descriptor properties = PropertyServlet.createServlet(conf);
- builder.addServlet(properties);
- ServletServerBuilder.Descriptor catalog = createIcebergServlet(conf);
- builder.addServlet(catalog);
+ ServletServerBuilder.Descriptor properties = builder.addServlet(PropertyServlet.createServlet(conf));
+ ServletServerBuilder.Descriptor catalog = builder.addServlet(createIcebergServlet(conf));
servletServer = builder.start(LOG);
if (servletServer != null) {
if (properties != null) {
@@ -812,7 +810,7 @@ static ServletServerBuilder.Descriptor createIcebergServlet(Configuration config
/**
* @param port where metastore server is running
* @return metastore server instance URL. If the metastore server was bound to a configured
- * host, return that appended by port. Otherwise return the externally visible URL of the local
+ * host, return that appended by port. Otherwise, return the externally visible URL of the local
* host with the given port
* @throws Exception
*/
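Context for the HiveMetaStore hunk above (illustration only, not part of the patch): the one-argument addServlet is used as register-and-return, so it presumably tolerates the null that createServlet may produce and hands the same reference back for the later null checks. A minimal sketch of that contract, assuming the builder records descriptors in the descriptorsMap seen earlier (keying by port here is purely an assumption for the sketch):

public Descriptor addServlet(Descriptor descriptor) {
  // accept null so callers can pass createServlet(...) results straight through
  if (descriptor != null) {
    descriptorsMap.put(descriptor.getPort(), descriptor);
  }
  // return the argument unchanged so the caller keeps the reference in one expression
  return descriptor;
}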
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
index ffb1be3058ad..9437d2558f8a 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PropertyServlet.java
@@ -315,11 +315,7 @@ public static ServletServerBuilder.Descriptor createServlet(Configuration config
if (port >= 0 && path != null && !path.isEmpty()) {
ServletSecurity security = new ServletSecurity(configuration);
HttpServlet servlet = security.proxy(new PropertyServlet(configuration));
- return new ServletServerBuilder.Descriptor(port, path, servlet) {
- @Override public String toString() {
- return "HMS Property";
- }
- };
+ return new ServletServerBuilder.Descriptor(port, path, servlet);
}
} catch (Exception io) {
LOGGER.error("Failed to create servlet ", io);
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index aee8d37b77b2..d0d48b04df75 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -135,14 +135,34 @@ public void init() throws ServletException {
public void service(HttpServletRequest request, HttpServletResponse response) throws IOException {
execute(request, response, delegate::service);
}
+
+ @Override
+ public String getServletName() {
+ try {
+ return delegate.getServletName();
+ } catch (IllegalStateException ill) {
+ return delegate.toString();
+ }
+ }
+
+ @Override
+ public String getServletInfo() {
+ return delegate.getServletInfo();
+ }
}
/**
* Creates a proxy servlet.
* @param servlet the servlet to serve within this security context
- * @return a servlet instance
+ * @return a servlet instance or null if security initialization fails
*/
public HttpServlet proxy(HttpServlet servlet) {
+ try {
+ init();
+ } catch (ServletException e) {
+ LOG.error("Unable to proxy security for servlet {}", servlet.toString(), e);
+ return null;
+ }
return new ProxyServlet(servlet);
}
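Not part of the patch above: since proxy(...) can now return null when security initialization fails, a caller such as PropertyServlet.createServlet(...) would presumably guard before building a descriptor. A minimal caller-side sketch, with port and path assumed from the surrounding createServlet code:

HttpServlet servlet = new ServletSecurity(configuration).proxy(new PropertyServlet(configuration));
if (servlet == null) {
  // security could not be initialized; skip registration rather than fail metastore startup
  return null;
}
return new ServletServerBuilder.Descriptor(port, path, servlet);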
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
index 066c0c6a31f8..7323845ae35f 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletServerBuilder.java
@@ -315,8 +315,16 @@ public Descriptor(int port, String path, HttpServlet servlet) {
@Override
public String toString() {
- // can not use the servlet name since it is only valid after calling init()
- return servlet.getClass().getSimpleName() + ":" + port + "/" + path;
+ String name = null;
+ try {
+ name = servlet.getServletName();
+ } catch (IllegalStateException ill) {
+ // ignore, it may happen if servlet config is not set (yet)
+ }
+ if (name == null) {
+ name = servlet.getClass().getSimpleName();
+ }
+ return name + ":" + port + "/" + path;
}
public int getPort() {
From 1e7a8da66b57681bdba225e152f78f4d287953bb Mon Sep 17 00:00:00 2001
From: Henrib
Date: Fri, 7 Mar 2025 06:25:01 +0100
Subject: [PATCH 40/40] HIVE-28059 : fixing thrift over http servlet init;
---
.../java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index edc91e7c51de..6cd45e34aff1 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -482,7 +482,8 @@ public void setThreadFactory(ThreadFactory threadFactory) {
IHMSHandler handler = HMSHandlerProxyFactory.getProxy(conf, baseHandler, false);
processor = new ThriftHiveMetastore.Processor<>(handler);
LOG.info("Starting DB backed MetaStore Server with generic processor");
- ServletSecurity security = new ServletSecurity(conf);
+ boolean jwt = MetastoreConf.getVar(conf, ConfVars.THRIFT_METASTORE_AUTHENTICATION).equalsIgnoreCase("jwt");
+ ServletSecurity security = new ServletSecurity(conf, jwt);
Servlet thriftHttpServlet = security.proxy(new TServlet(processor, protocolFactory));
boolean directSqlEnabled = MetastoreConf.getBoolVar(conf, ConfVars.TRY_DIRECT_SQL);