Skip to content

Commit

Permalink
Use JUnit 5 based HiveMetastoreExtension with Hive tests, remove HiveMetastoreTest.
Browse files Browse the repository at this point in the history
  • Loading branch information
nk1506 committed Nov 2, 2023
1 parent c670c1e commit 40de512
Show file tree
Hide file tree
Showing 8 changed files with 136 additions and 353 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,11 @@

import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import org.apache.iceberg.AppendFiles;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.PartitionSpec;
Expand All @@ -43,12 +47,14 @@
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

public class HiveCreateReplaceTableTest extends HiveMetastoreTest {
public class HiveCreateReplaceTableTest {

private static final String DB_NAME = "hivedb";
private static final String TABLE_NAME = "tbl";
private static final TableIdentifier TABLE_IDENTIFIER = TableIdentifier.of(DB_NAME, TABLE_NAME);
private static final Schema SCHEMA =
Expand All @@ -60,8 +66,23 @@ public class HiveCreateReplaceTableTest extends HiveMetastoreTest {

private String tableLocation;

// JUnit 5 extension that starts/stops an embedded Hive metastore for the whole
// test class and pre-creates the DB_NAME database (passed with no extra properties).
// Static so the metastore is shared across all tests in this class.
@RegisterExtension
public static final HiveMetastoreExtension HIVE_METASTORE_EXTENSION =
new HiveMetastoreExtension(DB_NAME, Collections.emptyMap());

// Catalog under test; re-created before each test in createTableLocation().
protected HiveCatalog catalog;

/**
 * Builds a fresh {@link HiveCatalog} against the extension-managed metastore and
 * prepares a per-test table location before each test.
 *
 * <p>The catalog is loaded via {@link CatalogUtil#loadCatalog} with a 10-second
 * client-pool cache eviction interval so pooled metastore clients are recycled
 * quickly between tests.
 */
@BeforeEach
public void createTableLocation() throws IOException {
catalog =
(HiveCatalog)
CatalogUtil.loadCatalog(
HiveCatalog.class.getName(),
CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE,
ImmutableMap.of(
CatalogProperties.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS,
String.valueOf(TimeUnit.SECONDS.toMillis(10))),
HIVE_METASTORE_EXTENSION.hiveConf());
// NOTE(review): `temp` is presumably a @TempDir java.nio.file.Path field declared
// outside this hunk — confirm; the resolved "hive-" path is used as the table location.
tableLocation = temp.resolve("hive-").toString();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,4 +77,8 @@ public HiveMetaStoreClient metastoreClient() {
/** Returns the {@link HiveConf} of the embedded metastore managed by this extension. */
public HiveConf hiveConf() {
return metastore.hiveConf();
}

/**
 * Exposes the underlying {@link TestHiveMetastore} so tests can reach metastore
 * internals (e.g. database paths) without a shared base class.
 */
public TestHiveMetastore metastore() {
return metastore;
}
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -26,22 +26,34 @@
import java.io.File;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.hadoop.fs.Path;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.TableMetadataParser;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.types.Types;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.RegisterExtension;

public class HiveTableBaseTest extends HiveMetastoreTest {
public class HiveTableBaseTest {

static final String TABLE_NAME = "tbl";
static final String DB_NAME = "hivedb";
static final TableIdentifier TABLE_IDENTIFIER = TableIdentifier.of(DB_NAME, TABLE_NAME);

@RegisterExtension
public static final HiveMetastoreExtension HIVE_METASTORE_EXTENSION =
new HiveMetastoreExtension(DB_NAME, Collections.emptyMap());

protected HiveCatalog catalog;

static final Schema schema =
new Schema(Types.StructType.of(required(1, "id", Types.LongType.get())).fields());

Expand All @@ -54,23 +66,22 @@ public class HiveTableBaseTest extends HiveMetastoreTest {

private static final PartitionSpec partitionSpec = builderFor(schema).identity("id").build();

private Path tableLocation;

@BeforeEach
public void createTestTable() {
this.tableLocation =
new Path(catalog.createTable(TABLE_IDENTIFIER, schema, partitionSpec).location());
}

@AfterEach
public void dropTestTable() throws Exception {
// drop the table data
tableLocation.getFileSystem(hiveConf).delete(tableLocation, true);
catalog.dropTable(TABLE_IDENTIFIER, false /* metadata only, location was already deleted */);
catalog =
(HiveCatalog)
CatalogUtil.loadCatalog(
HiveCatalog.class.getName(),
CatalogUtil.ICEBERG_CATALOG_TYPE_HIVE,
ImmutableMap.of(
CatalogProperties.CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS,
String.valueOf(TimeUnit.SECONDS.toMillis(10))),
HIVE_METASTORE_EXTENSION.hiveConf());
catalog.createTable(TABLE_IDENTIFIER, schema, partitionSpec);
}

private static String getTableBasePath(String tableName) {
String databasePath = metastore.getDatabasePath(DB_NAME);
String databasePath = HIVE_METASTORE_EXTENSION.metastore().getDatabasePath(DB_NAME);
return Paths.get(databasePath, tableName).toAbsolutePath().toString();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,9 @@ public void testCreate() throws TException {
// Table should be renamed in hive metastore
String tableName = TABLE_IDENTIFIER.name();
org.apache.hadoop.hive.metastore.api.Table table =
metastoreClient.getTable(TABLE_IDENTIFIER.namespace().level(0), tableName);
HIVE_METASTORE_EXTENSION
.metastoreClient()
.getTable(TABLE_IDENTIFIER.namespace().level(0), tableName);

// check parameters are in expected state
Map<String, String> parameters = table.getParameters();
Expand Down Expand Up @@ -255,7 +257,7 @@ public void testExistingTableUpdate() throws TException {
assertThat(icebergTable.schema().asStruct()).isEqualTo(altered.asStruct());

final org.apache.hadoop.hive.metastore.api.Table table =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);
final List<String> hiveColumns =
table.getSd().getCols().stream().map(FieldSchema::getName).collect(Collectors.toList());
final List<String> icebergColumns =
Expand Down Expand Up @@ -309,10 +311,10 @@ public void testColumnTypeChangeInMetastore() throws TException {
public void testFailure() throws TException {
Table icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
org.apache.hadoop.hive.metastore.api.Table table =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);
String dummyLocation = "dummylocation";
table.getParameters().put(METADATA_LOCATION_PROP, dummyLocation);
metastoreClient.alter_table(DB_NAME, TABLE_NAME, table);
HIVE_METASTORE_EXTENSION.metastoreClient().alter_table(DB_NAME, TABLE_NAME, table);
assertThatThrownBy(
() -> icebergTable.updateSchema().addColumn("data", Types.LongType.get()).commit())
.isInstanceOf(CommitFailedException.class)
Expand All @@ -333,7 +335,7 @@ public void testListTables() throws TException, IOException {
// create a hive table
String hiveTableName = "test_hive_table";
org.apache.hadoop.hive.metastore.api.Table hiveTable = createHiveTable(hiveTableName);
metastoreClient.createTable(hiveTable);
HIVE_METASTORE_EXTENSION.metastoreClient().createTable(hiveTable);

catalog.setListAllTables(false);
List<TableIdentifier> tableIdents1 = catalog.listTables(TABLE_IDENTIFIER.namespace());
Expand All @@ -344,7 +346,7 @@ public void testListTables() throws TException, IOException {
assertThat(tableIdents2).as("should be 2 tables in namespace .").hasSize(2);

assertThat(catalog.tableExists(TABLE_IDENTIFIER)).isTrue();
metastoreClient.dropTable(DB_NAME, hiveTableName);
HIVE_METASTORE_EXTENSION.metastoreClient().dropTable(DB_NAME, hiveTableName);
}

private org.apache.hadoop.hive.metastore.api.Table createHiveTable(String hiveTableName)
Expand Down Expand Up @@ -410,13 +412,13 @@ public void testNonDefaultDatabaseLocation() throws IOException, TException {
assertThat(table.location()).isEqualTo(namespaceMeta.get("location") + "/" + TABLE_NAME);

// Drop the database and purge the files
metastoreClient.dropDatabase(NON_DEFAULT_DATABASE, true, true, true);
HIVE_METASTORE_EXTENSION.metastoreClient().dropDatabase(NON_DEFAULT_DATABASE, true, true, true);
}

@Test
public void testRegisterTable() throws TException {
org.apache.hadoop.hive.metastore.api.Table originalTable =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

Map<String, String> originalParams = originalTable.getParameters();
assertThat(originalParams).isNotNull();
Expand All @@ -432,7 +434,7 @@ public void testRegisterTable() throws TException {
catalog.registerTable(TABLE_IDENTIFIER, "file:" + metadataVersionFiles.get(0));

org.apache.hadoop.hive.metastore.api.Table newTable =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

Map<String, String> newTableParameters = newTable.getParameters();
assertThat(newTableParameters)
Expand Down Expand Up @@ -466,7 +468,7 @@ public void testRegisterHadoopTableToHiveCatalog() throws IOException, TExceptio
.collect(Collectors.toList());
assertThat(metadataFiles).hasSize(2);

assertThatThrownBy(() -> metastoreClient.getTable(DB_NAME, "table1"))
assertThatThrownBy(() -> HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, "table1"))
.isInstanceOf(NoSuchObjectException.class)
.hasMessage("hivedb.table1 table not found");
assertThatThrownBy(() -> catalog.loadTable(identifier))
Expand All @@ -476,7 +478,7 @@ public void testRegisterHadoopTableToHiveCatalog() throws IOException, TExceptio
// register the table to hive catalog using the latest metadata file
String latestMetadataFile = ((BaseTable) table).operations().current().metadataFileLocation();
catalog.registerTable(identifier, "file:" + latestMetadataFile);
assertThat(metastoreClient.getTable(DB_NAME, "table1")).isNotNull();
assertThat(HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, "table1")).isNotNull();

// load the table in hive catalog
table = catalog.loadTable(identifier);
Expand Down Expand Up @@ -523,7 +525,7 @@ private String appendData(Table table, String fileName) throws IOException {
@Test
public void testRegisterExistingTable() throws TException {
org.apache.hadoop.hive.metastore.api.Table originalTable =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

Map<String, String> originalParams = originalTable.getParameters();
assertThat(originalParams).isNotNull();
Expand All @@ -550,7 +552,7 @@ public void testEngineHiveEnabledDefault() throws TException {

catalog.createTable(TABLE_IDENTIFIER, schema, PartitionSpec.unpartitioned());
org.apache.hadoop.hive.metastore.api.Table hmsTable =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

assertHiveEnabled(hmsTable, false);
}
Expand All @@ -565,7 +567,7 @@ public void testEngineHiveEnabledConfig() throws TException {

catalog.createTable(TABLE_IDENTIFIER, schema, PartitionSpec.unpartitioned());
org.apache.hadoop.hive.metastore.api.Table hmsTable =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

assertHiveEnabled(hmsTable, true);

Expand All @@ -575,7 +577,7 @@ public void testEngineHiveEnabledConfig() throws TException {
catalog.getConf().set(ConfigProperties.ENGINE_HIVE_ENABLED, "false");

catalog.createTable(TABLE_IDENTIFIER, schema, PartitionSpec.unpartitioned());
hmsTable = metastoreClient.getTable(DB_NAME, TABLE_NAME);
hmsTable = HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

assertHiveEnabled(hmsTable, false);
}
Expand All @@ -592,7 +594,7 @@ public void testEngineHiveEnabledTableProperty() throws TException {

catalog.createTable(TABLE_IDENTIFIER, schema, PartitionSpec.unpartitioned(), tableProperties);
org.apache.hadoop.hive.metastore.api.Table hmsTable =
metastoreClient.getTable(DB_NAME, TABLE_NAME);
HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

assertHiveEnabled(hmsTable, true);

Expand All @@ -603,7 +605,7 @@ public void testEngineHiveEnabledTableProperty() throws TException {
catalog.getConf().set(ConfigProperties.ENGINE_HIVE_ENABLED, "true");

catalog.createTable(TABLE_IDENTIFIER, schema, PartitionSpec.unpartitioned(), tableProperties);
hmsTable = metastoreClient.getTable(DB_NAME, TABLE_NAME);
hmsTable = HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, TABLE_NAME);

assertHiveEnabled(hmsTable, false);
}
Expand Down
Loading

0 comments on commit 40de512

Please sign in to comment.