Extract helper method to get HiveMetastore
ebyhr committed Dec 11, 2024
1 parent 0abae5c · commit e16e792
Showing 12 changed files with 30 additions and 61 deletions.
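In short, the tests changed here used to resolve the Iceberg connector's HiveMetastore inline from the connector's Guice injector; this commit extracts that lookup into a shared IcebergTestUtils.getHiveMetastore helper and updates the call sites. A rough sketch of the helper and a typical call site, assembled from the diff below (the imports and method body come from the diff; the surrounding class and test scaffolding are omitted for illustration):

import io.trino.metastore.HiveMetastore;
import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.testing.QueryRunner;

import java.util.Optional;

import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;

// New helper in IcebergTestUtils: one shared lookup instead of the repeated injector chain.
public static HiveMetastore getHiveMetastore(QueryRunner queryRunner)
{
    return ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
            .getInstance(HiveMetastoreFactory.class)
            .createMetastore(Optional.empty());
}

// In each test's createQueryRunner(), the old three-line lookup collapses to:
HiveMetastore metastore = getHiveMetastore(queryRunner);

Call sites drop their imports of HiveMetastoreFactory, java.util.Optional, and (where it was only needed for this lookup) ICEBERG_CATALOG, and statically import IcebergTestUtils.getHiveMetastore instead.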
----------------------------------------
@@ -33,6 +33,7 @@
import io.trino.parquet.reader.MetadataReader;
import io.trino.plugin.base.metrics.FileFormatDataSourceStats;
import io.trino.plugin.hive.TrinoViewHiveMetastore;
+import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.plugin.hive.parquet.TrinoParquetDataSource;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
@@ -160,6 +161,13 @@ public static TrinoFileSystemFactory getFileSystemFactory(QueryRunner queryRunner)
.getInjector().getInstance(TrinoFileSystemFactory.class);
}

+public static HiveMetastore getHiveMetastore(QueryRunner queryRunner)
+{
+    return ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
+            .getInstance(HiveMetastoreFactory.class)
+            .createMetastore(Optional.empty());
+}

public static BaseTable loadTable(String tableName,
HiveMetastore metastore,
TrinoFileSystemFactory fileSystemFactory,
----------------------------------------
@@ -16,18 +16,16 @@
import com.google.common.collect.ImmutableMap;
import io.trino.filesystem.Location;
import io.trino.metastore.HiveMetastore;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.testing.QueryRunner;
import io.trino.testing.sql.TestTable;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;

import java.io.IOException;
import java.io.UncheckedIOException;
-import java.util.Optional;

-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.checkOrcFileSorting;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.tpch.TpchTable.NATION;
import static io.trino.tpch.TpchTable.ORDERS;
import static io.trino.tpch.TpchTable.REGION;
@@ -59,9 +57,7 @@ protected QueryRunner createQueryRunner
"iceberg.register-table-procedure.enabled", "true",
"iceberg.writer-sort-buffer-size", "1MB"))
.build();
-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);
return queryRunner;
}

----------------------------------------
@@ -21,7 +21,6 @@
import io.trino.SystemSessionProperties;
import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.metastore.HiveMetastore;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.iceberg.util.FileOperationUtils;
import io.trino.plugin.tpch.TpchPlugin;
import io.trino.testing.AbstractTestQueryFramework;
@@ -45,6 +44,7 @@
import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergSessionProperties.COLLECT_EXTENDED_STATISTICS_ON_WRITE;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.plugin.iceberg.util.EqualityDeleteUtils.writeEqualityDeleteForTable;
import static io.trino.plugin.iceberg.util.FileOperationUtils.FileOperation;
import static io.trino.plugin.iceberg.util.FileOperationUtils.FileType.DATA;
@@ -100,9 +100,7 @@ protected QueryRunner createQueryRunner
.put("iceberg.metadata-cache.enabled", "false")
.buildOrThrow());

-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);

queryRunner.installPlugin(new TpchPlugin());
queryRunner.createCatalog("tpch", "tpch");
----------------------------------------
@@ -17,7 +17,6 @@
import io.opentelemetry.sdk.trace.data.SpanData;
import io.trino.metastore.Database;
import io.trino.metastore.HiveMetastore;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.tpch.TpchPlugin;
import io.trino.spi.security.PrincipalType;
import io.trino.testing.AbstractTestQueryFramework;
@@ -32,7 +31,7 @@
import java.nio.file.Path;
import java.util.Optional;

-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.testing.TestingSession.testSession;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@@ -62,9 +61,7 @@ protected QueryRunner createQueryRunner
queryRunner.installPlugin(new TestingIcebergPlugin(metastoreDir));
queryRunner.createCatalog("iceberg", "iceberg", ImmutableMap.of());

-HiveMetastore metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+HiveMetastore metastore = getHiveMetastore(queryRunner);

Database database = Database.builder()
.setDatabaseName("tiny")
----------------------------------------
@@ -16,16 +16,14 @@
import io.trino.Session;
import io.trino.metastore.HiveMetastore;
import io.trino.metastore.Table;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.sql.tree.ExplainType;
import io.trino.testing.QueryRunner;
import org.junit.jupiter.api.Test;

import java.util.Map;
-import java.util.Optional;

import static io.trino.plugin.base.util.Closables.closeAllSuppress;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static org.apache.iceberg.BaseMetastoreTableOperations.METADATA_LOCATION_PROP;
import static org.assertj.core.api.Assertions.assertThat;

@@ -42,9 +40,7 @@ protected QueryRunner createQueryRunner
QueryRunner queryRunner = IcebergQueryRunner.builder()
.build();
try {
-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);

queryRunner.createCatalog("iceberg2", "iceberg", Map.of(
"iceberg.catalog.type", "TESTING_FILE_METASTORE",
----------------------------------------
@@ -16,7 +16,6 @@
import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.metastore.HiveMetastore;
import io.trino.plugin.hive.TrinoViewHiveMetastore;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
@@ -32,12 +31,10 @@
import org.apache.iceberg.Table;
import org.junit.jupiter.api.Test;

-import java.util.Optional;

import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.createPerTransactionCache;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static org.assertj.core.api.Assertions.assertThat;

public class TestIcebergMergeAppend
Expand All @@ -51,9 +48,7 @@ protected QueryRunner createQueryRunner()
throws Exception
{
QueryRunner queryRunner = IcebergQueryRunner.builder().build();
-HiveMetastore metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+HiveMetastore metastore = getHiveMetastore(queryRunner);
CachingHiveMetastore cachingHiveMetastore = createPerTransactionCache(metastore, 1000);
TrinoFileSystemFactory fileSystemFactory = getFileSystemFactory(queryRunner);
tableOperationsProvider = new FileMetastoreTableOperationsProvider(fileSystemFactory);
----------------------------------------
@@ -18,7 +18,6 @@
import io.trino.filesystem.TrinoFileSystemFactory;
import io.trino.metastore.HiveMetastore;
import io.trino.plugin.hive.TrinoViewHiveMetastore;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.plugin.iceberg.catalog.IcebergTableOperationsProvider;
import io.trino.plugin.iceberg.catalog.TrinoCatalog;
@@ -41,7 +40,6 @@
import java.io.File;
import java.util.List;
import java.util.Map;
-import java.util.Optional;

import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.trino.SystemSessionProperties.INITIAL_SPLITS_PER_NODE;
@@ -53,6 +51,7 @@
import static io.trino.plugin.iceberg.DataFileRecord.toDataFileRecord;
import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static org.assertj.core.api.Assertions.assertThat;

@@ -88,9 +87,7 @@ protected QueryRunner createQueryRunner
TrinoFileSystemFactory fileSystemFactory = getFileSystemFactory(queryRunner);
tableOperationsProvider = new FileMetastoreTableOperationsProvider(fileSystemFactory);

-HiveMetastore metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+HiveMetastore metastore = getHiveMetastore(queryRunner);

CachingHiveMetastore cachingHiveMetastore = createPerTransactionCache(metastore, 1000);
trinoCatalog = new TrinoHiveCatalog(
----------------------------------------
@@ -21,7 +21,6 @@
import io.trino.filesystem.cache.DefaultCachingHostAddressProvider;
import io.trino.metastore.HiveMetastore;
import io.trino.plugin.hive.TrinoViewHiveMetastore;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
import io.trino.plugin.hive.orc.OrcReaderConfig;
import io.trino.plugin.hive.orc.OrcWriterConfig;
@@ -86,9 +85,9 @@
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService;
import static io.trino.plugin.hive.metastore.cache.CachingHiveMetastore.createPerTransactionCache;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergSplitSource.createFileStatisticsDomain;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.plugin.iceberg.util.EqualityDeleteUtils.writeEqualityDeleteForTable;
import static io.trino.spi.connector.Constraint.alwaysTrue;
import static io.trino.spi.type.BigintType.BIGINT;
@@ -127,9 +126,7 @@ protected QueryRunner createQueryRunner
.setMetastoreDirectory(metastoreDir)
.build();

-HiveMetastore metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+HiveMetastore metastore = getHiveMetastore(queryRunner);

this.fileSystemFactory = getFileSystemFactory(queryRunner);
CachingHiveMetastore cachingHiveMetastore = createPerTransactionCache(metastore, 1000);
----------------------------------------
@@ -17,7 +17,6 @@
import io.trino.filesystem.TrinoFileSystem;
import io.trino.metastore.HiveMetastore;
import io.trino.metastore.Table;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.MaterializedResult;
import io.trino.testing.QueryRunner;
@@ -30,8 +29,8 @@

import static io.trino.plugin.hive.TableType.EXTERNAL_TABLE;
import static io.trino.plugin.iceberg.DataFileRecord.toDataFileRecord;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
@@ -50,9 +49,7 @@ protected QueryRunner createQueryRunner
.setIcebergProperties(Map.of("iceberg.unique-table-location", "true"))
.build();

-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);

return queryRunner;
}
----------------------------------------
@@ -17,7 +17,6 @@
import io.trino.filesystem.TrinoFileSystem;
import io.trino.metastore.HiveMetastore;
import io.trino.metastore.Table;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.MaterializedResult;
import io.trino.testing.QueryRunner;
@@ -27,12 +26,11 @@

import java.io.File;
import java.io.IOException;
-import java.util.Optional;

import static io.trino.plugin.hive.TableType.EXTERNAL_TABLE;
import static io.trino.plugin.iceberg.DataFileRecord.toDataFileRecord;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static io.trino.testing.TestingNames.randomNameSuffix;
import static java.lang.String.format;
@@ -53,9 +51,7 @@ protected QueryRunner createQueryRunner
QueryRunner queryRunner = IcebergQueryRunner.builder()
.build();

-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);

return queryRunner;
}
----------------------------------------
@@ -17,16 +17,13 @@
import io.trino.filesystem.TrinoFileSystem;
import io.trino.metastore.HiveMetastore;
import io.trino.metastore.Table;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.DistributedQueryRunner;
import org.junit.jupiter.api.Test;

-import java.util.Optional;

import static io.trino.plugin.hive.TableType.EXTERNAL_TABLE;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static org.assertj.core.api.Assertions.assertThat;

@@ -44,9 +41,7 @@ protected DistributedQueryRunner createQueryRunner
.addIcebergProperty("iceberg.object-store-layout.enabled", "true")
.build();

-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);

fileSystem = getFileSystemFactory(queryRunner).create(SESSION);

----------------------------------------
@@ -30,7 +30,6 @@
import io.trino.plugin.blackhole.BlackHolePlugin;
import io.trino.plugin.hive.HiveStorageFormat;
import io.trino.plugin.hive.TestingHivePlugin;
-import io.trino.plugin.hive.metastore.HiveMetastoreFactory;
import io.trino.plugin.iceberg.fileio.ForwardingFileIo;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.Range;
@@ -95,8 +94,8 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.getOnlyElement;
-import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergTestUtils.getFileSystemFactory;
+import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
import static io.trino.plugin.iceberg.util.EqualityDeleteUtils.writeEqualityDeleteForTable;
import static io.trino.plugin.iceberg.util.EqualityDeleteUtils.writeEqualityDeleteForTableWithSchema;
import static io.trino.spi.type.BigintType.BIGINT;
@@ -134,9 +133,7 @@ protected QueryRunner createQueryRunner
.setInitialTables(NATION)
.build();

-metastore = ((IcebergConnector) queryRunner.getCoordinator().getConnector(ICEBERG_CATALOG)).getInjector()
-        .getInstance(HiveMetastoreFactory.class)
-        .createMetastore(Optional.empty());
+metastore = getHiveMetastore(queryRunner);

queryRunner.installPlugin(new TestingHivePlugin(queryRunner.getCoordinator().getBaseDataDir().resolve("iceberg_data")));
queryRunner.createCatalog("hive", "hive", ImmutableMap.<String, String>builder()