From 09b1d6b81d57817da4ec44df02d0e1de4a5b7d5e Mon Sep 17 00:00:00 2001 From: slothever Date: Mon, 22 Jan 2024 10:04:34 +0800 Subject: [PATCH 01/16] [feature](multi-catalog)support hms catalog create and drop table/db --- .../java/org/apache/doris/alter/Alter.java | 20 +-- .../doris/analysis/CreateTableStmt.java | 11 +- .../apache/doris/analysis/DropTableStmt.java | 3 - .../org/apache/doris/backup/RestoreJob.java | 6 +- .../doris/catalog/CatalogRecycleBin.java | 4 +- .../org/apache/doris/catalog/Database.java | 4 +- .../org/apache/doris/catalog/DatabaseIf.java | 2 +- .../java/org/apache/doris/catalog/Env.java | 8 +- .../apache/doris/catalog/InfoSchemaDb.java | 4 +- .../catalog/MysqlCompatibleDatabase.java | 6 +- .../org/apache/doris/catalog/MysqlDb.java | 2 +- .../apache/doris/datasource/CatalogMgr.java | 22 +-- .../doris/datasource/ExternalCatalog.java | 27 +++- .../doris/datasource/ExternalDatabase.java | 14 +- .../doris/datasource/InternalCatalog.java | 71 ++------- .../datasource/es/EsExternalDatabase.java | 2 +- .../datasource/hive/HMSExternalCatalog.java | 150 +++++++++++++++++- .../datasource/hive/HMSExternalDatabase.java | 10 +- .../hive/event/AlterDatabaseEvent.java | 4 +- .../hive/event/AlterTableEvent.java | 8 +- .../hive/event/CreateDatabaseEvent.java | 3 +- .../hive/event/CreateTableEvent.java | 2 +- .../hive/event/DropDatabaseEvent.java | 2 +- .../datasource/hive/event/DropTableEvent.java | 2 +- .../iceberg/IcebergExternalDatabase.java | 16 +- .../ExternalInfoSchemaDatabase.java | 2 +- .../datasource/jdbc/JdbcExternalDatabase.java | 2 +- .../MaxComputeExternalDatabase.java | 2 +- .../paimon/PaimonExternalDatabase.java | 11 +- .../datasource/test/TestExternalDatabase.java | 2 +- .../plans/commands/info/CreateTableInfo.java | 8 - .../apache/doris/analysis/AccessTestUtil.java | 2 +- .../apache/doris/backup/CatalogMocker.java | 6 +- .../apache/doris/backup/RestoreJobTest.java | 2 +- .../apache/doris/catalog/CatalogTestUtil.java | 6 +- 
.../apache/doris/catalog/DatabaseTest.java | 18 +-- .../doris/catalog/InfoSchemaDbTest.java | 4 +- .../org/apache/doris/catalog/MysqlDbTest.java | 4 +- .../apache/doris/clone/DiskRebalanceTest.java | 4 +- .../org/apache/doris/clone/RebalanceTest.java | 2 +- .../doris/common/util/UnitTestUtil.java | 2 +- .../apache/doris/http/DorisHttpTestCase.java | 6 +- .../apache/doris/qe/OlapQueryCacheTest.java | 14 +- 43 files changed, 311 insertions(+), 189 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java b/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java index 5de7fee4fa0f03..051d0fd6738767 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java +++ b/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java @@ -411,8 +411,8 @@ private void processModifyEngineInternal(Database db, Table externalTable, } odbcTable.writeLock(); try { - db.dropTable(mysqlTable.getName()); - db.createTable(odbcTable); + db.removeMemoryTable(mysqlTable.getName()); + db.addMemoryTable(odbcTable); if (!isReplay) { ModifyTableEngineOperationLog log = new ModifyTableEngineOperationLog(db.getId(), externalTable.getId(), prop); @@ -591,17 +591,17 @@ private void replaceTableInternal(Database db, OlapTable origTable, OlapTable ne String newTblName = newTbl.getName(); // drop origin table and new table - db.dropTable(oldTblName); - db.dropTable(newTblName); + db.removeMemoryTable(oldTblName); + db.removeMemoryTable(newTblName); // rename new table name to origin table name and add it to database newTbl.checkAndSetName(oldTblName, false); - db.createTable(newTbl); + db.addMemoryTable(newTbl); if (swapTable) { // rename origin table name to new table name and add it to database origTable.checkAndSetName(newTblName, false); - db.createTable(origTable); + db.addMemoryTable(origTable); } else { // not swap, the origin table is not used anymore, need to drop all its tablets. 
Env.getCurrentEnv().onEraseOlapTable(origTable, isReplay); @@ -633,8 +633,8 @@ private void modifyViewDef(Database db, View view, String inlineViewDef, long sq } view.setNewFullSchema(newFullSchema); String viewName = view.getName(); - db.dropTable(viewName); - db.createTable(view); + db.removeMemoryTable(viewName); + db.addMemoryTable(view); AlterViewInfo alterViewInfo = new AlterViewInfo(db.getId(), view.getId(), inlineViewDef, newFullSchema, sqlMode); @@ -669,8 +669,8 @@ public void replayModifyViewDef(AlterViewInfo alterViewInfo) throws MetaNotFound } view.setNewFullSchema(newFullSchema); - db.dropTable(viewName); - db.createTable(view); + db.removeMemoryTable(viewName); + db.addMemoryTable(view); LOG.info("replay modify view[{}] definition to {}", viewName, inlineViewDef); } finally { diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java index c6d4d294f120be..cb00a037331916 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java @@ -39,8 +39,7 @@ import org.apache.doris.common.util.ParseUtil; import org.apache.doris.common.util.PrintableMap; import org.apache.doris.common.util.PropertyAnalyzer; -import org.apache.doris.common.util.Util; -import org.apache.doris.datasource.es.EsUtil; +import org.apache.doris.external.elasticsearch.EsUtil; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.qe.ConnectContext; @@ -277,12 +276,6 @@ public List getIndexes() { @Override public void analyze(Analyzer analyzer) throws UserException { - if (Config.isCloudMode() && properties != null - && properties.containsKey(PropertyAnalyzer.ENABLE_UNIQUE_KEY_MERGE_ON_WRITE)) { - // FIXME: MOW is not supported in cloud mode yet. 
- properties.put(PropertyAnalyzer.ENABLE_UNIQUE_KEY_MERGE_ON_WRITE, "false"); - } - if (Strings.isNullOrEmpty(engineName) || engineName.equalsIgnoreCase(DEFAULT_ENGINE_NAME)) { this.properties = maybeRewriteByAutoBucket(distributionDesc, properties); } @@ -290,8 +283,6 @@ public void analyze(Analyzer analyzer) throws UserException { super.analyze(analyzer); tableName.analyze(analyzer); FeNameFormat.checkTableName(tableName.getTbl()); - // disallow external catalog - Util.prohibitExternalCatalog(tableName.getCtl(), this.getClass().getSimpleName()); InternalDatabaseUtil.checkDatabase(tableName.getDb(), ConnectContext.get()); if (!Env.getCurrentEnv().getAccessManager() .checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.CREATE)) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DropTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DropTableStmt.java index d2ff04186fb41a..70167d744daf5b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DropTableStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DropTableStmt.java @@ -22,7 +22,6 @@ import org.apache.doris.common.ErrorReport; import org.apache.doris.common.UserException; import org.apache.doris.common.util.InternalDatabaseUtil; -import org.apache.doris.common.util.Util; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.qe.ConnectContext; @@ -84,8 +83,6 @@ public void analyze(Analyzer analyzer) throws UserException { tableName.setDb(analyzer.getDefaultDb()); } tableName.analyze(analyzer); - // disallow external catalog - Util.prohibitExternalCatalog(tableName.getCtl(), this.getClass().getSimpleName()); InternalDatabaseUtil.checkDatabase(tableName.getDb(), ConnectContext.get()); // check access if (!Env.getCurrentEnv().getAccessManager().checkTblPriv(ConnectContext.get(), tableName.getDb(), diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java 
b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java index 2cb9b405557bcb..e04abd920fe1db 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java +++ b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java @@ -905,7 +905,7 @@ private void checkAndPrepareMeta() { } tbl.writeLock(); try { - if (!db.createTable(tbl)) { + if (!db.addMemoryTable(tbl)) { status = new Status(ErrCode.COMMON_ERROR, "Table " + tbl.getName() + " already exist in db: " + db.getFullName()); return; @@ -1286,7 +1286,7 @@ private void replayCheckAndPrepareMeta() { db.writeLock(); restoreTbl.writeLock(); try { - db.createTable(restoreTbl); + db.addMemoryTable(restoreTbl); } finally { restoreTbl.writeUnlock(); db.writeUnlock(); @@ -1936,7 +1936,7 @@ public void cancelInternal(boolean isReplay) { } } } - db.dropTable(restoreTbl.getName()); + db.removeMemoryTable(restoreTbl.getName()); } finally { restoreTbl.writeUnlock(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java index 3ae630744540c5..d0107e25517685 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java @@ -644,7 +644,7 @@ private void recoverAllTables(RecycleDatabaseInfo dbInfo) throws DdlException { } Table table = tableInfo.getTable(); - db.createTable(table); + db.addMemoryTable(table); LOG.info("recover db[{}] with table[{}]: {}", dbId, table.getId(), table.getName()); iterator.remove(); idToRecycleTime.remove(table.getId()); @@ -739,7 +739,7 @@ private synchronized boolean innerRecoverTable(Database db, Table table, String } } - db.createTable(table); + db.addMemoryTable(table); if (isReplay) { iterator.remove(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java index 
c4475bc3c3eaf3..a1d4bda7454972 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java @@ -416,7 +416,7 @@ public Pair createTableWithLock( } } - public boolean createTable(Table table) { + public boolean addMemoryTable(Table table) { boolean result = true; table.setQualifiedDbName(fullQualifiedName); String tableName = table.getName(); @@ -434,7 +434,7 @@ public boolean createTable(Table table) { return result; } - public void dropTable(String tableName) { + public void removeMemoryTable(String tableName) { if (Env.isStoredTableNamesLowerCase()) { tableName = tableName.toLowerCase(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java index 454851620e2b16..bc751a95a04c8e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java @@ -263,7 +263,7 @@ default OlapTable getOlapTableOrAnalysisException(String tableName) throws Analy return (OlapTable) table; } - void dropTable(String tableName); + void removeMemoryTable(String tableName); CatalogIf getCatalog(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index 1df6995381f09a..f5f5ba5763745e 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -4330,8 +4330,8 @@ public void renameTable(Database db, Table table, String newTableName) throws Dd table.setName(newTableName); } - db.dropTable(oldTableName); - db.createTable(table); + db.removeMemoryTable(oldTableName); + db.addMemoryTable(table); TableInfo tableInfo = TableInfo.createForTableRename(db.getId(), table.getId(), newTableName); editLog.logTableRename(tableInfo); @@ -4363,9 +4363,9 @@ public void 
replayRenameTable(TableInfo tableInfo) throws MetaNotFoundException table.writeLock(); try { String tableName = table.getName(); - db.dropTable(tableName); + db.removeMemoryTable(tableName); table.setName(newTableName); - db.createTable(table); + db.addMemoryTable(table); LOG.info("replay rename table[{}] to {}", tableName, newTableName); } finally { table.writeUnlock(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java index b72850b3e9493a..177f81643a82c8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java @@ -29,12 +29,12 @@ public InfoSchemaDb() { @Override protected void initTables() { for (Table table : SchemaTable.TABLE_MAP.values()) { - super.createTable(table); + super.addMemoryTable(table); } } @Override - public boolean createTable(Table table) { + public boolean addMemoryTable(Table table) { return false; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java index 12d915321ede50..57403310de7448 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java @@ -50,12 +50,12 @@ public MysqlCompatibleDatabase(long id, String name) { * @note: Rename a table of mysql database in MYSQL ls allowed. 
*/ @Override - public boolean createTable(Table table) { - return super.createTable(table); + public boolean addMemoryTable(Table table) { + return super.addMemoryTable(table); } @Override - public void dropTable(String name) { + public void removeMemoryTable(String name) { // Do nothing } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java index 0e09995d94c0b7..d09b5942c21bcd 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java @@ -52,7 +52,7 @@ public MysqlDb() { public void initTables() {} @Override - public boolean createTable(Table table) { + public boolean addMemoryTable(Table table) { return false; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index 9cf1f40a07b736..d01a9c27618c21 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -745,7 +745,7 @@ public void replayRefreshExternalTable(ExternalObjectLog log) { } } - public void dropExternalTable(String dbName, String tableName, String catalogName, boolean ignoreIfExists) + public void unloadExternalTable(String dbName, String tableName, String catalogName, boolean ignoreIfExists) throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { @@ -772,7 +772,7 @@ public void dropExternalTable(String dbName, String tableName, String catalogNam db.writeLock(); try { - db.dropTable(table.getName()); + db.removeMemoryTable(table.getName()); Env.getCurrentEnv().getExtMetaCacheMgr().invalidateTableCache( catalog.getId(), db.getFullName(), table.getName()); ((HMSExternalDatabase) db).setLastUpdateTime(System.currentTimeMillis()); @@ -792,9 +792,9 @@ public boolean 
externalTableExistInLocal(String dbName, String tableName, String return ((ExternalCatalog) catalog).tableExistInLocal(dbName, tableName); } - public void createExternalTableFromEvent(String dbName, String tableName, - String catalogName, long updateTime, - boolean ignoreIfExists) throws DdlException { + public void loadExternalTableFromEvent(String dbName, String tableName, + String catalogName, long updateTime, + boolean ignoreIfExists) throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { throw new DdlException("No catalog found with name: " + catalogName); @@ -826,7 +826,7 @@ public void createExternalTableFromEvent(String dbName, String tableName, db.writeLock(); try { - ((HMSExternalDatabase) db).createTable(tableName, tblId); + ((HMSExternalDatabase) db).addMemoryTable(tableName, tblId); ((HMSExternalDatabase) db).setLastUpdateTime(System.currentTimeMillis()); table = db.getTableNullable(tableName); if (table != null) { @@ -837,7 +837,8 @@ public void createExternalTableFromEvent(String dbName, String tableName, } } - public void dropExternalDatabase(String dbName, String catalogName, boolean ignoreIfNotExists) throws DdlException { + public void removeExternalDatabase(String dbName, String catalogName, boolean ignoreIfNotExists) + throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { throw new DdlException("No catalog found with name: " + catalogName); @@ -853,11 +854,12 @@ public void dropExternalDatabase(String dbName, String catalogName, boolean igno return; } - ((HMSExternalCatalog) catalog).dropDatabase(dbName); + ((HMSExternalCatalog) catalog).removeDatabase(dbName); Env.getCurrentEnv().getExtMetaCacheMgr().invalidateDbCache(catalog.getId(), dbName); } - public void createExternalDatabase(String dbName, String catalogName, boolean ignoreIfExists) throws DdlException { + public void addExternalDatabase(String dbName, String catalogName, boolean ignoreIfExists) + 
throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { throw new DdlException("No catalog found with name: " + catalogName); @@ -879,7 +881,7 @@ public void createExternalDatabase(String dbName, String catalogName, boolean ig return; } - ((HMSExternalCatalog) catalog).createDatabase(dbId, dbName); + ((HMSExternalCatalog) catalog).addDatabase(dbId, dbName); } public void addExternalPartitions(String catalogName, String dbName, String tableName, diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index 03ad99ce77f96f..c71c13e4e83624 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -17,6 +17,10 @@ package org.apache.doris.datasource; +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.Column; import org.apache.doris.catalog.DatabaseIf; import org.apache.doris.catalog.Env; @@ -25,6 +29,7 @@ import org.apache.doris.catalog.TableIf; import org.apache.doris.cluster.ClusterNamespace; import org.apache.doris.common.DdlException; +import org.apache.doris.common.UserException; import org.apache.doris.common.io.Text; import org.apache.doris.common.io.Writable; import org.apache.doris.common.util.Util; @@ -578,11 +583,29 @@ public void addDatabaseForTest(ExternalDatabase db) { dbNameToId.put(ClusterNamespace.getNameFromFullName(db.getFullName()), db.getId()); } - public void dropDatabase(String dbName) { + public void createDb(CreateDbStmt stmt) throws DdlException { throw new NotImplementedException("dropDatabase not implemented"); } - public void createDatabase(long dbId, String dbName) { + public void dropDb(DropDbStmt stmt) throws 
DdlException { + throw new NotImplementedException("dropDatabase not implemented"); + } + + @Override + public void createTable(CreateTableStmt stmt) throws UserException { + throw new NotImplementedException("createTable not implemented"); + } + + @Override + public void dropTable(DropTableStmt stmt) throws DdlException { + throw new NotImplementedException("dropTable not implemented"); + } + + public void removeDatabase(String dbName) { + throw new NotImplementedException("dropDatabase not implemented"); + } + + public void addDatabase(long dbId, String dbName) { throw new NotImplementedException("createDatabase not implemented"); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java index a4f4afde7eb002..cddd32e89d1910 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java @@ -154,7 +154,7 @@ public void replayInitDb(InitDatabaseLog log, ExternalCatalog catalog) { } } for (int i = 0; i < log.getCreateCount(); i++) { - T table = getExternalTable(log.getCreateTableNames().get(i), log.getCreateTableIds().get(i), catalog); + T table = newExternalTable(log.getCreateTableNames().get(i), log.getCreateTableIds().get(i), catalog); tmpTableNameToId.put(table.getName(), table.getId()); tmpIdToTbl.put(table.getId(), table); } @@ -190,7 +190,7 @@ protected void init() { } else { tblId = Env.getCurrentEnv().getNextId(); tmpTableNameToId.put(tableName, tblId); - T table = getExternalTable(tableName, tblId, extCatalog); + T table = newExternalTable(tableName, tblId, extCatalog); tmpIdToTbl.put(tblId, table); initDatabaseLog.addCreateTable(tblId, tableName); } @@ -205,7 +205,7 @@ protected void init() { Env.getCurrentEnv().getEditLog().logInitExternalDb(initDatabaseLog); } - protected abstract T getExternalTable(String tableName, long tblId, 
ExternalCatalog catalog); + protected abstract T newExternalTable(String tableName, long tblId, ExternalCatalog catalog); public T getTableForReplay(long tableId) { return idToTbl.get(tableId); @@ -364,8 +364,8 @@ public void gsonPostProcess() throws IOException { } @Override - public void dropTable(String tableName) { - throw new NotImplementedException("dropTable() is not implemented"); + public void removeMemoryTable(String tableName) { + throw new NotImplementedException("removeMemoryTable is not implemented at external database"); } @Override @@ -374,8 +374,8 @@ public CatalogIf getCatalog() { } // Only used for sync hive metastore event - public void createTable(String tableName, long tableId) { - throw new NotImplementedException("createTable() is not implemented"); + public void addMemoryTable(String tableName, long tableId) { + throw new NotImplementedException("addMemoryTable is not implemented at external database."); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java index a8231d0014c11a..92e0c4ec0a6c39 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java @@ -41,7 +41,6 @@ import org.apache.doris.analysis.FunctionCallExpr; import org.apache.doris.analysis.HashDistributionDesc; import org.apache.doris.analysis.KeysDesc; -import org.apache.doris.analysis.LiteralExpr; import org.apache.doris.analysis.PartitionDesc; import org.apache.doris.analysis.PartitionKeyDesc; import org.apache.doris.analysis.QueryStmt; @@ -65,7 +64,6 @@ import org.apache.doris.catalog.DatabaseProperty; import org.apache.doris.catalog.DistributionInfo; import org.apache.doris.catalog.DistributionInfo.DistributionInfoType; -import org.apache.doris.catalog.DynamicPartitionProperty; import org.apache.doris.catalog.Env; import 
org.apache.doris.catalog.EnvFactory; import org.apache.doris.catalog.EsTable; @@ -177,7 +175,6 @@ import lombok.Getter; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.time.StopWatch; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -653,7 +650,7 @@ public void recoverTable(RecoverTableStmt recoverStmt) throws DdlException { String tableName = recoverStmt.getTableName(); String newTableName = recoverStmt.getNewTableName(); - Database db = getDbOrDdlException(dbName); + Database db = (Database) getDbOrDdlException(dbName); db.writeLockOrDdlException(); try { if (Strings.isNullOrEmpty(newTableName)) { @@ -702,6 +699,10 @@ public void recoverPartition(RecoverPartitionStmt recoverStmt) throws DdlExcepti } } + public void replayEraseDatabase(long dbId) throws DdlException { + Env.getCurrentRecycleBin().replayEraseDatabase(dbId); + } + public void replayRecoverDatabase(RecoverInfo info) { long dbId = info.getDbId(); String newDbName = info.getNewDbName(); @@ -835,8 +836,6 @@ public void replayRenameDatabase(String dbName, String newDbName) { // Drop table public void dropTable(DropTableStmt stmt) throws DdlException { - Map costTimes = new TreeMap(); - StopWatch watch = StopWatch.createStarted(); String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); LOG.info("begin to drop table: {} from db: {}, is force: {}", tableName, dbName, stmt.isForceDrop()); @@ -848,8 +847,6 @@ public void dropTable(DropTableStmt stmt) throws DdlException { } db.writeLockOrDdlException(); - watch.split(); - costTimes.put("1:dbWriteLock", watch.getSplitTime()); try { Table table = db.getTableNullable(tableName); if (table == null) { @@ -885,12 +882,8 @@ public void dropTable(DropTableStmt stmt) throws DdlException { + "] cannot be dropped. 
If you want to forcibly drop(cannot be recovered)," + " please use \"DROP table FORCE\"."); } - watch.split(); - costTimes.put("2:existCommittedTxns", watch.getSplitTime()); } table.writeLock(); - watch.split(); - costTimes.put("3:tableWriteLock", watch.getSplitTime()); long recycleTime = 0; try { if (table instanceof OlapTable && !stmt.isForceDrop()) { @@ -906,12 +899,8 @@ public void dropTable(DropTableStmt stmt) throws DdlException { Env.getCurrentEnv().getMtmvService().dropMTMV((MTMV) table); } unprotectDropTable(db, table, stmt.isForceDrop(), false, 0); - watch.split(); - costTimes.put("4:unprotectDropTable", watch.getSplitTime()); if (!stmt.isForceDrop()) { recycleTime = Env.getCurrentRecycleBin().getRecycleTimeById(table.getId()); - watch.split(); - costTimes.put("5:getRecycleTimeById", watch.getSplitTime()); } } finally { table.writeUnlock(); @@ -931,10 +920,7 @@ public void dropTable(DropTableStmt stmt) throws DdlException { } finally { db.writeUnlock(); } - watch.stop(); - costTimes.put("6:total", watch.getTime()); - LOG.info("finished dropping table: {} from db: {}, is force: {} cost: {}", - tableName, dbName, stmt.isForceDrop(), costTimes); + LOG.info("finished dropping table: {} from db: {}, is force: {}", tableName, dbName, stmt.isForceDrop()); } public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, boolean isReplay, @@ -949,12 +935,9 @@ public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, Env.getCurrentEnv().getMtmvService().deregisterMTMV((MTMV) table); } - db.dropTable(table.getName()); - StopWatch watch = StopWatch.createStarted(); + db.removeMemoryTable(table.getName()); Env.getCurrentRecycleBin().recycleTable(db.getId(), table, isReplay, isForceDrop, recycleTime); - watch.stop(); - LOG.info("finished dropping table[{}] in db[{}] recycleTable cost: {}ms", - table.getName(), db.getFullName(), watch.getTime()); + LOG.info("finished dropping table[{}] in db[{}]", table.getName(), 
db.getFullName()); return true; } @@ -1265,11 +1248,6 @@ public void createTableAsSelect(CreateTableAsSelectStmt stmt) throws DdlExceptio default: throw new DdlException("Unsupported string type for ctas"); } - if (resultExpr.getSrcSlotRef() != null - && resultExpr.getSrcSlotRef().getTable() != null - && !resultExpr.getSrcSlotRef().getTable().isManagedTable()) { - typeDef = new TypeDef(ScalarType.createStringType()); - } } else if (resultType.isDecimalV2() && resultType.equals(ScalarType.DECIMALV2)) { typeDef = new TypeDef(ScalarType.createDecimalType(27, 9)); } else if (resultType.isDecimalV3()) { @@ -1813,12 +1791,10 @@ public void dropPartition(Database db, OlapTable olapTable, DropPartitionClause recycleTime = Env.getCurrentRecycleBin().getRecycleTimeById(partition.getId()); } } - long version = olapTable.getNextVersion(); - long versionTime = System.currentTimeMillis(); - olapTable.updateVisibleVersionAndTime(version, versionTime); + // log DropPartitionInfo info = new DropPartitionInfo(db.getId(), olapTable.getId(), partitionName, isTempPartition, - clause.isForceDrop(), recycleTime, version, versionTime); + clause.isForceDrop(), recycleTime); Env.getCurrentEnv().getEditLog().logDropPartition(info); LOG.info("succeed in dropping partition[{}], table : [{}-{}], is temp : {}, is force : {}", @@ -1839,7 +1815,6 @@ public void replayDropPartition(DropPartitionInfo info) throws MetaNotFoundExcep Env.getCurrentRecycleBin().setRecycleTimeByIdForReplay(partition.getId(), info.getRecycleTime()); } } - olapTable.updateVisibleVersionAndTime(info.getVersion(), info.getVersionTime()); } finally { olapTable.writeUnlock(); } @@ -2026,7 +2001,7 @@ protected void afterCreatePartitions(long tableId, List partitionIds, List throws DdlException { } - public void checkAvailableCapacity(Database db) throws DdlException { + protected void checkAvailableCapacity(Database db) throws DdlException { // check cluster capacity Env.getCurrentSystemInfo().checkAvailableCapacity(); // check 
db quota @@ -2497,7 +2472,7 @@ private void createOlapTable(Database db, CreateTableStmt stmt) throws UserExcep if (!col.getType().isFixedPointType() && !col.getType().isDateType()) { throw new DdlException("Sequence type only support integer types and date types"); } - olapTable.setSequenceMapCol(col.getName()); + olapTable.setSequenceMapCol(sequenceMapCol); olapTable.setSequenceInfo(col.getType()); } } catch (Exception e) { @@ -2616,27 +2591,7 @@ private void createOlapTable(Database db, CreateTableStmt stmt) throws UserExcep "Only support dynamic partition properties on range partition table"); } } - // check the interval same between dynamic & auto range partition - DynamicPartitionProperty dynamicProperty = olapTable.getTableProperty() - .getDynamicPartitionProperty(); - if (dynamicProperty.isExist() && dynamicProperty.getEnable() - && partitionDesc.isAutoCreatePartitions()) { - String dynamicUnit = dynamicProperty.getTimeUnit(); - ArrayList autoExprs = partitionDesc.getPartitionExprs(); - for (Expr autoExpr : autoExprs) { - Expr func = (FunctionCallExpr) autoExpr; - for (Expr child : func.getChildren()) { - if (child instanceof LiteralExpr) { - String autoUnit = ((LiteralExpr) child).getStringValue(); - if (!dynamicUnit.equalsIgnoreCase(autoUnit)) { - throw new AnalysisException( - "If support auto partition and dynamic partition at same time, " - + "they must have the same interval unit."); - } - } - } - } - } + } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/es/EsExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/es/EsExternalDatabase.java index 7d576bba1b01c7..cf4129f25a5cf6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/es/EsExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/es/EsExternalDatabase.java @@ -38,7 +38,7 @@ public EsExternalDatabase(ExternalCatalog extCatalog, long id, String 
name) { } @Override - protected EsExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected EsExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new EsExternalTable(tblId, tableName, name, (EsExternalCatalog) extCatalog); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index afbbd36f02431e..0aa6b592ebe54b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -17,16 +17,24 @@ package org.apache.doris.datasource.hive; +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.HdfsResource; import org.apache.doris.cluster.ClusterNamespace; import org.apache.doris.common.Config; import org.apache.doris.common.DdlException; +import org.apache.doris.common.UserException; import org.apache.doris.common.security.authentication.AuthenticationConfig; import org.apache.doris.common.security.authentication.HadoopUGI; +import org.apache.doris.common.util.QueryableReentrantLock; +import org.apache.doris.common.util.Util; import org.apache.doris.datasource.CatalogProperty; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; +import org.apache.doris.datasource.ExternalMetaIdMgr; import org.apache.doris.datasource.ExternalTable; import org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.SessionContext; @@ -38,19 +46,21 @@ import com.google.common.collect.Lists; import org.apache.commons.lang3.math.NumberUtils; import org.apache.hadoop.hive.conf.HiveConf; 
+import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.concurrent.TimeUnit; /** * External catalog for hive metastore compatible data sources. */ public class HMSExternalCatalog extends ExternalCatalog { private static final Logger LOG = LogManager.getLogger(HMSExternalCatalog.class); - private static final int MIN_CLIENT_POOL_SIZE = 8; protected HMSCachedClient client; @@ -63,6 +73,7 @@ public class HMSExternalCatalog extends ExternalCatalog { public static final int FILE_META_CACHE_NO_TTL = -1; // 0 means file cache is disabled; >0 means file cache with ttl; public static final int FILE_META_CACHE_TTL_DISABLE_CACHE = 0; + private QueryableReentrantLock lock = new QueryableReentrantLock(true); public HMSExternalCatalog() { catalogProperty = new CatalogProperty(null, null); @@ -216,6 +227,143 @@ public void notifyPropertiesUpdated(Map updatedProps) { } } + private boolean tryLock(boolean mustLock) { + while (true) { + try { + if (!lock.tryLock(Config.catalog_try_lock_timeout_ms, TimeUnit.MILLISECONDS)) { + // to see which thread held this lock for long time. 
+ Thread owner = lock.getOwner(); + if (owner != null) { + // There are many catalog timeout during regression test + // And this timeout should not happen very often, so it could be info log + LOG.info("catalog lock is held by: {}", Util.dumpThread(owner, 10)); + } + + if (mustLock) { + continue; + } else { + return false; + } + } + return true; + } catch (InterruptedException e) { + LOG.warn("got exception while getting catalog lock", e); + if (mustLock) { + continue; + } else { + return lock.isHeldByCurrentThread(); + } + } + } + } + + private void unlock() { + if (lock.isHeldByCurrentThread()) { + this.lock.unlock(); + } + } + + @Override + public void createDb(CreateDbStmt stmt) throws DdlException { + String fullDbName = stmt.getFullDbName(); + Map properties = stmt.getProperties(); + long id = Env.getCurrentEnv().getNextId(); + + if (!tryLock(false)) { + throw new DdlException("Failed to acquire catalog lock. Try again"); + } + try { + HiveCatalogDatabase catalogDatabase = new HiveCatalogDatabase(); + catalogDatabase.setDbName(fullDbName); + catalogDatabase.setProperties(properties); + if (properties.containsKey("location_uri")) { + catalogDatabase.setLocationUri(properties.get("location_uri")); + } + catalogDatabase.setComment(properties.getOrDefault("comment", "")); + client.createDatabase(catalogDatabase); + addDatabase(id, fullDbName); + } finally { + unlock(); + } + LOG.info("createDb dbName = " + fullDbName + ", id = " + id); + } + + public void dropDb(DropDbStmt stmt) throws DdlException { + if (!tryLock(false)) { + throw new DdlException("Failed to acquire catalog lock. Try again"); + } + try { + client.dropDatabase(stmt.getDbName()); + removeDatabase(stmt.getDbName()); + } finally { + unlock(); + } + } + + @Override + public void createTable(CreateTableStmt stmt) throws UserException { + if (!tryLock(false)) { + throw new DdlException("Failed to acquire catalog lock. 
Try again"); + } + String dbName = stmt.getDbName(); + String tblName = stmt.getTableName(); + ExternalDatabase db = getDbNullable(dbName); + if (db == null) { + throw new UserException("Failed to get database: '" + dbName + "' in catalog: " + this.getName()); + } + try { + HiveCatalogTable catalogTable = new HiveCatalogTable(); + catalogTable.setDbName(dbName); + catalogTable.setTableName(tblName); + Map props = stmt.getExtProperties(); + catalogTable.setProperties(props); + String inputFormat = props.getOrDefault("input_format", + "org.apache.hadoop.mapred.TextInputFormat"); + String outputFormat = props.getOrDefault("output_format", + "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"); + catalogTable.setInputFormat(inputFormat); + catalogTable.setOutputFormat(outputFormat); + catalogTable.setPartitionKeys(parsePartitionKeys(props)); + client.createTable(catalogTable, stmt.isSetIfNotExists()); + long tableId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(getId(), dbName, tblName); + if (tableId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { + return; + } + db.addMemoryTable(tblName, tableId); + } finally { + unlock(); + } + } + + private static List parsePartitionKeys(Map props) { + List parsedKeys = new ArrayList<>(); + String pkStr = props.getOrDefault("partition_keys", ""); + if (pkStr.isEmpty()) { + return parsedKeys; + } else { + // TODO: parse string to partition keys list + return parsedKeys; + } + } + + @Override + public void dropTable(DropTableStmt stmt) throws DdlException { + if (!tryLock(false)) { + throw new DdlException("Failed to acquire catalog lock. 
Try again"); + } + String dbName = stmt.getDbName(); + ExternalDatabase db = getDbNullable(stmt.getDbName()); + if (db == null) { + throw new DdlException("Failed to get database: '" + dbName + "' in catalog: " + this.getName()); + } + try { + client.dropTable(dbName, stmt.getTableName()); + db.removeMemoryTable(stmt.getTableName()); + } finally { + unlock(); + } + } + @Override public void setDefaultPropsWhenCreating(boolean isReplay) { if (isReplay) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java index 2287909f26a84a..ee9b033c256820 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java @@ -42,7 +42,7 @@ public HMSExternalDatabase(ExternalCatalog extCatalog, long id, String name) { } @Override - protected HMSExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected HMSExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new HMSExternalTable(tblId, tableName, name, (HMSExternalCatalog) extCatalog); } @@ -52,10 +52,11 @@ public void addTableForTest(HMSExternalTable tbl) { } @Override - public void dropTable(String tableName) { + public void removeMemoryTable(String tableName) { if (LOG.isDebugEnabled()) { LOG.debug("replayDropTableFromEvent [{}]", tableName); } + LOG.debug("replayDropTableFromEvent [{}]", tableName); Long tableId = tableNameToId.remove(tableName); if (tableId == null) { LOG.warn("replayDropTableFromEvent [{}] failed", tableName); @@ -65,12 +66,11 @@ public void dropTable(String tableName) { } @Override - public void createTable(String tableName, long tableId) { + public void addMemoryTable(String tableName, long tableId) { if (LOG.isDebugEnabled()) { LOG.debug("create table [{}]", tableName); } 
tableNameToId.put(tableName, tableId); - HMSExternalTable table = getExternalTable(tableName, tableId, extCatalog); - idToTbl.put(tableId, table); + idToTbl.put(tableId, newExternalTable(tableName, tableId, extCatalog)); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java index 0d3cc2edc41f91..79433197332167 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java @@ -87,8 +87,8 @@ private void processRename() throws DdlException { catalogName, dbAfter.getName()); return; } - Env.getCurrentEnv().getCatalogMgr().dropExternalDatabase(dbBefore.getName(), catalogName, true); - Env.getCurrentEnv().getCatalogMgr().createExternalDatabase(dbAfter.getName(), catalogName, true); + Env.getCurrentEnv().getCatalogMgr().removeExternalDatabase(dbBefore.getName(), catalogName, true); + Env.getCurrentEnv().getCatalogMgr().addExternalDatabase(dbAfter.getName(), catalogName, true); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java index c69812a22b30db..89d5c4a65270a9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java @@ -98,9 +98,9 @@ private void processRecreateTable() throws DdlException { return; } Env.getCurrentEnv().getCatalogMgr() - .dropExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); + .unloadExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); Env.getCurrentEnv().getCatalogMgr() - .createExternalTableFromEvent( + .loadExternalTableFromEvent( 
tableAfter.getDbName(), tableAfter.getTableName(), catalogName, eventTime, true); } @@ -117,9 +117,9 @@ private void processRename() throws DdlException { return; } Env.getCurrentEnv().getCatalogMgr() - .dropExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); + .unloadExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); Env.getCurrentEnv().getCatalogMgr() - .createExternalTableFromEvent( + .loadExternalTableFromEvent( tableAfter.getDbName(), tableAfter.getTableName(), catalogName, eventTime, true); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java index d79d23824ab163..2908cd3286707b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java @@ -55,8 +55,7 @@ protected static List getEvents(NotificationEvent event, protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}]", catalogName, dbName); - Env.getCurrentEnv().getCatalogMgr() - .createExternalDatabase(dbName, catalogName, true); + Env.getCurrentEnv().getCatalogMgr().addExternalDatabase(dbName, catalogName, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java index 246ce8626f4aca..2d8967dbaf5f04 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java @@ -80,7 +80,7 @@ protected void process() throws MetastoreNotificationException { 
try { infoLog("catalogName:[{}],dbName:[{}],tableName:[{}]", catalogName, dbName, tblName); Env.getCurrentEnv().getCatalogMgr() - .createExternalTableFromEvent(dbName, hmsTbl.getTableName(), catalogName, eventTime, true); + .loadExternalTableFromEvent(dbName, hmsTbl.getTableName(), catalogName, eventTime, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java index ca69e6f14d015a..f75d3f25888d89 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java @@ -55,7 +55,7 @@ protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}]", catalogName, dbName); Env.getCurrentEnv().getCatalogMgr() - .dropExternalDatabase(dbName, catalogName, true); + .removeExternalDatabase(dbName, catalogName, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java index 0f62e2460820ad..335d6e312e2335 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java @@ -78,7 +78,7 @@ protected boolean willChangeTableName() { protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}],tableName:[{}]", catalogName, dbName, tableName); - Env.getCurrentEnv().getCatalogMgr().dropExternalTable(dbName, tableName, catalogName, true); + 
Env.getCurrentEnv().getCatalogMgr().unloadExternalTable(dbName, tableName, catalogName, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java index 5f1fb6914677f4..8ea86fdf904ef0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java @@ -17,6 +17,7 @@ package org.apache.doris.datasource.iceberg; +import org.apache.doris.catalog.TableIf; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.InitDatabaseLog; @@ -24,6 +25,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; + public class IcebergExternalDatabase extends ExternalDatabase { private static final Logger LOG = LogManager.getLogger(IcebergExternalDatabase.class); @@ -33,12 +38,17 @@ public IcebergExternalDatabase(ExternalCatalog extCatalog, Long id, String name) } @Override - protected IcebergExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected IcebergExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new IcebergExternalTable(tblId, tableName, name, (IcebergExternalCatalog) extCatalog); } + public List getTablesOnIdOrder() { + // Sort the name instead, because the id may change. 
+ return getTables().stream().sorted(Comparator.comparing(TableIf::getName)).collect(Collectors.toList()); + } + @Override - public void dropTable(String tableName) { + public void removeMemoryTable(String tableName) { if (LOG.isDebugEnabled()) { LOG.debug("drop table [{}]", tableName); } @@ -50,7 +60,7 @@ public void dropTable(String tableName) { } @Override - public void createTable(String tableName, long tableId) { + public void addMemoryTable(String tableName, long tableId) { if (LOG.isDebugEnabled()) { LOG.debug("create table [{}]", tableName); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/infoschema/ExternalInfoSchemaDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/infoschema/ExternalInfoSchemaDatabase.java index 843083076e8f9a..d22b57dc907b1f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/infoschema/ExternalInfoSchemaDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/infoschema/ExternalInfoSchemaDatabase.java @@ -44,7 +44,7 @@ public static List listTableNames() { } @Override - protected ExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected ExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new ExternalInfoSchemaTable(tblId, tableName, catalog); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/JdbcExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/JdbcExternalDatabase.java index 29d95038c2ee5c..0bd39f8e3ed243 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/JdbcExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/JdbcExternalDatabase.java @@ -35,7 +35,7 @@ public JdbcExternalDatabase(ExternalCatalog extCatalog, long id, String name) { } @Override - protected JdbcExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected JdbcExternalTable 
newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new JdbcExternalTable(tblId, tableName, name, (JdbcExternalCatalog) extCatalog); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/MaxComputeExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/MaxComputeExternalDatabase.java index 98bb28aecbf080..750f1ecf55518e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/MaxComputeExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/MaxComputeExternalDatabase.java @@ -37,7 +37,7 @@ public MaxComputeExternalDatabase(ExternalCatalog extCatalog, long id, String na } @Override - protected MaxComputeExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected MaxComputeExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new MaxComputeExternalTable(tblId, tableName, name, (MaxComputeExternalCatalog) extCatalog); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java index b6d39da573df48..e4c33de60ec858 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java @@ -33,12 +33,17 @@ public PaimonExternalDatabase(ExternalCatalog extCatalog, Long id, String name) } @Override - protected PaimonExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected PaimonExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new PaimonExternalTable(tblId, tableName, name, (PaimonExternalCatalog) extCatalog); } + public List getTablesOnIdOrder() { + // Sort the name instead, because the id may change. 
+ return getTables().stream().sorted(Comparator.comparing(TableIf::getName)).collect(Collectors.toList()); + } + @Override - public void dropTable(String tableName) { + public void removeMemoryTable(String tableName) { if (LOG.isDebugEnabled()) { LOG.debug("drop table [{}]", tableName); } @@ -50,7 +55,7 @@ public void dropTable(String tableName) { } @Override - public void createTable(String tableName, long tableId) { + public void addMemoryTable(String tableName, long tableId) { if (LOG.isDebugEnabled()) { LOG.debug("create table [{}]", tableName); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/test/TestExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/test/TestExternalDatabase.java index 2a797a726e9341..cb9707611f83c3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/test/TestExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/test/TestExternalDatabase.java @@ -28,7 +28,7 @@ public TestExternalDatabase(ExternalCatalog extCatalog, long id, String name) { } @Override - protected TestExternalTable getExternalTable(String tableName, long tblId, ExternalCatalog catalog) { + protected TestExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new TestExternalTable(tblId, tableName, name, (TestExternalCatalog) extCatalog); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java index 75aa41388961ce..47b731b602f575 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java @@ -49,7 +49,6 @@ import org.apache.doris.common.util.InternalDatabaseUtil; import org.apache.doris.common.util.ParseUtil; import 
org.apache.doris.common.util.PropertyAnalyzer; -import org.apache.doris.common.util.Util; import org.apache.doris.datasource.InternalCatalog; import org.apache.doris.datasource.es.EsUtil; import org.apache.doris.mysql.privilege.PrivPredicate; @@ -254,13 +253,6 @@ public void validate(ConnectContext ctx) { } } - // disallow external catalog - try { - Util.prohibitExternalCatalog(ctlName, this.getClass().getSimpleName()); - } catch (Exception ex) { - throw new AnalysisException(ex.getMessage(), ex.getCause()); - } - // analyze table name if (Strings.isNullOrEmpty(dbName)) { dbName = ctx.getDatabase(); diff --git a/fe/fe-core/src/test/java/org/apache/doris/analysis/AccessTestUtil.java b/fe/fe-core/src/test/java/org/apache/doris/analysis/AccessTestUtil.java index f29df419e3427e..1920d34675fe2e 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/analysis/AccessTestUtil.java +++ b/fe/fe-core/src/test/java/org/apache/doris/analysis/AccessTestUtil.java @@ -124,7 +124,7 @@ public static Env fetchAdminCatalog() { KeysType.AGG_KEYS); table.addPartition(partition); table.setBaseIndexId(baseIndex.getId()); - db.createTable(table); + db.addMemoryTable(table); InternalCatalog catalog = Deencapsulation.newInstance(InternalCatalog.class); new Expectations(catalog) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/backup/CatalogMocker.java b/fe/fe-core/src/test/java/org/apache/doris/backup/CatalogMocker.java index 906f82e703679d..7810b5917b2e6e 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/backup/CatalogMocker.java +++ b/fe/fe-core/src/test/java/org/apache/doris/backup/CatalogMocker.java @@ -263,7 +263,7 @@ public static Database mockDb() throws UserException { olapTable.setIndexMeta(TEST_TBL_ID, TEST_TBL_NAME, TEST_TBL_BASE_SCHEMA, 0, SCHEMA_HASH, (short) 1, TStorageType.COLUMN, KeysType.AGG_KEYS); olapTable.addPartition(partition); - db.createTable(olapTable); + db.addMemoryTable(olapTable); // 2. 
mysql table Map mysqlProp = Maps.newHashMap(); @@ -279,7 +279,7 @@ public static Database mockDb() throws UserException { } catch (DdlException e) { e.printStackTrace(); } - db.createTable(mysqlTable); + db.addMemoryTable(mysqlTable); // 3. range partition olap table MaterializedIndex baseIndexP1 = new MaterializedIndex(TEST_TBL2_ID, IndexState.NORMAL); @@ -387,7 +387,7 @@ public static Database mockDb() throws UserException { olapTable2.setIndexMeta(TEST_ROLLUP_ID, TEST_ROLLUP_NAME, TEST_ROLLUP_SCHEMA, 0, ROLLUP_SCHEMA_HASH, (short) 1, TStorageType.COLUMN, KeysType.AGG_KEYS); - db.createTable(olapTable2); + db.addMemoryTable(olapTable2); return db; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java index d361777fdd56c3..7079f727a6d90e 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java @@ -244,7 +244,7 @@ boolean await(long timeout, TimeUnit unit) { } // drop this table, cause we want to try restoring this table - db.dropTable(expectedRestoreTbl.getName()); + db.removeMemoryTable(expectedRestoreTbl.getName()); job = new RestoreJob(label, "2018-01-01 01:01:01", db.getId(), db.getFullName(), jobInfo, false, new ReplicaAllocation((short) 3), 100000, -1, false, false, false, env, repo.getId()); diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java index 9d35bbcd7a2698..5aa0a9ad17320d 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java @@ -230,7 +230,7 @@ public static Database createSimpleDb(long dbId, long tableId, long partitionId, table.setBaseIndexId(indexId); // db Database db = new Database(dbId, testDb1); - db.createTable(table); + 
db.addMemoryTable(table); // add a es table to catalog try { @@ -288,7 +288,7 @@ public static void createDupTable(Database db) { TStorageType.COLUMN, KeysType.DUP_KEYS); table.setBaseIndexId(testIndexId2); // db - db.createTable(table); + db.addMemoryTable(table); } public static void createEsTable(Database db) throws DdlException { @@ -319,7 +319,7 @@ public static void createEsTable(Database db) throws DdlException { properties.put(EsResource.KEYWORD_SNIFF, "true"); EsTable esTable = new EsTable(testEsTableId1, testEsTable1, columns, properties, partitionInfo); - db.createTable(esTable); + db.addMemoryTable(esTable); } public static Backend createBackend(long id, String host, int heartPort, int bePort, int httpPort) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java index 3f8e2eb08ae803..f18b195f61dd18 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java @@ -122,8 +122,8 @@ public void getTablesOnIdOrderOrThrowExceptionTest() throws MetaNotFoundExceptio List baseSchema2 = new LinkedList<>(); OlapTable table2 = new OlapTable(2001L, "baseTable2", baseSchema2, KeysType.DUP_KEYS, new SinglePartitionInfo(), new RandomDistributionInfo(10)); - db.createTable(table1); - db.createTable(table2); + db.addMemoryTable(table1); + db.addMemoryTable(table2); List tableIdList = Lists.newArrayList(2001L, 2000L); List tableList = db.getTablesOnIdOrderOrThrowException(tableIdList); Assert.assertEquals(2, tableList.size()); @@ -138,7 +138,7 @@ public void getTableOrThrowExceptionTest() throws MetaNotFoundException { List baseSchema = new LinkedList<>(); OlapTable table = new OlapTable(2000L, "baseTable", baseSchema, KeysType.AGG_KEYS, new SinglePartitionInfo(), new RandomDistributionInfo(10)); - db.createTable(table); + db.addMemoryTable(table); Table resultTable1 = 
db.getTableOrMetaException(2000L, Table.TableType.OLAP); Table resultTable2 = db.getTableOrMetaException("baseTable", Table.TableType.OLAP); Assert.assertEquals(table, resultTable1); @@ -168,9 +168,9 @@ public void createAndDropPartitionTest() { table.addPartition(partition); // create - Assert.assertTrue(db.createTable(table)); + Assert.assertTrue(db.addMemoryTable(table)); // duplicate - Assert.assertFalse(db.createTable(table)); + Assert.assertFalse(db.addMemoryTable(table)); Assert.assertEquals(table, db.getTableNullable(table.getId())); Assert.assertEquals(table, db.getTableNullable(table.getName())); @@ -185,11 +185,11 @@ public void createAndDropPartitionTest() { // drop // drop not exist tableFamily - db.dropTable("invalid"); + db.removeMemoryTable("invalid"); Assert.assertEquals(1, db.getTables().size()); - db.createTable(table); - db.dropTable(table.getName()); + db.addMemoryTable(table); + db.removeMemoryTable(table.getName()); Assert.assertEquals(0, db.getTables().size()); } @@ -234,7 +234,7 @@ public void testSerialization() throws Exception { table.setIndexMeta(1L, "test", column, 1, 1, shortKeyColumnCount, TStorageType.COLUMN, KeysType.AGG_KEYS); Deencapsulation.setField(table, "baseIndexId", 1); table.addPartition(partition); - db2.createTable(table); + db2.addMemoryTable(table); db2.write(dos); dos.flush(); diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java index 9abb1322e6dc54..6b78249babd8f7 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java @@ -29,9 +29,9 @@ public class InfoSchemaDbTest { public void testNormal() throws IOException, DdlException { Database db = new InfoSchemaDb(); - Assert.assertFalse(db.createTable(null)); + Assert.assertFalse(db.addMemoryTable(null)); Assert.assertFalse(db.createTableWithLock(null, false, 
false).first); - db.dropTable("authors"); + db.removeMemoryTable("authors"); Assert.assertThrows(IOException.class, () -> db.write(null)); Assert.assertNull(db.getTableNullable("authors")); } diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java index 788780202b44d8..941f96875ec985 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java @@ -29,9 +29,9 @@ public class MysqlDbTest { public void testNormal() throws IOException, DdlException { Database db = new MysqlDb(); - Assert.assertFalse(db.createTable(null)); + Assert.assertFalse(db.addMemoryTable(null)); Assert.assertFalse(db.createTableWithLock(null, false, false).first); - db.dropTable("authors"); + db.removeMemoryTable("authors"); Assert.assertThrows(IOException.class, () -> db.write(null)); Assert.assertNull(db.getTableNullable("authors")); } diff --git a/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java b/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java index 1115a478b15a80..64cbd93ae5e516 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java @@ -171,7 +171,7 @@ public void testDiskRebalancerWithSameUsageDisk() { olapTable = new OlapTable(2, "fake table", new ArrayList<>(), KeysType.DUP_KEYS, new RangePartitionInfo(), new HashDistributionInfo()); - db.createTable(olapTable); + db.addMemoryTable(olapTable); // 1 table, 3 partitions p0,p1,p2 MaterializedIndex materializedIndex = new MaterializedIndex(olapTable.getId(), null); @@ -214,7 +214,7 @@ public void testDiskRebalancerWithDiffUsageDisk() { olapTable = new OlapTable(2, "fake table", new ArrayList<>(), KeysType.DUP_KEYS, new RangePartitionInfo(), new HashDistributionInfo()); - db.createTable(olapTable); + 
db.addMemoryTable(olapTable); // 1 table, 3 partitions p0,p1,p2 MaterializedIndex materializedIndex = new MaterializedIndex(olapTable.getId(), null); diff --git a/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java b/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java index 613510c0f58b8b..35c6a47701c6d0 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java @@ -152,7 +152,7 @@ long ignored() { olapTable = new OlapTable(2, "fake table", new ArrayList<>(), KeysType.DUP_KEYS, new RangePartitionInfo(), new HashDistributionInfo()); - db.createTable(olapTable); + db.addMemoryTable(olapTable); // 1 table, 3 partitions p0,p1,p2 MaterializedIndex materializedIndex = new MaterializedIndex(olapTable.getId(), null); diff --git a/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java b/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java index b3ae0afe8bf698..00ce1be8d49c36 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java +++ b/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java @@ -123,7 +123,7 @@ public static Database createDb(long dbId, long tableId, long partitionId, long // db Database db = new Database(dbId, DB_NAME); - db.createTable(table); + db.addMemoryTable(table); return db; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java b/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java index d801f55fc7cfa3..edeb764c6af7ee 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java +++ b/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java @@ -216,11 +216,11 @@ private static Env newDelegateCatalog() { //EasyMock.expect(catalog.getAuth()).andReturn(paloAuth).anyTimes(); Database db = new Database(testDbId, "testDb"); OlapTable table = newTable(TABLE_NAME); - 
db.createTable(table); + db.addMemoryTable(table); OlapTable table1 = newTable(TABLE_NAME + 1); - db.createTable(table1); + db.addMemoryTable(table1); EsTable esTable = newEsTable("es_table"); - db.createTable(esTable); + db.addMemoryTable(esTable); InternalCatalog internalCatalog = Deencapsulation.newInstance(InternalCatalog.class); new Expectations(internalCatalog) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java index 182596c3b63d1c..4b322d92058906 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java @@ -268,22 +268,22 @@ Env getCurrentEnv() { db = ((InternalCatalog) env.getCurrentCatalog()).getDbNullable(fullDbName); // table and view init use analyzer, should init after analyzer build OlapTable tbl1 = createOrderTable(); - db.createTable(tbl1); + db.addMemoryTable(tbl1); OlapTable tbl2 = createProfileTable(); - db.createTable(tbl2); + db.addMemoryTable(tbl2); OlapTable tbl3 = createEventTable(); - db.createTable(tbl3); + db.addMemoryTable(tbl3); // build view meta inline sql and create view directly, the originStmt from inline sql // should be analyzed by create view statement analyzer and then to sql View view1 = createEventView1(); - db.createTable(view1); + db.addMemoryTable(view1); View view2 = createEventView2(); - db.createTable(view2); + db.addMemoryTable(view2); View view3 = createEventView3(); - db.createTable(view3); + db.addMemoryTable(view3); View view4 = createEventNestedView(); - db.createTable(view4); + db.addMemoryTable(view4); } private OlapTable createOrderTable() { From 91a4ab66bb6c793f8faf3e6b555e71802b653968 Mon Sep 17 00:00:00 2001 From: wuwenchi Date: Tue, 23 Jan 2024 15:34:53 +0800 Subject: [PATCH 02/16] support for iceberg catalog(hms/hadoop/rest/glu) --- .../datasource/hive/HMSCachedClient.java | 2 + 
.../hive/PostgreSQLJdbcHMSCachedClient.java | 24 +++ .../hive/ThriftHMSCachedClient.java | 198 ++++++++++++++---- .../iceberg/IcebergExternalCatalog.java | 64 ++++++ .../iceberg/IcebergHMSExternalCatalog.java | 37 +++- .../datasource/iceberg/IcebergUtils.java | 147 ++++++------- .../iceberg/util/DorisTypeToType.java | 139 ++++++++++++ 7 files changed, 482 insertions(+), 129 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java index 6e7f45aaa415c6..e083e43f898176 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java @@ -78,4 +78,6 @@ NotificationEventResponse getNextNotification(long lastEventId, void acquireSharedLock(String queryId, long txnId, String user, TableName tblName, List partitionNames, long timeoutMs); + + String getCatalogLocation(String catalogName); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java index 174a15221b84a2..69ef45b0f6847f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java @@ -20,6 +20,8 @@ import org.apache.doris.analysis.TableName; import org.apache.doris.catalog.JdbcTable; import org.apache.doris.catalog.Type; +import org.apache.doris.datasource.CatalogDatabase; +import org.apache.doris.datasource.CatalogTable; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; import 
org.apache.doris.thrift.TOdbcTableType; @@ -29,6 +31,7 @@ import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import org.apache.commons.lang3.NotImplementedException; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.metastore.IMetaStoreClient.NotificationFilter; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; @@ -503,4 +506,25 @@ protected String getDatabaseQuery() { protected Type jdbcTypeToDoris(JdbcFieldSchema fieldSchema) { throw new HMSClientException("Do not support in PostgreSQLJdbcHMSCachedClient."); } + + public void createDatabase(CatalogDatabase database) { + throw new NotImplementedException("PostgreSQL createDatabase not implemented"); + } + + public void dropDatabase(String dbName) { + throw new NotImplementedException("PostgreSQL dropDatabase not implemented"); + } + + public void createTable(CatalogTable hiveTable, boolean ignoreIfExists) { + throw new NotImplementedException("PostgreSQL createTable not implemented"); + } + + public void dropTable(String dbName, String tblName) { + throw new NotImplementedException("PostgreSQL dropTable not implemented"); + } + + @Override + public String getCatalogLocation(String catalogName) { + throw new HMSClientException("Do not support in PostgreSQLJdbcHMSCachedClient."); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index f7f61a2e94956e..b0d21be084b2d4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -19,6 +19,8 @@ import org.apache.doris.analysis.TableName; import org.apache.doris.common.Config; +import org.apache.doris.datasource.CatalogDatabase; +import 
org.apache.doris.datasource.CatalogTable; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; import org.apache.doris.datasource.property.constants.HMSProperties; @@ -26,6 +28,7 @@ import com.amazonaws.glue.catalog.metastore.AWSCatalogMetastoreClient; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; +import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hive.common.ValidReaderWriteIdList; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidTxnWriteIdList; @@ -38,6 +41,7 @@ import org.apache.hadoop.hive.metastore.LockComponentBuilder; import org.apache.hadoop.hive.metastore.LockRequestBuilder; import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.Catalog; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId; import org.apache.hadoop.hive.metastore.api.DataOperationType; @@ -49,15 +53,16 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.TableValidWriteIds; import org.apache.hadoop.hive.metastore.txn.TxnUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.security.PrivilegedExceptionAction; import java.util.BitSet; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -93,7 +98,7 @@ public ThriftHMSCachedClient(HiveConf hiveConf, int poolSize) { public List getAllDatabases() { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(client.client::getAllDatabases); + return 
client.client.getAllDatabases(); } catch (Exception e) { client.setThrowable(e); throw e; @@ -107,7 +112,7 @@ public List getAllDatabases() { public List getAllTables(String dbName) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getAllTables(dbName)); + return client.client.getAllTables(dbName); } catch (Exception e) { client.setThrowable(e); throw e; @@ -117,11 +122,112 @@ public List getAllTables(String dbName) { } } + @Override + public void createDatabase(CatalogDatabase db) { + try (ThriftHMSClient client = getClient()) { + try { + if (db instanceof HiveCatalogDatabase) { + HiveCatalogDatabase hiveDb = (HiveCatalogDatabase) db; + Database database = new Database(); + database.setName(hiveDb.getDbName()); + if (StringUtils.isNotEmpty(hiveDb.getLocationUri())) { + database.setLocationUri(hiveDb.getLocationUri()); + } + database.setParameters(hiveDb.getProperties()); + database.setDescription(hiveDb.getComment()); + client.client.createDatabase(database); + } + } catch (Exception e) { + client.setThrowable(e); + throw e; + } + } catch (Exception e) { + throw new HMSClientException("failed to create database from hms client", e); + } + } + + @Override + public void createTable(CatalogTable tbl, boolean ignoreIfExists) { + if (tableExists(tbl.getDbName(), tbl.getTableName())) { + return; + } + try (ThriftHMSClient client = getClient()) { + try { + // sd: List cols, + // String location, + // String inputFormat, + // String outputFormat, + // Map parameters + // parameters.put("", "doris created") + if (tbl instanceof HiveCatalogTable) { + client.client.createTable(toHiveTable((HiveCatalogTable) tbl)); + } + } catch (Exception e) { + client.setThrowable(e); + throw e; + } + } catch (Exception e) { + throw new HMSClientException("failed to create database from hms client", e); + } + } + + private static Table toHiveTable(HiveCatalogTable hiveTable) { + Table table = new Table(); + table.setDbName(hiveTable.getDbName()); + 
table.setTableName(hiveTable.getTableName()); + // table.setOwner(""); + int createTime = (int) System.currentTimeMillis() * 1000; + table.setCreateTime(createTime); + table.setLastAccessTime(createTime); + // table.setRetention(0); + StorageDescriptor sd = new StorageDescriptor(); + sd.setInputFormat(hiveTable.getInputFormat()); + sd.setOutputFormat(hiveTable.getOutputFormat()); + Map parameters = new HashMap<>(); + parameters.put("tag", "doris created"); + sd.setParameters(parameters); + table.setSd(sd); + table.setPartitionKeys(hiveTable.getPartitionKeys()); + // table.setViewOriginalText(hiveTable.getViewSql()); + // table.setViewExpandedText(hiveTable.getViewSql()); + table.setTableType("MANAGED_TABLE"); + table.setParameters(hiveTable.getProperties()); + return table; + } + + @Override + public void dropDatabase(String dbName) { + try (ThriftHMSClient client = getClient()) { + try { + client.client.dropDatabase(dbName); + } catch (Exception e) { + client.setThrowable(e); + throw e; + } + } catch (Exception e) { + throw new HMSClientException("failed to drop database from hms client", e); + } + } + + @Override + public void dropTable(String dbName, String tblName) { + try (ThriftHMSClient client = getClient()) { + try { + client.client.dropTable(dbName, tblName); + } catch (Exception e) { + client.setThrowable(e); + throw e; + } + } catch (Exception e) { + throw new HMSClientException("failed to drop database from hms client", e); + } + } + @Override public boolean tableExists(String dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.tableExists(dbName, tblName)); + return client.client.tableExists(dbName, tblName); } catch (Exception e) { client.setThrowable(e); throw e; @@ -142,7 +248,7 @@ public List listPartitionNames(String dbName, String tblName, long maxLi short limited = maxListPartitionNum <= Short.MAX_VALUE ? 
(short) maxListPartitionNum : MAX_LIST_PARTITION_NUM; try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.listPartitionNames(dbName, tblName, limited)); + return client.client.listPartitionNames(dbName, tblName, limited); } catch (Exception e) { client.setThrowable(e); throw e; @@ -156,7 +262,7 @@ public List listPartitionNames(String dbName, String tblName, long maxLi public Partition getPartition(String dbName, String tblName, List partitionValues) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getPartition(dbName, tblName, partitionValues)); + return client.client.getPartition(dbName, tblName, partitionValues); } catch (Exception e) { client.setThrowable(e); throw e; @@ -171,7 +277,7 @@ public Partition getPartition(String dbName, String tblName, List partit public List getPartitions(String dbName, String tblName, List partitionNames) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getPartitionsByNames(dbName, tblName, partitionNames)); + return client.client.getPartitionsByNames(dbName, tblName, partitionNames); } catch (Exception e) { client.setThrowable(e); throw e; @@ -186,7 +292,7 @@ public List getPartitions(String dbName, String tblName, List public Database getDatabase(String dbName) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getDatabase(dbName)); + return client.client.getDatabase(dbName); } catch (Exception e) { client.setThrowable(e); throw e; @@ -200,7 +306,7 @@ public Database getDatabase(String dbName) { public Table getTable(String dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getTable(dbName, tblName)); + return client.client.getTable(dbName, tblName); } catch (Exception e) { client.setThrowable(e); throw e; @@ -214,7 +320,7 @@ public Table getTable(String dbName, String tblName) { public List getSchema(String 
dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getSchema(dbName, tblName)); + return client.client.getSchema(dbName, tblName); } catch (Exception e) { client.setThrowable(e); throw e; @@ -228,7 +334,7 @@ public List getSchema(String dbName, String tblName) { public List getTableColumnStatistics(String dbName, String tblName, List columns) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getTableColumnStatistics(dbName, tblName, columns)); + return client.client.getTableColumnStatistics(dbName, tblName, columns); } catch (Exception e) { client.setThrowable(e); throw e; @@ -243,7 +349,7 @@ public Map> getPartitionColumnStatistics( String dbName, String tblName, List partNames, List columns) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getPartitionColumnStatistics(dbName, tblName, partNames, columns)); + return client.client.getPartitionColumnStatistics(dbName, tblName, partNames, columns); } catch (Exception e) { client.setThrowable(e); throw e; @@ -257,7 +363,7 @@ public Map> getPartitionColumnStatistics( public CurrentNotificationEventId getCurrentNotificationEventId() { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(client.client::getCurrentNotificationEventId); + return client.client.getCurrentNotificationEventId(); } catch (Exception e) { client.setThrowable(e); throw e; @@ -271,12 +377,12 @@ public CurrentNotificationEventId getCurrentNotificationEventId() { @Override public NotificationEventResponse getNextNotification(long lastEventId, - int maxEvents, - IMetaStoreClient.NotificationFilter filter) + int maxEvents, + IMetaStoreClient.NotificationFilter filter) throws MetastoreNotificationFetchException { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.getNextNotification(lastEventId, maxEvents, filter)); + return 
client.client.getNextNotification(lastEventId, maxEvents, filter); } catch (Exception e) { client.setThrowable(e); throw e; @@ -293,7 +399,7 @@ public NotificationEventResponse getNextNotification(long lastEventId, public long openTxn(String user) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.openTxn(user)); + return client.client.openTxn(user); } catch (Exception e) { client.setThrowable(e); throw e; @@ -307,10 +413,7 @@ public long openTxn(String user) { public void commitTxn(long txnId) { try (ThriftHMSClient client = getClient()) { try { - ugiDoAs(() -> { - client.client.commitTxn(txnId); - return null; - }); + client.client.commitTxn(txnId); } catch (Exception e) { client.setThrowable(e); throw e; @@ -322,7 +425,7 @@ public void commitTxn(long txnId) { @Override public void acquireSharedLock(String queryId, long txnId, String user, TableName tblName, - List partitionNames, long timeoutMs) { + List partitionNames, long timeoutMs) { LockRequestBuilder request = new LockRequestBuilder(queryId).setTransactionId(txnId).setUser(user); List lockComponents = createLockComponentsForRead(tblName, partitionNames); for (LockComponent component : lockComponents) { @@ -331,7 +434,7 @@ public void acquireSharedLock(String queryId, long txnId, String user, TableName try (ThriftHMSClient client = getClient()) { LockResponse response; try { - response = ugiDoAs(() -> client.client.lock(request.build())); + response = client.client.lock(request.build()); } catch (Exception e) { client.setThrowable(e); throw e; @@ -359,22 +462,20 @@ public void acquireSharedLock(String queryId, long txnId, String user, TableName public ValidWriteIdList getValidWriteIds(String fullTableName, long currentTransactionId) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> { - // Pass currentTxn as 0L to get the recent snapshot of valid transactions in Hive - // Do not pass currentTransactionId instead as - // it will break Hive's 
listing of delta directories if major compaction - // deletes delta directories for valid transactions that existed at the time transaction is opened - ValidTxnList validTransactions = client.client.getValidTxns(); - List tableValidWriteIdsList = client.client.getValidWriteIds( - Collections.singletonList(fullTableName), validTransactions.toString()); - if (tableValidWriteIdsList.size() != 1) { - throw new Exception("tableValidWriteIdsList's size should be 1"); - } - ValidTxnWriteIdList validTxnWriteIdList = TxnUtils.createValidTxnWriteIdList(currentTransactionId, - tableValidWriteIdsList); - ValidWriteIdList writeIdList = validTxnWriteIdList.getTableValidWriteIdList(fullTableName); - return writeIdList; - }); + // Pass currentTxn as 0L to get the recent snapshot of valid transactions in Hive + // Do not pass currentTransactionId instead as + // it will break Hive's listing of delta directories if major compaction + // deletes delta directories for valid transactions that existed at the time transaction is opened + ValidTxnList validTransactions = client.client.getValidTxns(); + List tableValidWriteIdsList = client.client.getValidWriteIds( + Collections.singletonList(fullTableName), validTransactions.toString()); + if (tableValidWriteIdsList.size() != 1) { + throw new Exception("tableValidWriteIdsList's size should be 1"); + } + ValidTxnWriteIdList validTxnWriteIdList = TxnUtils.createValidTxnWriteIdList(currentTransactionId, + tableValidWriteIdsList); + ValidWriteIdList writeIdList = validTxnWriteIdList.getTableValidWriteIdList(fullTableName); + return writeIdList; } catch (Exception e) { client.setThrowable(e); throw e; @@ -390,7 +491,7 @@ public ValidWriteIdList getValidWriteIds(String fullTableName, long currentTrans private LockResponse checkLock(long lockId) { try (ThriftHMSClient client = getClient()) { try { - return ugiDoAs(() -> client.client.checkLock(lockId)); + return client.client.checkLock(lockId); } catch (Exception e) { client.setThrowable(e); 
throw e; @@ -465,7 +566,7 @@ private ThriftHMSClient getClient() throws MetaException { synchronized (clientPool) { ThriftHMSClient client = clientPool.poll(); if (client == null) { - return ugiDoAs(() -> new ThriftHMSClient(hiveConf)); + return new ThriftHMSClient(hiveConf); } return client; } @@ -474,7 +575,18 @@ private ThriftHMSClient getClient() throws MetaException { } } - private T ugiDoAs(PrivilegedExceptionAction action) { - return HiveMetaStoreClientHelper.ugiDoAs(hiveConf, action); + @Override + public String getCatalogLocation(String catalogName) { + try (ThriftHMSClient client = getClient()) { + try { + Catalog catalog = client.client.getCatalog(catalogName); + return catalog.getLocationUri(); + } catch (Exception e) { + client.setThrowable(e); + throw e; + } + } catch (Exception e) { + throw new HMSClientException("failed to get location for %s from hms client", e, catalogName); + } } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java index fadc60913bed0c..b133da2b176027 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java @@ -17,14 +17,28 @@ package org.apache.doris.datasource.iceberg; +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; +import org.apache.doris.catalog.Column; import org.apache.doris.catalog.Env; +import org.apache.doris.catalog.StructField; +import org.apache.doris.catalog.StructType; import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.DdlException; import org.apache.doris.common.FeNameFormat; +import org.apache.doris.common.UserException; import org.apache.doris.common.util.Util; 
import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.SessionContext; +import org.apache.doris.external.iceberg.util.DorisTypeToType; +import org.apache.doris.external.iceberg.util.DorisTypeVisitor; +import org.apache.doris.external.iceberg.util.IcebergUtils; +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; import org.apache.iceberg.catalog.Catalog; import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.SupportsNamespaces; @@ -32,7 +46,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; public abstract class IcebergExternalCatalog extends ExternalCatalog { @@ -108,4 +124,52 @@ public org.apache.iceberg.Table getIcebergTable(String dbName, String tblName) { .getIcebergMetadataCache() .getIcebergTable(this, dbName, tblName); } + + @Override + public void createDb(CreateDbStmt stmt) throws DdlException { + makeSureInitialized(); + SupportsNamespaces nsCatalog = (SupportsNamespaces) catalog; + String dbName = stmt.getFullDbName(); + Map properties = stmt.getProperties(); + nsCatalog.createNamespace(Namespace.of(dbName), properties); + // TODO 增加刷新流程,否则create之后,show不出来,只能refresh之后才能show出来 + } + + @Override + public void dropDb(DropDbStmt stmt) throws DdlException { + makeSureInitialized(); + SupportsNamespaces nsCatalog = (SupportsNamespaces) catalog; + String dbName = stmt.getDbName(); + if (dbNameToId.containsKey(dbName)) { + Long aLong = dbNameToId.get(dbName); + idToDb.remove(aLong); + dbNameToId.remove(dbName); + } + nsCatalog.dropNamespace(Namespace.of(dbName)); + } + + @Override + public void createTable(CreateTableStmt stmt) throws UserException { + makeSureInitialized(); + String dbName = stmt.getDbName(); + String tableName = stmt.getTableName(); + List columns = stmt.getColumns(); 
+ List collect = columns.stream() + .map(col -> new StructField(col.getName(), col.getType(), col.getComment(), col.isAllowNull())) + .collect(Collectors.toList()); + StructType structType = new StructType(new ArrayList<>(collect)); + org.apache.iceberg.types.Type visit = DorisTypeVisitor.visit(structType, new DorisTypeToType(structType)); + Schema schema = new Schema(visit.asNestedType().asStructType().fields()); + Map properties = stmt.getProperties(); + PartitionSpec partitionSpec = IcebergUtils.solveIcebergPartitionSpec(properties, schema); + catalog.createTable(TableIdentifier.of(dbName, tableName), schema, partitionSpec, properties); + } + + @Override + public void dropTable(DropTableStmt stmt) throws DdlException { + makeSureInitialized(); + String dbName = stmt.getDbName(); + String tableName = stmt.getTableName(); + catalog.dropTable(TableIdentifier.of(dbName, tableName)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java index 0300477361bf46..2c0e18e56f5935 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java @@ -17,13 +17,22 @@ package org.apache.doris.datasource.iceberg; +import org.apache.doris.catalog.AuthType; +import org.apache.doris.catalog.HdfsResource; +import org.apache.doris.common.Config; import org.apache.doris.datasource.CatalogProperty; +import org.apache.doris.datasource.HMSClientException; +import org.apache.doris.datasource.hive.HMSCachedClient; +import org.apache.doris.datasource.hive.HMSCachedClientFactory; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.datasource.property.constants.HMSProperties; +import org.apache.hadoop.hive.conf.HiveConf; +import 
org.apache.hadoop.security.UserGroupInformation; import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.hive.HiveCatalog; +import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -44,8 +53,34 @@ protected void initLocalObjectsImpl() { // initialize hive catalog Map catalogProperties = new HashMap<>(); String metastoreUris = catalogProperty.getOrDefault(HMSProperties.HIVE_METASTORE_URIS, ""); - catalogProperties.put(CatalogProperties.URI, metastoreUris); + HiveConf hiveConf = new HiveConf(); + for (Map.Entry kv : catalogProperty.getHadoopProperties().entrySet()) { + hiveConf.set(kv.getKey(), kv.getValue()); + } + hiveConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(), + String.valueOf(Config.hive_metastore_client_timeout_second)); + String authentication = catalogProperty.getOrDefault( + HdfsResource.HADOOP_SECURITY_AUTHENTICATION, ""); + if (AuthType.KERBEROS.getDesc().equals(authentication)) { + hiveConf.set(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authentication); + UserGroupInformation.setConfiguration(hiveConf); + try { + /** + * Because metastore client is created by using + * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy} + * it will relogin when TGT is expired, so we don't need to relogin manually. 
+ */ + UserGroupInformation.loginUserFromKeytab( + catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, ""), + catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_KEYTAB, "")); + } catch (IOException e) { + throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName()); + } + } + HMSCachedClient cachedClient = HMSCachedClientFactory.createCachedClient(hiveConf, 1, null); + String location = cachedClient.getCatalogLocation("hive"); + catalogProperties.put(CatalogProperties.WAREHOUSE_LOCATION, location); hiveCatalog.initialize(icebergCatalogType, catalogProperties); catalog = hiveCatalog; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java index f66babfe03e09e..261730d4963dc4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java @@ -33,17 +33,10 @@ import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.StringLiteral; import org.apache.doris.analysis.Subquery; -import org.apache.doris.catalog.ArrayType; -import org.apache.doris.catalog.Column; -import org.apache.doris.catalog.Env; -import org.apache.doris.catalog.ScalarType; -import org.apache.doris.catalog.Type; import org.apache.doris.common.util.TimeUtils; -import org.apache.doris.datasource.ExternalCatalog; -import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper; import org.apache.doris.thrift.TExprOpcode; -import com.google.common.collect.Lists; +import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.expressions.Expression; import org.apache.iceberg.expressions.Expressions; @@ -53,17 +46,23 @@ import java.util.ArrayList; import java.util.List; -import java.util.Locale; +import java.util.Map; +import java.util.regex.Matcher; +import 
java.util.regex.Pattern; /** * Iceberg utils */ public class IcebergUtils { private static final Logger LOG = LogManager.getLogger(IcebergUtils.class); - private static long MILLIS_TO_NANO_TIME = 1000; - // https://iceberg.apache.org/spec/#schemas-and-data-types - // All time and timestamp values are stored with microsecond precision - private static final int ICEBERG_DATETIME_SCALE_MS = 6; + private static ThreadLocal columnIdThreadLocal = new ThreadLocal() { + @Override + public Integer initialValue() { + return 0; + } + }; + static long MILLIS_TO_NANO_TIME = 1000; + private static final Pattern PARTITION_REG = Pattern.compile("(\\w+)\\((\\d+)?,?(\\w+)\\)"); public static Expression convertToIcebergExpr(Expr expr, Schema schema) { if (expr == null) { @@ -212,11 +211,7 @@ private static Object extractDorisLiteral(Expr expr) { return boolLiteral.getValue(); } else if (expr instanceof DateLiteral) { DateLiteral dateLiteral = (DateLiteral) expr; - if (dateLiteral.isDateType()) { - return dateLiteral.getStringValue(); - } else { - return dateLiteral.unixTimestamp(TimeUtils.getTimeZone()) * MILLIS_TO_NANO_TIME; - } + return dateLiteral.unixTimestamp(TimeUtils.getTimeZone()) * MILLIS_TO_NANO_TIME; } else if (expr instanceof DecimalLiteral) { DecimalLiteral decimalLiteral = (DecimalLiteral) expr; return decimalLiteral.getValue(); @@ -245,73 +240,55 @@ private static SlotRef convertDorisExprToSlotRef(Expr expr) { return slotRef; } - private static Type icebergPrimitiveTypeToDorisType(org.apache.iceberg.types.Type.PrimitiveType primitive) { - switch (primitive.typeId()) { - case BOOLEAN: - return Type.BOOLEAN; - case INTEGER: - return Type.INT; - case LONG: - return Type.BIGINT; - case FLOAT: - return Type.FLOAT; - case DOUBLE: - return Type.DOUBLE; - case STRING: - case BINARY: - case UUID: - return Type.STRING; - case FIXED: - Types.FixedType fixed = (Types.FixedType) primitive; - return ScalarType.createCharType(fixed.length()); - case DECIMAL: - Types.DecimalType 
decimal = (Types.DecimalType) primitive; - return ScalarType.createDecimalV3Type(decimal.precision(), decimal.scale()); - case DATE: - return ScalarType.createDateV2Type(); - case TIMESTAMP: - return ScalarType.createDatetimeV2Type(ICEBERG_DATETIME_SCALE_MS); - case TIME: - return Type.UNSUPPORTED; - default: - throw new IllegalArgumentException("Cannot transform unknown type: " + primitive); - } - } - - public static Type icebergTypeToDorisType(org.apache.iceberg.types.Type type) { - if (type.isPrimitiveType()) { - return icebergPrimitiveTypeToDorisType((org.apache.iceberg.types.Type.PrimitiveType) type); - } - switch (type.typeId()) { - case LIST: - Types.ListType list = (Types.ListType) type; - return ArrayType.create(icebergTypeToDorisType(list.elementType()), true); - case MAP: - case STRUCT: - return Type.UNSUPPORTED; - default: - throw new IllegalArgumentException("Cannot transform unknown type: " + type); - } - } - - /** - * Get iceberg schema from catalog and convert them to doris schema - */ - public static List getSchema(ExternalCatalog catalog, String dbName, String name) { - return HiveMetaStoreClientHelper.ugiDoAs(catalog.getConfiguration(), () -> { - org.apache.iceberg.Table icebergTable = Env.getCurrentEnv() - .getExtMetaCacheMgr() - .getIcebergMetadataCache() - .getIcebergTable(catalog, dbName, name); - Schema schema = icebergTable.schema(); - List columns = schema.columns(); - List tmpSchema = Lists.newArrayListWithCapacity(columns.size()); - for (Types.NestedField field : columns) { - tmpSchema.add(new Column(field.name().toLowerCase(Locale.ROOT), - IcebergUtils.icebergTypeToDorisType(field.type()), true, null, true, field.doc(), true, - schema.caseInsensitiveFindField(field.name()).fieldId())); + // "partition"="c1;day(c1);bucket(4,c3)" + public static PartitionSpec solveIcebergPartitionSpec(Map properties, Schema schema) { + if (properties.containsKey("partition")) { + PartitionSpec.Builder builder = PartitionSpec.builderFor(schema); + String 
par = properties.get("partition").replaceAll(" ", ""); + String[] pars = par.split(";"); + for (String func : pars) { + if (func.contains("(")) { + Matcher matcher = PARTITION_REG.matcher(func); + if (matcher.matches()) { + switch (matcher.group(1).toLowerCase()) { + case "bucket": + builder.bucket(matcher.group(3), Integer.parseInt(matcher.group(2))); + break; + case "year": + case "years": + builder.year(matcher.group(3)); + break; + case "month": + case "months": + builder.month(matcher.group(3)); + break; + case "date": + case "day": + case "days": + builder.day(matcher.group(3)); + break; + case "date_hour": + case "hour": + case "hours": + builder.hour(matcher.group(3)); + break; + case "truncate": + builder.truncate(matcher.group(3), Integer.parseInt(matcher.group(2))); + break; + default: + LOG.warn("unsupported partition for " + matcher.group(1)); + } + } else { + LOG.warn("failed to get partition info from " + func); + } + } else { + builder.identity(func); + } } - return tmpSchema; - }); + properties.remove("partition"); + return builder.build(); + } else { + return PartitionSpec.unpartitioned(); + } } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java b/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java new file mode 100644 index 00000000000000..52dd7446cc25ff --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java @@ -0,0 +1,139 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.external.iceberg.util; + +import org.apache.doris.catalog.ArrayType; +import org.apache.doris.catalog.MapType; +import org.apache.doris.catalog.PrimitiveType; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.catalog.StructField; +import org.apache.doris.catalog.StructType; + +import com.google.common.collect.Lists; +import org.apache.iceberg.types.Type; +import org.apache.iceberg.types.Types; + +import java.util.List; + + +/** + * Convert Doris type to Iceberg type + */ +public class DorisTypeToType extends DorisTypeVisitor { + private final StructType root; + private int nextId = 0; + + public DorisTypeToType() { + this.root = null; + } + + public DorisTypeToType(StructType root) { + this.root = root; + // the root struct's fields use the first ids + this.nextId = root.getFields().size(); + } + + private int getNextId() { + int next = nextId; + nextId += 1; + return next; + } + + @Override + public Type struct(StructType struct, List types) { + List fields = struct.getFields(); + List newFields = Lists.newArrayListWithExpectedSize(fields.size()); + boolean isRoot = root == struct; + for (int i = 0; i < fields.size(); i++) { + StructField field = fields.get(i); + Type type = types.get(i); + + int id = isRoot ? 
i : getNextId(); + if (field.getContainsNull()) { + newFields.add(Types.NestedField.optional(id, field.getName(), type, field.getComment())); + } else { + newFields.add(Types.NestedField.required(id, field.getName(), type, field.getComment())); + } + } + return Types.StructType.of(newFields); + } + + @Override + public Type field(StructField field, Type typeResult) { + return typeResult; + } + + @Override + public Type array(ArrayType array, Type elementType) { + if (array.getContainsNull()) { + return Types.ListType.ofOptional(getNextId(), elementType); + } else { + return Types.ListType.ofRequired(getNextId(), elementType); + } + } + + @Override + public Type map(MapType map, Type keyType, Type valueType) { + if (map.getIsValueContainsNull()) { + return Types.MapType.ofOptional(getNextId(), getNextId(), keyType, valueType); + } else { + return Types.MapType.ofRequired(getNextId(), getNextId(), keyType, valueType); + } + } + + @Override + public Type atomic(org.apache.doris.catalog.Type atomic) { + PrimitiveType primitiveType = atomic.getPrimitiveType(); + if (primitiveType.equals(PrimitiveType.BOOLEAN)) { + return Types.BooleanType.get(); + } else if (primitiveType.equals(PrimitiveType.TINYINT) + || primitiveType.equals(PrimitiveType.SMALLINT) + || primitiveType.equals(PrimitiveType.INT)) { + return Types.IntegerType.get(); + } else if (primitiveType.equals(PrimitiveType.BIGINT) + || primitiveType.equals(PrimitiveType.LARGEINT)) { + return Types.LongType.get(); + } else if (primitiveType.equals(PrimitiveType.FLOAT)) { + return Types.FloatType.get(); + } else if (primitiveType.equals(PrimitiveType.DOUBLE)) { + return Types.DoubleType.get(); + } else if (primitiveType.equals(PrimitiveType.CHAR) + || primitiveType.equals(PrimitiveType.VARCHAR) + || primitiveType.equals(PrimitiveType.STRING)) { + return Types.StringType.get(); + } else if (primitiveType.equals(PrimitiveType.DATE) + || primitiveType.equals(PrimitiveType.DATEV2)) { + return Types.DateType.get(); + } 
else if (primitiveType.equals(PrimitiveType.TIME) + || primitiveType.equals(PrimitiveType.TIMEV2)) { + return Types.TimeType.get(); + } else if (primitiveType.equals(PrimitiveType.DECIMALV2) + || primitiveType.isDecimalV3Type()) { + return Types.DecimalType.of( + ((ScalarType) atomic).getScalarPrecision(), + ((ScalarType) atomic).getScalarScale()); + } else if (primitiveType.equals(PrimitiveType.DATETIME) + || primitiveType.equals(PrimitiveType.DATETIMEV2)) { + return Types.TimestampType.withoutZone(); + } + // unsupported type: PrimitiveType.HLL BITMAP BINARY + + throw new UnsupportedOperationException( + "Not a supported type: " + primitiveType); + } +} From 310e9467a047104f426c50e6691fb4e8649c8477 Mon Sep 17 00:00:00 2001 From: slothever Date: Thu, 25 Jan 2024 10:21:24 +0800 Subject: [PATCH 03/16] add refresh hms --- .../java/org/apache/doris/alter/Alter.java | 20 +- .../org/apache/doris/backup/RestoreJob.java | 6 +- .../doris/catalog/CatalogRecycleBin.java | 4 +- .../org/apache/doris/catalog/Database.java | 15 +- .../org/apache/doris/catalog/DatabaseIf.java | 4 +- .../java/org/apache/doris/catalog/Env.java | 8 +- .../apache/doris/catalog/InfoSchemaDb.java | 4 +- .../catalog/MysqlCompatibleDatabase.java | 6 +- .../org/apache/doris/catalog/MysqlDb.java | 2 +- .../external/ExternalMetadataOperations.java | 40 ++++ .../catalog/external/NamedExternalTable.java | 49 +++++ .../doris/datasource/CatalogDatabase.java | 22 +++ .../apache/doris/datasource/CatalogIf.java | 13 ++ .../apache/doris/datasource/CatalogMgr.java | 28 +-- .../apache/doris/datasource/CatalogTable.java | 28 +++ .../doris/datasource/ExternalCatalog.java | 51 +++-- .../doris/datasource/ExternalDatabase.java | 27 ++- .../doris/datasource/InternalCatalog.java | 79 ++++++-- .../datasource/hive/ExternalMetadataOps.java | 44 +++++ .../datasource/hive/HMSCachedClient.java | 10 + .../hive/HMSCachedClientFactory.java | 42 ---- .../datasource/hive/HMSExternalCatalog.java | 172 +--------------- 
.../datasource/hive/HMSExternalDatabase.java | 31 +-- .../datasource/hive/HiveCatalogDatabase.java | 32 +++ .../datasource/hive/HiveCatalogTable.java | 94 +++++++++ .../hive/HiveMetaStoreClientHelper.java | 2 +- .../datasource/hive/HiveMetadataOps.java | 185 ++++++++++++++++++ .../hive/ThriftHMSCachedClient.java | 123 +++++++----- .../iceberg/IcebergExternalCatalog.java | 114 +---------- .../iceberg/IcebergExternalDatabase.java | 23 --- .../iceberg/IcebergExternalTable.java | 4 +- .../iceberg/IcebergGlueExternalCatalog.java | 5 +- .../iceberg/IcebergHMSExternalCatalog.java | 4 +- .../iceberg/IcebergMetadataCache.java | 8 - .../iceberg/IcebergMetadataOps.java | 138 +++++++++++++ .../datasource/iceberg/IcebergUtils.java | 93 ++++++++- .../paimon/PaimonExternalDatabase.java | 31 +-- .../property/PropertyConverter.java | 2 +- .../apache/doris/analysis/AccessTestUtil.java | 2 +- .../apache/doris/backup/CatalogMocker.java | 6 +- .../apache/doris/backup/RestoreJobTest.java | 2 +- .../apache/doris/catalog/CatalogTestUtil.java | 6 +- .../apache/doris/catalog/DatabaseTest.java | 18 +- .../doris/catalog/InfoSchemaDbTest.java | 4 +- .../org/apache/doris/catalog/MysqlDbTest.java | 4 +- .../apache/doris/clone/DiskRebalanceTest.java | 4 +- .../org/apache/doris/clone/RebalanceTest.java | 2 +- .../doris/common/util/UnitTestUtil.java | 2 +- .../apache/doris/http/DorisHttpTestCase.java | 6 +- .../apache/doris/qe/OlapQueryCacheTest.java | 14 +- 50 files changed, 1059 insertions(+), 574 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java create mode 100644 
fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java delete mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClientFactory.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java b/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java index 051d0fd6738767..28c9afb72222ec 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java +++ b/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java @@ -411,8 +411,8 @@ private void processModifyEngineInternal(Database db, Table externalTable, } odbcTable.writeLock(); try { - db.removeMemoryTable(mysqlTable.getName()); - db.addMemoryTable(odbcTable); + db.unregisterTable(mysqlTable.getName()); + db.registerTable(odbcTable); if (!isReplay) { ModifyTableEngineOperationLog log = new ModifyTableEngineOperationLog(db.getId(), externalTable.getId(), prop); @@ -591,17 +591,17 @@ private void replaceTableInternal(Database db, OlapTable origTable, OlapTable ne String newTblName = newTbl.getName(); // drop origin table and new table - db.removeMemoryTable(oldTblName); - db.removeMemoryTable(newTblName); + db.unregisterTable(oldTblName); + db.unregisterTable(newTblName); // rename new table name to origin table name and add it to database newTbl.checkAndSetName(oldTblName, false); - db.addMemoryTable(newTbl); + db.registerTable(newTbl); if (swapTable) { // rename origin table name to new table name and add it to database origTable.checkAndSetName(newTblName, false); - db.addMemoryTable(origTable); + db.registerTable(origTable); } else { // 
not swap, the origin table is not used anymore, need to drop all its tablets. Env.getCurrentEnv().onEraseOlapTable(origTable, isReplay); @@ -633,8 +633,8 @@ private void modifyViewDef(Database db, View view, String inlineViewDef, long sq } view.setNewFullSchema(newFullSchema); String viewName = view.getName(); - db.removeMemoryTable(viewName); - db.addMemoryTable(view); + db.unregisterTable(viewName); + db.registerTable(view); AlterViewInfo alterViewInfo = new AlterViewInfo(db.getId(), view.getId(), inlineViewDef, newFullSchema, sqlMode); @@ -669,8 +669,8 @@ public void replayModifyViewDef(AlterViewInfo alterViewInfo) throws MetaNotFound } view.setNewFullSchema(newFullSchema); - db.removeMemoryTable(viewName); - db.addMemoryTable(view); + db.unregisterTable(viewName); + db.registerTable(view); LOG.info("replay modify view[{}] definition to {}", viewName, inlineViewDef); } finally { diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java index e04abd920fe1db..b0ce433b1ad4d0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java +++ b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java @@ -905,7 +905,7 @@ private void checkAndPrepareMeta() { } tbl.writeLock(); try { - if (!db.addMemoryTable(tbl)) { + if (!db.registerTable(tbl)) { status = new Status(ErrCode.COMMON_ERROR, "Table " + tbl.getName() + " already exist in db: " + db.getFullName()); return; @@ -1286,7 +1286,7 @@ private void replayCheckAndPrepareMeta() { db.writeLock(); restoreTbl.writeLock(); try { - db.addMemoryTable(restoreTbl); + db.registerTable(restoreTbl); } finally { restoreTbl.writeUnlock(); db.writeUnlock(); @@ -1936,7 +1936,7 @@ public void cancelInternal(boolean isReplay) { } } } - db.removeMemoryTable(restoreTbl.getName()); + db.unregisterTable(restoreTbl.getName()); } finally { restoreTbl.writeUnlock(); } diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java index d0107e25517685..51a0c5ba9323d1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/CatalogRecycleBin.java @@ -644,7 +644,7 @@ private void recoverAllTables(RecycleDatabaseInfo dbInfo) throws DdlException { } Table table = tableInfo.getTable(); - db.addMemoryTable(table); + db.registerTable(table); LOG.info("recover db[{}] with table[{}]: {}", dbId, table.getId(), table.getName()); iterator.remove(); idToRecycleTime.remove(table.getId()); @@ -739,7 +739,7 @@ private synchronized boolean innerRecoverTable(Database db, Table table, String } } - db.addMemoryTable(table); + db.registerTable(table); if (isReplay) { iterator.remove(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java index a1d4bda7454972..fd709963a7b073 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Database.java @@ -416,25 +416,26 @@ public Pair createTableWithLock( } } - public boolean addMemoryTable(Table table) { + public boolean registerTable(TableIf table) { boolean result = true; - table.setQualifiedDbName(fullQualifiedName); - String tableName = table.getName(); + Table olapTable = (Table) table; + olapTable.setQualifiedDbName(fullQualifiedName); + String tableName = olapTable.getName(); if (Env.isStoredTableNamesLowerCase()) { tableName = tableName.toLowerCase(); } if (isTableExist(tableName)) { result = false; } else { - idToTable.put(table.getId(), table); - nameToTable.put(table.getName(), table); + idToTable.put(olapTable.getId(), olapTable); + nameToTable.put(olapTable.getName(), olapTable); lowerCaseToTableName.put(tableName.toLowerCase(), tableName); } - 
table.unmarkDropped(); + olapTable.unmarkDropped(); return result; } - public void removeMemoryTable(String tableName) { + public void unregisterTable(String tableName) { if (Env.isStoredTableNamesLowerCase()) { tableName = tableName.toLowerCase(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java index bc751a95a04c8e..031989ec43baec 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java @@ -263,7 +263,9 @@ default OlapTable getOlapTableOrAnalysisException(String tableName) throws Analy return (OlapTable) table; } - void removeMemoryTable(String tableName); + boolean registerTable(TableIf table); + + void unregisterTable(String tableName); CatalogIf getCatalog(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index f5f5ba5763745e..0c4b38f93a7125 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -4330,8 +4330,8 @@ public void renameTable(Database db, Table table, String newTableName) throws Dd table.setName(newTableName); } - db.removeMemoryTable(oldTableName); - db.addMemoryTable(table); + db.unregisterTable(oldTableName); + db.registerTable(table); TableInfo tableInfo = TableInfo.createForTableRename(db.getId(), table.getId(), newTableName); editLog.logTableRename(tableInfo); @@ -4363,9 +4363,9 @@ public void replayRenameTable(TableInfo tableInfo) throws MetaNotFoundException table.writeLock(); try { String tableName = table.getName(); - db.removeMemoryTable(tableName); + db.unregisterTable(tableName); table.setName(newTableName); - db.addMemoryTable(table); + db.registerTable(table); LOG.info("replay rename table[{}] to {}", tableName, newTableName); } finally { table.writeUnlock(); diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java index 177f81643a82c8..2a612066c76a6d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/InfoSchemaDb.java @@ -29,12 +29,12 @@ public InfoSchemaDb() { @Override protected void initTables() { for (Table table : SchemaTable.TABLE_MAP.values()) { - super.addMemoryTable(table); + super.registerTable(table); } } @Override - public boolean addMemoryTable(Table table) { + public boolean registerTable(TableIf table) { return false; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java index 57403310de7448..028fe186b2c598 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlCompatibleDatabase.java @@ -50,12 +50,12 @@ public MysqlCompatibleDatabase(long id, String name) { * @note: Rename a table of mysql database in MYSQL ls allowed. 
*/ @Override - public boolean addMemoryTable(Table table) { - return super.addMemoryTable(table); + public boolean registerTable(TableIf table) { + return super.registerTable(table); } @Override - public void removeMemoryTable(String name) { + public void unregisterTable(String name) { // Do nothing } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java index d09b5942c21bcd..67d7fc8ecca078 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/MysqlDb.java @@ -52,7 +52,7 @@ public MysqlDb() { public void initTables() {} @Override - public boolean addMemoryTable(Table table) { + public boolean registerTable(TableIf table) { return false; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java new file mode 100644 index 00000000000000..fa393673c2d054 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java @@ -0,0 +1,40 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.catalog.external; + +import org.apache.doris.datasource.hive.HMSExternalCatalog; +import org.apache.doris.datasource.hive.HiveMetadataOps; +import org.apache.doris.datasource.iceberg.IcebergExternalCatalog; +import org.apache.doris.datasource.iceberg.IcebergMetadataOps; +import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.iceberg.catalog.Catalog; + + +public class ExternalMetadataOperations { + + public static HiveMetadataOps newHiveMetadataOps(HiveConf hiveConf, JdbcClientConfig jdbcClientConfig, + HMSExternalCatalog catalog) { + return new HiveMetadataOps(hiveConf, jdbcClientConfig, catalog); + } + + public static IcebergMetadataOps newIcebergMetadataOps(IcebergExternalCatalog dorisCatalog, Catalog catalog) { + return new IcebergMetadataOps(dorisCatalog, catalog); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java new file mode 100644 index 00000000000000..1d51380f4643ad --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java @@ -0,0 +1,49 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.catalog.external; + +import org.apache.doris.datasource.ExternalCatalog; +import org.apache.doris.datasource.ExternalTable; + +/** + * use to save table info. + */ +public class NamedExternalTable extends ExternalTable { + + private NamedExternalTable(long id, String name, String dbName, ExternalCatalog catalog) { + super(id, name, catalog, dbName, TableType.HMS_EXTERNAL_TABLE); + } + + public void setUpdateTime(long updateTime) { + schemaUpdateTime = updateTime; + } + + /** + * + * @param id id + * @param tableName table name + * @param dbName db name + * @param catalog catalog + * @return NamedExternalTable external table name info + */ + public static NamedExternalTable of(long id, String tableName, String dbName, ExternalCatalog catalog) { + return new NamedExternalTable(id, tableName, dbName, catalog); + } +} + + diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java new file mode 100644 index 00000000000000..73305d099a7774 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java @@ -0,0 +1,22 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.datasource; + +public interface CatalogDatabase { + String getDbName(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java index 7fd240aa8d07d2..ffae9420edeb48 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogIf.java @@ -17,6 +17,10 @@ package org.apache.doris.datasource; +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.DatabaseIf; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.TableIf; @@ -24,6 +28,7 @@ import org.apache.doris.common.DdlException; import org.apache.doris.common.ErrorCode; import org.apache.doris.common.MetaNotFoundException; +import org.apache.doris.common.UserException; import com.google.common.base.Strings; import com.google.common.collect.Lists; @@ -177,4 +182,12 @@ default CatalogLog constructEditLog() { boolean enableAutoAnalyze(); ConcurrentHashMap getIdToDb(); + + void createDb(CreateDbStmt stmt) throws DdlException; + + void dropDb(DropDbStmt stmt) throws DdlException; + + void createTable(CreateTableStmt stmt) throws UserException; + + 
void dropTable(DropTableStmt stmt) throws DdlException; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index d01a9c27618c21..e97647d00a8f4c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -31,6 +31,7 @@ import org.apache.doris.catalog.Resource; import org.apache.doris.catalog.Resource.ReferenceType; import org.apache.doris.catalog.TableIf; +import org.apache.doris.catalog.external.NamedExternalTable; import org.apache.doris.cluster.ClusterNamespace; import org.apache.doris.common.AnalysisException; import org.apache.doris.common.CaseSensibility; @@ -45,7 +46,6 @@ import org.apache.doris.common.util.PrintableMap; import org.apache.doris.common.util.TimeUtils; import org.apache.doris.datasource.hive.HMSExternalCatalog; -import org.apache.doris.datasource.hive.HMSExternalDatabase; import org.apache.doris.datasource.hive.HMSExternalTable; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.persist.OperationType; @@ -747,14 +747,14 @@ public void replayRefreshExternalTable(ExternalObjectLog log) { public void unloadExternalTable(String dbName, String tableName, String catalogName, boolean ignoreIfExists) throws DdlException { - CatalogIf catalog = nameToCatalog.get(catalogName); + CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { throw new DdlException("No catalog found with name: " + catalogName); } if (!(catalog instanceof ExternalCatalog)) { throw new DdlException("Only support drop ExternalCatalog Tables"); } - DatabaseIf db = catalog.getDbNullable(dbName); + ExternalDatabase db = ((ExternalCatalog) catalog).getDbNullable(dbName); if (db == null) { if (!ignoreIfExists) { throw new DdlException("Database " + dbName + " does not exist in catalog " + catalog.getName()); @@ -772,10 +772,7 @@ public void 
unloadExternalTable(String dbName, String tableName, String catalogN db.writeLock(); try { - db.removeMemoryTable(table.getName()); - Env.getCurrentEnv().getExtMetaCacheMgr().invalidateTableCache( - catalog.getId(), db.getFullName(), table.getName()); - ((HMSExternalDatabase) db).setLastUpdateTime(System.currentTimeMillis()); + db.unregisterTable(table.getName()); } finally { db.writeUnlock(); } @@ -817,7 +814,7 @@ public void loadExternalTableFromEvent(String dbName, String tableName, } return; } - + // TODO: prevent tableId conflicts between tables created by HMS events and by catalog DDL long tblId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(catalog.getId(), dbName, tableName); // -1L means it will be dropped later, ignore if (tblId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { @@ -826,12 +823,9 @@ public void loadExternalTableFromEvent(String dbName, String tableName, db.writeLock(); try { - ((HMSExternalDatabase) db).addMemoryTable(tableName, tblId); - ((HMSExternalDatabase) db).setLastUpdateTime(System.currentTimeMillis()); - table = db.getTableNullable(tableName); - if (table != null) { - ((HMSExternalTable) table).setEventUpdateTime(updateTime); - } + NamedExternalTable namedTable = NamedExternalTable.of(tblId, tableName, dbName, (ExternalCatalog) catalog); + namedTable.setUpdateTime(updateTime); + db.registerTable(namedTable); } finally { db.writeUnlock(); } @@ -853,9 +847,7 @@ public void removeExternalDatabase(String dbName, String catalogName, boolean ig } return; } - - ((HMSExternalCatalog) catalog).removeDatabase(dbName); - Env.getCurrentEnv().getExtMetaCacheMgr().invalidateDbCache(catalog.getId(), dbName); + ((HMSExternalCatalog) catalog).unregisterDatabase(dbName); } public void addExternalDatabase(String dbName, String catalogName, boolean ignoreIfExists) @@ -881,7 +873,7 @@ public void addExternalDatabase(String dbName, String catalogName, boolean ignor return; } - ((HMSExternalCatalog) catalog).addDatabase(dbId, dbName); + ((HMSExternalCatalog) catalog).registerDatabase(dbId, dbName); }
public void addExternalPartitions(String catalogName, String dbName, String tableName, diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java new file mode 100644 index 00000000000000..cae613d3c919ed --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java @@ -0,0 +1,28 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource; + +import java.util.Map; + +public interface CatalogTable { + String getDbName(); + + String getTableName(); + + Map getProperties(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index c71c13e4e83624..998b8a0eefee2c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -34,6 +34,7 @@ import org.apache.doris.common.io.Writable; import org.apache.doris.common.util.Util; import org.apache.doris.datasource.es.EsExternalDatabase; +import org.apache.doris.datasource.hive.ExternalMetadataOps; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.hive.HMSExternalDatabase; import org.apache.doris.datasource.iceberg.IcebergExternalDatabase; @@ -105,6 +106,7 @@ public abstract class ExternalCatalog protected Map dbNameToId = Maps.newConcurrentMap(); private boolean objectCreated = false; protected boolean invalidCacheInInit = true; + protected ExternalMetadataOps metadataOps; private ExternalSchemaCache schemaCache; private String comment; @@ -134,16 +136,21 @@ public Configuration getConfiguration() { /** * set some default properties when creating catalog + * @return list of database names in this catalog */ + protected List listDatabaseNames() { + if (metadataOps == null) { + throw new UnsupportedOperationException("Unsupported operation: " + + "listDatabaseNames from remote client when init catalog with " + logType.name()); + } else { + return metadataOps.listDatabaseNames(); + } + } + public void setDefaultPropsWhenCreating(boolean isReplay) throws DdlException { } - /** - * @return list of database names in this catalog - */ - protected abstract List listDatabaseNames(); - /** * @param dbName * @return names of tables in specified database @@ -583,30 +590,48 
@@ public void addDatabaseForTest(ExternalDatabase db) { dbNameToId.put(ClusterNamespace.getNameFromFullName(db.getFullName()), db.getId()); } + @Override public void createDb(CreateDbStmt stmt) throws DdlException { - throw new NotImplementedException("dropDatabase not implemented"); + makeSureInitialized(); + if (metadataOps == null) { + throw new NotImplementedException("dropDatabase not implemented"); + } + metadataOps.createDb(stmt); } + @Override public void dropDb(DropDbStmt stmt) throws DdlException { - throw new NotImplementedException("dropDatabase not implemented"); + makeSureInitialized(); + if (metadataOps == null) { + throw new NotImplementedException("dropDatabase not implemented"); + } + metadataOps.dropDb(stmt); } @Override public void createTable(CreateTableStmt stmt) throws UserException { - throw new NotImplementedException("createTable not implemented"); + makeSureInitialized(); + if (metadataOps == null) { + throw new NotImplementedException("createTable not implemented"); + } + metadataOps.createTable(stmt); } @Override public void dropTable(DropTableStmt stmt) throws DdlException { - throw new NotImplementedException("dropTable not implemented"); + makeSureInitialized(); + if (metadataOps == null) { + throw new NotImplementedException("dropTable not implemented"); + } + metadataOps.dropTable(stmt); } - public void removeDatabase(String dbName) { - throw new NotImplementedException("dropDatabase not implemented"); + public void unregisterDatabase(String dbName) { + throw new NotImplementedException("unregisterDatabase not implemented"); } - public void addDatabase(long dbId, String dbName) { - throw new NotImplementedException("createDatabase not implemented"); + public void registerDatabase(long dbId, String dbName) { + throw new NotImplementedException("registerDatabase not implemented"); } public Map getIncludeDatabaseMap() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java index cddd32e89d1910..5411927eb570b2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalDatabase.java @@ -364,8 +364,19 @@ public void gsonPostProcess() throws IOException { } @Override - public void removeMemoryTable(String tableName) { - throw new NotImplementedException("removeMemoryTable is not implemented at external database"); + public void unregisterTable(String tableName) { + if (LOG.isDebugEnabled()) { + LOG.debug("create table [{}]", tableName); + } + Long tableId = tableNameToId.remove(tableName); + if (tableId == null) { + LOG.warn("table [{}] does not exist when drop", tableName); + return; + } + idToTbl.remove(tableId); + setLastUpdateTime(System.currentTimeMillis()); + Env.getCurrentEnv().getExtMetaCacheMgr().invalidateTableCache( + extCatalog.getId(), getFullName(), tableName); } @Override @@ -374,8 +385,16 @@ public CatalogIf getCatalog() { } // Only used for sync hive metastore event - public void addMemoryTable(String tableName, long tableId) { - throw new NotImplementedException("addMemoryTable is not implemented at external database."); + public boolean registerTable(TableIf tableIf) { + long tableId = tableIf.getId(); + String tableName = tableIf.getName(); + if (LOG.isDebugEnabled()) { + LOG.debug("create table [{}]", tableName); + } + tableNameToId.put(tableName, tableId); + idToTbl.put(tableId, newExternalTable(tableName, tableId, extCatalog)); + setLastUpdateTime(System.currentTimeMillis()); + return true; } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java index 92e0c4ec0a6c39..e3f61def6e51ab 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java @@ -41,6 +41,7 @@ import org.apache.doris.analysis.FunctionCallExpr; import org.apache.doris.analysis.HashDistributionDesc; import org.apache.doris.analysis.KeysDesc; +import org.apache.doris.analysis.LiteralExpr; import org.apache.doris.analysis.PartitionDesc; import org.apache.doris.analysis.PartitionKeyDesc; import org.apache.doris.analysis.QueryStmt; @@ -64,6 +65,7 @@ import org.apache.doris.catalog.DatabaseProperty; import org.apache.doris.catalog.DistributionInfo; import org.apache.doris.catalog.DistributionInfo.DistributionInfoType; +import org.apache.doris.catalog.DynamicPartitionProperty; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.EnvFactory; import org.apache.doris.catalog.EsTable; @@ -133,7 +135,7 @@ import org.apache.doris.common.util.Util; import org.apache.doris.datasource.es.EsRepository; import org.apache.doris.datasource.hive.HMSCachedClient; -import org.apache.doris.datasource.hive.HMSCachedClientFactory; +import org.apache.doris.datasource.hive.HiveMetadataOps; import org.apache.doris.datasource.property.constants.HMSProperties; import org.apache.doris.nereids.trees.plans.commands.info.DropMTMVInfo; import org.apache.doris.nereids.trees.plans.commands.info.TableNameInfo; @@ -175,6 +177,7 @@ import lombok.Getter; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.time.StopWatch; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -650,7 +653,7 @@ public void recoverTable(RecoverTableStmt recoverStmt) throws DdlException { String tableName = recoverStmt.getTableName(); String newTableName = recoverStmt.getNewTableName(); - Database db = (Database) getDbOrDdlException(dbName); + Database db = getDbOrDdlException(dbName); db.writeLockOrDdlException(); try { if 
(Strings.isNullOrEmpty(newTableName)) { @@ -699,10 +702,6 @@ public void recoverPartition(RecoverPartitionStmt recoverStmt) throws DdlExcepti } } - public void replayEraseDatabase(long dbId) throws DdlException { - Env.getCurrentRecycleBin().replayEraseDatabase(dbId); - } - public void replayRecoverDatabase(RecoverInfo info) { long dbId = info.getDbId(); String newDbName = info.getNewDbName(); @@ -836,6 +835,8 @@ public void replayRenameDatabase(String dbName, String newDbName) { // Drop table public void dropTable(DropTableStmt stmt) throws DdlException { + Map costTimes = new TreeMap(); + StopWatch watch = StopWatch.createStarted(); String dbName = stmt.getDbName(); String tableName = stmt.getTableName(); LOG.info("begin to drop table: {} from db: {}, is force: {}", tableName, dbName, stmt.isForceDrop()); @@ -847,6 +848,8 @@ public void dropTable(DropTableStmt stmt) throws DdlException { } db.writeLockOrDdlException(); + watch.split(); + costTimes.put("1:dbWriteLock", watch.getSplitTime()); try { Table table = db.getTableNullable(tableName); if (table == null) { @@ -882,8 +885,12 @@ public void dropTable(DropTableStmt stmt) throws DdlException { + "] cannot be dropped. 
If you want to forcibly drop(cannot be recovered)," + " please use \"DROP table FORCE\"."); } + watch.split(); + costTimes.put("2:existCommittedTxns", watch.getSplitTime()); } table.writeLock(); + watch.split(); + costTimes.put("3:tableWriteLock", watch.getSplitTime()); long recycleTime = 0; try { if (table instanceof OlapTable && !stmt.isForceDrop()) { @@ -899,8 +906,12 @@ public void dropTable(DropTableStmt stmt) throws DdlException { Env.getCurrentEnv().getMtmvService().dropMTMV((MTMV) table); } unprotectDropTable(db, table, stmt.isForceDrop(), false, 0); + watch.split(); + costTimes.put("4:unprotectDropTable", watch.getSplitTime()); if (!stmt.isForceDrop()) { recycleTime = Env.getCurrentRecycleBin().getRecycleTimeById(table.getId()); + watch.split(); + costTimes.put("5:getRecycleTimeById", watch.getSplitTime()); } } finally { table.writeUnlock(); @@ -920,7 +931,10 @@ public void dropTable(DropTableStmt stmt) throws DdlException { } finally { db.writeUnlock(); } - LOG.info("finished dropping table: {} from db: {}, is force: {}", tableName, dbName, stmt.isForceDrop()); + watch.stop(); + costTimes.put("6:total", watch.getTime()); + LOG.info("finished dropping table: {} from db: {}, is force: {} cost: {}", + tableName, dbName, stmt.isForceDrop(), costTimes); } public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, boolean isReplay, @@ -935,9 +949,12 @@ public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, Env.getCurrentEnv().getMtmvService().deregisterMTMV((MTMV) table); } - db.removeMemoryTable(table.getName()); + db.unregisterTable(table.getName()); + StopWatch watch = StopWatch.createStarted(); Env.getCurrentRecycleBin().recycleTable(db.getId(), table, isReplay, isForceDrop, recycleTime); - LOG.info("finished dropping table[{}] in db[{}]", table.getName(), db.getFullName()); + watch.stop(); + LOG.info("finished dropping table[{}] in db[{}] recycleTable cost: {}ms", + table.getName(), db.getFullName(), 
watch.getTime()); return true; } @@ -1248,6 +1265,11 @@ public void createTableAsSelect(CreateTableAsSelectStmt stmt) throws DdlExceptio default: throw new DdlException("Unsupported string type for ctas"); } + if (resultExpr.getSrcSlotRef() != null + && resultExpr.getSrcSlotRef().getTable() != null + && !resultExpr.getSrcSlotRef().getTable().isManagedTable()) { + typeDef = new TypeDef(ScalarType.createStringType()); + } } else if (resultType.isDecimalV2() && resultType.equals(ScalarType.DECIMALV2)) { typeDef = new TypeDef(ScalarType.createDecimalType(27, 9)); } else if (resultType.isDecimalV3()) { @@ -1791,10 +1813,12 @@ public void dropPartition(Database db, OlapTable olapTable, DropPartitionClause recycleTime = Env.getCurrentRecycleBin().getRecycleTimeById(partition.getId()); } } - + long version = olapTable.getNextVersion(); + long versionTime = System.currentTimeMillis(); + olapTable.updateVisibleVersionAndTime(version, versionTime); // log DropPartitionInfo info = new DropPartitionInfo(db.getId(), olapTable.getId(), partitionName, isTempPartition, - clause.isForceDrop(), recycleTime); + clause.isForceDrop(), recycleTime, version, versionTime); Env.getCurrentEnv().getEditLog().logDropPartition(info); LOG.info("succeed in dropping partition[{}], table : [{}-{}], is temp : {}, is force : {}", @@ -1815,6 +1839,7 @@ public void replayDropPartition(DropPartitionInfo info) throws MetaNotFoundExcep Env.getCurrentRecycleBin().setRecycleTimeByIdForReplay(partition.getId(), info.getRecycleTime()); } } + olapTable.updateVisibleVersionAndTime(info.getVersion(), info.getVersionTime()); } finally { olapTable.writeUnlock(); } @@ -2001,7 +2026,7 @@ protected void afterCreatePartitions(long tableId, List partitionIds, List throws DdlException { } - protected void checkAvailableCapacity(Database db) throws DdlException { + public void checkAvailableCapacity(Database db) throws DdlException { // check cluster capacity Env.getCurrentSystemInfo().checkAvailableCapacity(); // check 
db quota @@ -2179,7 +2204,7 @@ private void createOlapTable(Database db, CreateTableStmt stmt) throws UserExcep // set time series compaction time threshold long timeSeriesCompactionTimeThresholdSeconds - = PropertyAnalyzer.TIME_SERIES_COMPACTION_TIME_THRESHOLD_SECONDS_DEFAULT_VALUE; + = PropertyAnalyzer.TIME_SERIES_COMPACTION_TIME_THRESHOLD_SECONDS_DEFAULT_VALUE; try { timeSeriesCompactionTimeThresholdSeconds = PropertyAnalyzer .analyzeTimeSeriesCompactionTimeThresholdSeconds(properties); @@ -2190,7 +2215,7 @@ private void createOlapTable(Database db, CreateTableStmt stmt) throws UserExcep // set time series compaction empty rowsets threshold long timeSeriesCompactionEmptyRowsetsThreshold - = PropertyAnalyzer.TIME_SERIES_COMPACTION_EMPTY_ROWSETS_THRESHOLD_DEFAULT_VALUE; + = PropertyAnalyzer.TIME_SERIES_COMPACTION_EMPTY_ROWSETS_THRESHOLD_DEFAULT_VALUE; try { timeSeriesCompactionEmptyRowsetsThreshold = PropertyAnalyzer .analyzeTimeSeriesCompactionEmptyRowsetsThreshold(properties); @@ -2472,7 +2497,7 @@ private void createOlapTable(Database db, CreateTableStmt stmt) throws UserExcep if (!col.getType().isFixedPointType() && !col.getType().isDateType()) { throw new DdlException("Sequence type only support integer types and date types"); } - olapTable.setSequenceMapCol(sequenceMapCol); + olapTable.setSequenceMapCol(col.getName()); olapTable.setSequenceInfo(col.getType()); } } catch (Exception e) { @@ -2591,7 +2616,27 @@ private void createOlapTable(Database db, CreateTableStmt stmt) throws UserExcep "Only support dynamic partition properties on range partition table"); } } - + // check the interval same between dynamic & auto range partition + DynamicPartitionProperty dynamicProperty = olapTable.getTableProperty() + .getDynamicPartitionProperty(); + if (dynamicProperty.isExist() && dynamicProperty.getEnable() + && partitionDesc.isAutoCreatePartitions()) { + String dynamicUnit = dynamicProperty.getTimeUnit(); + ArrayList autoExprs = partitionDesc.getPartitionExprs(); + 
for (Expr autoExpr : autoExprs) { + Expr func = (FunctionCallExpr) autoExpr; + for (Expr child : func.getChildren()) { + if (child instanceof LiteralExpr) { + String autoUnit = ((LiteralExpr) child).getStringValue(); + if (!dynamicUnit.equalsIgnoreCase(autoUnit)) { + throw new AnalysisException( + "If support auto partition and dynamic partition at same time, " + + "they must have the same interval unit."); + } + } + } + } + } } catch (AnalysisException e) { throw new DdlException(e.getMessage()); } @@ -2816,7 +2861,7 @@ private void createHiveTable(Database db, CreateTableStmt stmt) throws DdlExcept if (!Strings.isNullOrEmpty(hiveTable.getHiveProperties().get(HMSProperties.HIVE_VERSION))) { hiveConf.set(HMSProperties.HIVE_VERSION, hiveTable.getHiveProperties().get(HMSProperties.HIVE_VERSION)); } - HMSCachedClient client = new HMSCachedClientFactory().createCachedClient(hiveConf, 1, null); + HMSCachedClient client = HiveMetadataOps.createCachedClient(hiveConf, 1, null); if (!client.tableExists(hiveTable.getHiveDb(), hiveTable.getHiveTable())) { throw new DdlException(String.format("Table [%s] dose not exist in Hive.", hiveTable.getHiveDbTable())); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java new file mode 100644 index 00000000000000..66fb9b994b5065 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java @@ -0,0 +1,44 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.datasource.hive; + +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; +import org.apache.doris.common.DdlException; +import org.apache.doris.common.UserException; + +import java.util.List; + +public interface ExternalMetadataOps { + + void createDb(CreateDbStmt stmt) throws DdlException; + + void dropDb(DropDbStmt stmt) throws DdlException; + + void createTable(CreateTableStmt stmt) throws UserException; + + void dropTable(DropTableStmt stmt) throws DdlException; + + List listDatabaseNames(); + + List listTableNames(String db); + + boolean tableExist(String dbName, String tblName); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java index e083e43f898176..297248e7a48d96 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java @@ -18,6 +18,8 @@ package org.apache.doris.datasource.hive; import org.apache.doris.analysis.TableName; +import org.apache.doris.datasource.CatalogDatabase; +import org.apache.doris.datasource.CatalogTable; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; import org.apache.hadoop.hive.common.ValidWriteIdList; @@ -80,4 +82,12 @@ void acquireSharedLock(String queryId, long txnId, String 
user, TableName tblNam List partitionNames, long timeoutMs); String getCatalogLocation(String catalogName); + + void createDatabase(CatalogDatabase catalogDatabase); + + void dropDatabase(String dbName); + + void dropTable(String dbName, String tableName); + + void createTable(CatalogTable catalogTable, boolean ignoreIfExists); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClientFactory.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClientFactory.java deleted file mode 100644 index 10e81993d03638..00000000000000 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClientFactory.java +++ /dev/null @@ -1,42 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.datasource.hive; - -import org.apache.doris.catalog.JdbcResource; -import org.apache.doris.datasource.jdbc.client.JdbcClient; -import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; - -import com.google.common.base.Preconditions; -import org.apache.hadoop.hive.conf.HiveConf; - -public class HMSCachedClientFactory { - public static HMSCachedClient createCachedClient(HiveConf hiveConf, int thriftClientPoolSize, - JdbcClientConfig jdbcClientConfig) { - if (hiveConf != null) { - return new ThriftHMSCachedClient(hiveConf, thriftClientPoolSize); - } - Preconditions.checkNotNull(jdbcClientConfig, "hiveConf and jdbcClientConfig are both null"); - String dbType = JdbcClient.parseDbType(jdbcClientConfig.getJdbcUrl()); - switch (dbType) { - case JdbcResource.POSTGRESQL: - return new PostgreSQLJdbcHMSCachedClient(jdbcClientConfig); - default: - throw new IllegalArgumentException("Unsupported DB type: " + dbType); - } - } -} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index 0aa6b592ebe54b..e4581e67ddd3e8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -17,24 +17,17 @@ package org.apache.doris.datasource.hive; -import org.apache.doris.analysis.CreateDbStmt; -import org.apache.doris.analysis.CreateTableStmt; -import org.apache.doris.analysis.DropDbStmt; -import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.HdfsResource; +import org.apache.doris.catalog.external.ExternalMetadataOperations; import org.apache.doris.cluster.ClusterNamespace; import org.apache.doris.common.Config; import org.apache.doris.common.DdlException; -import org.apache.doris.common.UserException; import 
org.apache.doris.common.security.authentication.AuthenticationConfig; import org.apache.doris.common.security.authentication.HadoopUGI; -import org.apache.doris.common.util.QueryableReentrantLock; -import org.apache.doris.common.util.Util; import org.apache.doris.datasource.CatalogProperty; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; -import org.apache.doris.datasource.ExternalMetaIdMgr; import org.apache.doris.datasource.ExternalTable; import org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.SessionContext; @@ -46,23 +39,18 @@ import com.google.common.collect.Lists; import org.apache.commons.lang3.math.NumberUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.TimeUnit; /** * External catalog for hive metastore compatible data sources. */ public class HMSExternalCatalog extends ExternalCatalog { private static final Logger LOG = LogManager.getLogger(HMSExternalCatalog.class); - private static final int MIN_CLIENT_POOL_SIZE = 8; - protected HMSCachedClient client; public static final String FILE_META_CACHE_TTL_SECOND = "file.meta.cache.ttl-second"; // broker name for file split and query scan. 
@@ -73,7 +61,6 @@ public class HMSExternalCatalog extends ExternalCatalog { public static final int FILE_META_CACHE_NO_TTL = -1; // 0 means file cache is disabled; >0 means file cache with ttl; public static final int FILE_META_CACHE_TTL_DISABLE_CACHE = 0; - private QueryableReentrantLock lock = new QueryableReentrantLock(true); public HMSExternalCatalog() { catalogProperty = new CatalogProperty(null, null); @@ -157,10 +144,9 @@ protected void initLocalObjectsImpl() { HadoopUGI.tryKrbLogin(this.getName(), AuthenticationConfig.getKerberosConfig(hiveConf, AuthenticationConfig.HIVE_KERBEROS_PRINCIPAL, AuthenticationConfig.HIVE_KERBEROS_KEYTAB)); - client = HMSCachedClientFactory.createCachedClient(hiveConf, - Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size), - jdbcClientConfig); + metadataOps = ExternalMetadataOperations.newHiveMetadataOps(hiveConf, jdbcClientConfig, this); } + } @Override @@ -172,13 +158,13 @@ public List listTableNames(SessionContext ctx, String dbName) { hmsExternalDatabase.getTables().forEach(table -> names.add(table.getName())); return names; } else { - return client.getAllTables(ClusterNamespace.getNameFromFullName(dbName)); + return metadataOps.listTableNames(ClusterNamespace.getNameFromFullName(dbName)); } } @Override public boolean tableExist(SessionContext ctx, String dbName, String tblName) { - return client.tableExists(ClusterNamespace.getNameFromFullName(dbName), tblName); + return metadataOps.tableExist(ClusterNamespace.getNameFromFullName(dbName), tblName); } @Override @@ -193,11 +179,11 @@ public boolean tableExistInLocal(String dbName, String tblName) { public HMSCachedClient getClient() { makeSureInitialized(); - return client; + return ((HiveMetadataOps) metadataOps).getClient(); } @Override - public void dropDatabase(String dbName) { + public void unregisterDatabase(String dbName) { if (LOG.isDebugEnabled()) { LOG.debug("drop database [{}]", dbName); } @@ -206,10 +192,11 @@ public void 
dropDatabase(String dbName) { LOG.warn("drop database [{}] failed", dbName); } idToDb.remove(dbId); + Env.getCurrentEnv().getExtMetaCacheMgr().invalidateDbCache(getId(), dbName); } @Override - public void createDatabase(long dbId, String dbName) { + public void registerDatabase(long dbId, String dbName) { if (LOG.isDebugEnabled()) { LOG.debug("create database [{}]", dbName); } @@ -227,143 +214,6 @@ public void notifyPropertiesUpdated(Map updatedProps) { } } - private boolean tryLock(boolean mustLock) { - while (true) { - try { - if (!lock.tryLock(Config.catalog_try_lock_timeout_ms, TimeUnit.MILLISECONDS)) { - // to see which thread held this lock for long time. - Thread owner = lock.getOwner(); - if (owner != null) { - // There are many catalog timeout during regression test - // And this timeout should not happen very often, so it could be info log - LOG.info("catalog lock is held by: {}", Util.dumpThread(owner, 10)); - } - - if (mustLock) { - continue; - } else { - return false; - } - } - return true; - } catch (InterruptedException e) { - LOG.warn("got exception while getting catalog lock", e); - if (mustLock) { - continue; - } else { - return lock.isHeldByCurrentThread(); - } - } - } - } - - private void unlock() { - if (lock.isHeldByCurrentThread()) { - this.lock.unlock(); - } - } - - @Override - public void createDb(CreateDbStmt stmt) throws DdlException { - String fullDbName = stmt.getFullDbName(); - Map properties = stmt.getProperties(); - long id = Env.getCurrentEnv().getNextId(); - - if (!tryLock(false)) { - throw new DdlException("Failed to acquire catalog lock. 
Try again"); - } - try { - HiveCatalogDatabase catalogDatabase = new HiveCatalogDatabase(); - catalogDatabase.setDbName(fullDbName); - catalogDatabase.setProperties(properties); - if (properties.containsKey("location_uri")) { - catalogDatabase.setLocationUri(properties.get("location_uri")); - } - catalogDatabase.setComment(properties.getOrDefault("comment", "")); - client.createDatabase(catalogDatabase); - addDatabase(id, fullDbName); - } finally { - unlock(); - } - LOG.info("createDb dbName = " + fullDbName + ", id = " + id); - } - - public void dropDb(DropDbStmt stmt) throws DdlException { - if (!tryLock(false)) { - throw new DdlException("Failed to acquire catalog lock. Try again"); - } - try { - client.dropDatabase(stmt.getDbName()); - removeDatabase(stmt.getDbName()); - } finally { - unlock(); - } - } - - @Override - public void createTable(CreateTableStmt stmt) throws UserException { - if (!tryLock(false)) { - throw new DdlException("Failed to acquire catalog lock. Try again"); - } - String dbName = stmt.getDbName(); - String tblName = stmt.getTableName(); - ExternalDatabase db = getDbNullable(dbName); - if (db == null) { - throw new UserException("Failed to get database: '" + dbName + "' in catalog: " + this.getName()); - } - try { - HiveCatalogTable catalogTable = new HiveCatalogTable(); - catalogTable.setDbName(dbName); - catalogTable.setTableName(tblName); - Map props = stmt.getExtProperties(); - catalogTable.setProperties(props); - String inputFormat = props.getOrDefault("input_format", - "org.apache.hadoop.mapred.TextInputFormat"); - String outputFormat = props.getOrDefault("output_format", - "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"); - catalogTable.setInputFormat(inputFormat); - catalogTable.setOutputFormat(outputFormat); - catalogTable.setPartitionKeys(parsePartitionKeys(props)); - client.createTable(catalogTable, stmt.isSetIfNotExists()); - long tableId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(getId(), dbName, 
tblName); - if (tableId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { - return; - } - db.addMemoryTable(tblName, tableId); - } finally { - unlock(); - } - } - - private static List parsePartitionKeys(Map props) { - List parsedKeys = new ArrayList<>(); - String pkStr = props.getOrDefault("partition_keys", ""); - if (pkStr.isEmpty()) { - return parsedKeys; - } else { - // TODO: parse string to partition keys list - return parsedKeys; - } - } - - @Override - public void dropTable(DropTableStmt stmt) throws DdlException { - if (!tryLock(false)) { - throw new DdlException("Failed to acquire catalog lock. Try again"); - } - String dbName = stmt.getDbName(); - ExternalDatabase db = getDbNullable(stmt.getDbName()); - if (db == null) { - throw new DdlException("Failed to get database: '" + dbName + "' in catalog: " + this.getName()); - } - try { - client.dropTable(dbName, stmt.getTableName()); - db.removeMemoryTable(stmt.getTableName()); - } finally { - unlock(); - } - } - @Override public void setDefaultPropsWhenCreating(boolean isReplay) { if (isReplay) { @@ -382,8 +232,4 @@ public String getHiveMetastoreUris() { public String getHiveVersion() { return catalogProperty.getOrDefault(HMSProperties.HIVE_VERSION, ""); } - - protected List listDatabaseNames() { - return client.getAllDatabases(); - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java index ee9b033c256820..8a16e66996f37a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalDatabase.java @@ -17,19 +17,15 @@ package org.apache.doris.datasource.hive; +import org.apache.doris.catalog.TableIf; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.InitDatabaseLog; -import 
org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - /** * Hive metastore external database. */ public class HMSExternalDatabase extends ExternalDatabase { - private static final Logger LOG = LogManager.getLogger(HMSExternalDatabase.class); - /** * Create HMS external database. * @@ -52,25 +48,12 @@ public void addTableForTest(HMSExternalTable tbl) { } @Override - public void removeMemoryTable(String tableName) { - if (LOG.isDebugEnabled()) { - LOG.debug("replayDropTableFromEvent [{}]", tableName); - } - LOG.debug("replayDropTableFromEvent [{}]", tableName); - Long tableId = tableNameToId.remove(tableName); - if (tableId == null) { - LOG.warn("replayDropTableFromEvent [{}] failed", tableName); - return; - } - idToTbl.remove(tableId); - } - - @Override - public void addMemoryTable(String tableName, long tableId) { - if (LOG.isDebugEnabled()) { - LOG.debug("create table [{}]", tableName); + public boolean registerTable(TableIf tableIf) { + super.registerTable(tableIf); + HMSExternalTable table = getTableNullable(tableIf.getName()); + if (table != null) { + table.setEventUpdateTime(tableIf.getUpdateTime()); } - tableNameToId.put(tableName, tableId); - idToTbl.put(tableId, newExternalTable(tableName, tableId, extCatalog)); + return true; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java new file mode 100644 index 00000000000000..f2917996fba376 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java @@ -0,0 +1,32 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.datasource.hive; + +import org.apache.doris.datasource.CatalogDatabase; + +import lombok.Data; + +import java.util.Map; + +@Data +public class HiveCatalogDatabase implements CatalogDatabase { + private String dbName; + private String locationUri; + private Map properties; + private String comment; +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java new file mode 100644 index 00000000000000..e799400bef0924 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java @@ -0,0 +1,94 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.datasource.hive; + +import org.apache.doris.catalog.Column; +import org.apache.doris.datasource.CatalogTable; + +import org.apache.hadoop.hive.metastore.api.FieldSchema; + +import java.util.List; +import java.util.Map; + +public class HiveCatalogTable implements CatalogTable { + private String dbName; + private String tableName; + private List columns; + private List partitionKeys; + private String inputFormat; + private String outputFormat; + private Map properties; + // private String viewSql; + + public HiveCatalogTable(String dbName, + String tblName, + List columns, + List partitionKeys, + Map props, + String inputFormat, + String outputFormat) { + this.dbName = dbName; + this.tableName = tblName; + this.columns = columns; + this.partitionKeys = partitionKeys; + this.inputFormat = inputFormat; + this.outputFormat = outputFormat; + this.properties = props; + } + + @Override + public String getDbName() { + return dbName; + } + + @Override + public String getTableName() { + return tableName; + } + + @Override + public Map getProperties() { + return properties; + } + + public List getColumns() { + return columns; + } + + public List getPartitionKeys() { + return partitionKeys; + } + + public String getInputFormat() { + return inputFormat; + } + + public String getOutputFormat() { + return outputFormat; + } + + public static HiveCatalogTable of(String dbName, + String tblName, + List columns, + List partitionKeys, + Map props, + String inputFormat, + String outputFormat) { + return new HiveCatalogTable(dbName, tblName, columns, partitionKeys, props, inputFormat, outputFormat); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java index d5656778cdf535..4343fa1ff5baa1 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java @@ -150,7 +150,7 @@ public static String getFormat(String input) throws DdlException { } } - public static IMetaStoreClient getClient(String metaStoreUris) throws DdlException { + private static IMetaStoreClient getClient(String metaStoreUris) throws DdlException { HiveConf hiveConf = new HiveConf(); hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreUris); hiveConf.set(ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(), diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java new file mode 100644 index 00000000000000..54b5ebf4de0f81 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -0,0 +1,185 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.hive; + +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; +import org.apache.doris.catalog.Env; +import org.apache.doris.catalog.JdbcResource; +import org.apache.doris.catalog.external.ExternalDatabase; +import org.apache.doris.catalog.external.NamedExternalTable; +import org.apache.doris.common.Config; +import org.apache.doris.common.DdlException; +import org.apache.doris.common.UserException; +import org.apache.doris.datasource.ExternalMetaIdMgr; +import org.apache.doris.datasource.HMSExternalCatalog; +import org.apache.doris.datasource.jdbc.client.JdbcClient; +import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; + +import com.google.common.base.Preconditions; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class HiveMetadataOps implements ExternalMetadataOps { + private static final Logger LOG = LogManager.getLogger(HiveMetadataOps.class); + private static final int MIN_CLIENT_POOL_SIZE = 8; + private JdbcClientConfig jdbcClientConfig; + private HiveConf hiveConf; + private HMSExternalCatalog catalog; + private HMSCachedClient client; + + public HiveMetadataOps(HiveConf hiveConf, JdbcClientConfig jdbcClientConfig, HMSExternalCatalog catalog) { + this.hiveConf = hiveConf; + this.jdbcClientConfig = jdbcClientConfig; + this.client = createCachedClient(hiveConf, + Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size), jdbcClientConfig); + } + + public HMSCachedClient getClient() { + return client; + } + + public static HMSCachedClient createCachedClient(HiveConf hiveConf, int thriftClientPoolSize, + JdbcClientConfig 
jdbcClientConfig) { + if (hiveConf != null) { + return new ThriftHMSCachedClient(hiveConf, thriftClientPoolSize); + } + Preconditions.checkNotNull(jdbcClientConfig, "hiveConf and jdbcClientConfig are both null"); + String dbType = JdbcClient.parseDbType(jdbcClientConfig.getJdbcUrl()); + switch (dbType) { + case JdbcResource.POSTGRESQL: + return new PostgreSQLJdbcHMSCachedClient(jdbcClientConfig); + default: + throw new IllegalArgumentException("Unsupported DB type: " + dbType); + } + } + + @Override + public void createDb(CreateDbStmt stmt) throws DdlException { + String fullDbName = stmt.getFullDbName(); + Map properties = stmt.getProperties(); + long id = Env.getCurrentEnv().getNextId(); + try { + HiveCatalogDatabase catalogDatabase = new HiveCatalogDatabase(); + catalogDatabase.setDbName(fullDbName); + catalogDatabase.setProperties(properties); + if (properties.containsKey("location_uri")) { + catalogDatabase.setLocationUri(properties.get("location_uri")); + } + catalogDatabase.setComment(properties.getOrDefault("comment", "")); + client.createDatabase(catalogDatabase); + catalog.registerDatabase(id, fullDbName); + } catch (Exception e) { + throw new RuntimeException(e.getMessage(), e); + } + LOG.info("createDb dbName = " + fullDbName + ", id = " + id); + } + + @Override + public void dropDb(DropDbStmt stmt) throws DdlException { + String dbName = stmt.getDbName(); + try { + client.dropDatabase(dbName); + catalog.unregisterDatabase(dbName); + } catch (Exception e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + @Override + public void createTable(CreateTableStmt stmt) throws UserException { + String dbName = stmt.getDbName(); + String tblName = stmt.getTableName(); + ExternalDatabase db = catalog.getDbNullable(dbName); + if (db == null) { + throw new UserException("Failed to get database: '" + dbName + "' in catalog: " + catalog.getName()); + } + try { + Map props = stmt.getExtProperties(); + String inputFormat = props.getOrDefault("input_format", 
+ "org.apache.hadoop.mapred.TextInputFormat"); // 定义成可配置的常量 + String outputFormat = props.getOrDefault("output_format", + "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"); // 定义成可配置的常量 + HiveCatalogTable catalogTable = HiveCatalogTable.of(dbName, + tblName, + stmt.getColumns(), + parsePartitionKeys(props), + props, + inputFormat, + outputFormat); // use HiveCatalogTable.of + + client.createTable(catalogTable, stmt.isSetIfNotExists()); + // TODO: need add first, use increased id + long tableId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(catalog.getId(), dbName, tblName); + if (tableId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { + return; + } + db.registerTable(NamedExternalTable.of(tableId, tblName, dbName, catalog)); + } catch (Exception e) { + throw new UserException(e.getMessage(), e); + } + } + + private static List parsePartitionKeys(Map props) { + List parsedKeys = new ArrayList<>(); + String pkStr = props.getOrDefault("partition_keys", ""); + if (pkStr.isEmpty()) { + return parsedKeys; + } else { + // TODO: parse string to partition keys list + return parsedKeys; + } + } + + @Override + public void dropTable(DropTableStmt stmt) throws DdlException { + String dbName = stmt.getDbName(); + ExternalDatabase db = catalog.getDbNullable(stmt.getDbName()); + if (db == null) { + throw new DdlException("Failed to get database: '" + dbName + "' in catalog: " + catalog.getName()); + } + try { + client.dropTable(dbName, stmt.getTableName()); + db.unregisterTable(stmt.getTableName()); + } catch (Exception e) { + throw new DdlException(e.getMessage(), e); + } + } + + @Override + public List listTableNames(String dbName) { + return client.getAllTables(dbName); + } + + @Override + public boolean tableExist(String dbName, String tblName) { + return client.tableExists(dbName, tblName); + } + + public List listDatabaseNames() { + return client.getAllDatabases(); + } +} diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index b0d21be084b2d4..eb4fcfe272b496 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -59,7 +59,9 @@ import org.apache.hadoop.hive.metastore.txn.TxnUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; +import java.security.PrivilegedExceptionAction; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; @@ -98,7 +100,7 @@ public ThriftHMSCachedClient(HiveConf hiveConf, int poolSize) { public List getAllDatabases() { try (ThriftHMSClient client = getClient()) { try { - return client.client.getAllDatabases(); + return ugiDoAs(client.client::getAllDatabases); } catch (Exception e) { client.setThrowable(e); throw e; @@ -112,7 +114,7 @@ public List getAllDatabases() { public List getAllTables(String dbName) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getAllTables(dbName); + return ugiDoAs(() -> client.client.getAllTables(dbName)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -128,14 +130,10 @@ public void createDatabase(CatalogDatabase db) { try { if (db instanceof HiveCatalogDatabase) { HiveCatalogDatabase hiveDb = (HiveCatalogDatabase) db; - Database database = new Database(); - database.setName(hiveDb.getDbName()); - if (StringUtils.isNotEmpty(hiveDb.getLocationUri())) { - database.setLocationUri(hiveDb.getLocationUri()); - } - database.setParameters(hiveDb.getProperties()); - database.setDescription(hiveDb.getComment()); - client.client.createDatabase(database); + ugiDoAs(() -> { + client.client.createDatabase(toHiveDatabase(hiveDb)); + return null; + }); } } catch (Exception e) { client.setThrowable(e); 
@@ -146,6 +144,18 @@ public void createDatabase(CatalogDatabase db) { } } + @NotNull + private static Database toHiveDatabase(HiveCatalogDatabase hiveDb) { + Database database = new Database(); + database.setName(hiveDb.getDbName()); + if (StringUtils.isNotEmpty(hiveDb.getLocationUri())) { + database.setLocationUri(hiveDb.getLocationUri()); + } + database.setParameters(hiveDb.getProperties()); + database.setDescription(hiveDb.getComment()); + return database; + } + @Override public void createTable(CatalogTable tbl, boolean ignoreIfExists) { if (tableExists(tbl.getDbName(), tbl.getTableName())) { @@ -160,7 +170,10 @@ public void createTable(CatalogTable tbl, boolean ignoreIfExists) { // Map parameters // parameters.put("", "doris created") if (tbl instanceof HiveCatalogTable) { - client.client.createTable(toHiveTable((HiveCatalogTable) tbl)); + ugiDoAs(() -> { + client.client.createTable(toHiveTable((HiveCatalogTable) tbl)); + return null; + }); } } catch (Exception e) { client.setThrowable(e); @@ -181,6 +194,7 @@ private static Table toHiveTable(HiveCatalogTable hiveTable) { table.setLastAccessTime(createTime); // table.setRetention(0); StorageDescriptor sd = new StorageDescriptor(); + // sd.setCols(toHiveColumns(hiveTable.getColumns())); sd.setInputFormat(hiveTable.getInputFormat()); sd.setOutputFormat(hiveTable.getOutputFormat()); Map parameters = new HashMap<>(); @@ -199,7 +213,10 @@ private static Table toHiveTable(HiveCatalogTable hiveTable) { public void dropDatabase(String dbName) { try (ThriftHMSClient client = getClient()) { try { - client.client.dropDatabase(dbName); + ugiDoAs(() -> { + client.client.dropDatabase(dbName); + return null; + }); } catch (Exception e) { client.setThrowable(e); throw e; @@ -213,7 +230,10 @@ public void dropDatabase(String dbName) { public void dropTable(String dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - client.client.dropTable(dbName, tblName); + ugiDoAs(() -> { + 
client.client.dropTable(dbName, tblName); + return null; + }); } catch (Exception e) { client.setThrowable(e); throw e; @@ -227,7 +247,7 @@ public void dropTable(String dbName, String tblName) { public boolean tableExists(String dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - return client.client.tableExists(dbName, tblName); + return ugiDoAs(() -> client.client.tableExists(dbName, tblName)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -248,7 +268,7 @@ public List listPartitionNames(String dbName, String tblName, long maxLi short limited = maxListPartitionNum <= Short.MAX_VALUE ? (short) maxListPartitionNum : MAX_LIST_PARTITION_NUM; try (ThriftHMSClient client = getClient()) { try { - return client.client.listPartitionNames(dbName, tblName, limited); + return ugiDoAs(() -> client.client.listPartitionNames(dbName, tblName, limited)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -262,7 +282,7 @@ public List listPartitionNames(String dbName, String tblName, long maxLi public Partition getPartition(String dbName, String tblName, List partitionValues) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getPartition(dbName, tblName, partitionValues); + return ugiDoAs(() -> client.client.getPartition(dbName, tblName, partitionValues)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -277,7 +297,7 @@ public Partition getPartition(String dbName, String tblName, List partit public List getPartitions(String dbName, String tblName, List partitionNames) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getPartitionsByNames(dbName, tblName, partitionNames); + return ugiDoAs(() -> client.client.getPartitionsByNames(dbName, tblName, partitionNames)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -292,7 +312,7 @@ public List getPartitions(String dbName, String tblName, List public Database getDatabase(String dbName) { try (ThriftHMSClient client 
= getClient()) { try { - return client.client.getDatabase(dbName); + return ugiDoAs(() -> client.client.getDatabase(dbName)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -306,7 +326,7 @@ public Database getDatabase(String dbName) { public Table getTable(String dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getTable(dbName, tblName); + return ugiDoAs(() -> client.client.getTable(dbName, tblName)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -320,7 +340,7 @@ public Table getTable(String dbName, String tblName) { public List getSchema(String dbName, String tblName) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getSchema(dbName, tblName); + return ugiDoAs(() -> client.client.getSchema(dbName, tblName)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -334,7 +354,7 @@ public List getSchema(String dbName, String tblName) { public List getTableColumnStatistics(String dbName, String tblName, List columns) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getTableColumnStatistics(dbName, tblName, columns); + return ugiDoAs(() -> client.client.getTableColumnStatistics(dbName, tblName, columns)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -349,7 +369,7 @@ public Map> getPartitionColumnStatistics( String dbName, String tblName, List partNames, List columns) { try (ThriftHMSClient client = getClient()) { try { - return client.client.getPartitionColumnStatistics(dbName, tblName, partNames, columns); + return ugiDoAs(() -> client.client.getPartitionColumnStatistics(dbName, tblName, partNames, columns)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -363,7 +383,7 @@ public Map> getPartitionColumnStatistics( public CurrentNotificationEventId getCurrentNotificationEventId() { try (ThriftHMSClient client = getClient()) { try { - return client.client.getCurrentNotificationEventId(); + return 
ugiDoAs(client.client::getCurrentNotificationEventId); } catch (Exception e) { client.setThrowable(e); throw e; @@ -377,12 +397,12 @@ public CurrentNotificationEventId getCurrentNotificationEventId() { @Override public NotificationEventResponse getNextNotification(long lastEventId, - int maxEvents, - IMetaStoreClient.NotificationFilter filter) + int maxEvents, + IMetaStoreClient.NotificationFilter filter) throws MetastoreNotificationFetchException { try (ThriftHMSClient client = getClient()) { try { - return client.client.getNextNotification(lastEventId, maxEvents, filter); + return ugiDoAs(() -> client.client.getNextNotification(lastEventId, maxEvents, filter)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -399,7 +419,7 @@ public NotificationEventResponse getNextNotification(long lastEventId, public long openTxn(String user) { try (ThriftHMSClient client = getClient()) { try { - return client.client.openTxn(user); + return ugiDoAs(() -> client.client.openTxn(user)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -413,7 +433,10 @@ public long openTxn(String user) { public void commitTxn(long txnId) { try (ThriftHMSClient client = getClient()) { try { - client.client.commitTxn(txnId); + ugiDoAs(() -> { + client.client.commitTxn(txnId); + return null; + }); } catch (Exception e) { client.setThrowable(e); throw e; @@ -425,7 +448,7 @@ public void commitTxn(long txnId) { @Override public void acquireSharedLock(String queryId, long txnId, String user, TableName tblName, - List partitionNames, long timeoutMs) { + List partitionNames, long timeoutMs) { LockRequestBuilder request = new LockRequestBuilder(queryId).setTransactionId(txnId).setUser(user); List lockComponents = createLockComponentsForRead(tblName, partitionNames); for (LockComponent component : lockComponents) { @@ -434,7 +457,7 @@ public void acquireSharedLock(String queryId, long txnId, String user, TableName try (ThriftHMSClient client = getClient()) { LockResponse response; try { 
- response = client.client.lock(request.build()); + response = ugiDoAs(() -> client.client.lock(request.build())); } catch (Exception e) { client.setThrowable(e); throw e; @@ -462,20 +485,22 @@ public void acquireSharedLock(String queryId, long txnId, String user, TableName public ValidWriteIdList getValidWriteIds(String fullTableName, long currentTransactionId) { try (ThriftHMSClient client = getClient()) { try { - // Pass currentTxn as 0L to get the recent snapshot of valid transactions in Hive - // Do not pass currentTransactionId instead as - // it will break Hive's listing of delta directories if major compaction - // deletes delta directories for valid transactions that existed at the time transaction is opened - ValidTxnList validTransactions = client.client.getValidTxns(); - List tableValidWriteIdsList = client.client.getValidWriteIds( - Collections.singletonList(fullTableName), validTransactions.toString()); - if (tableValidWriteIdsList.size() != 1) { - throw new Exception("tableValidWriteIdsList's size should be 1"); - } - ValidTxnWriteIdList validTxnWriteIdList = TxnUtils.createValidTxnWriteIdList(currentTransactionId, - tableValidWriteIdsList); - ValidWriteIdList writeIdList = validTxnWriteIdList.getTableValidWriteIdList(fullTableName); - return writeIdList; + return ugiDoAs(() -> { + // Pass currentTxn as 0L to get the recent snapshot of valid transactions in Hive + // Do not pass currentTransactionId instead as + // it will break Hive's listing of delta directories if major compaction + // deletes delta directories for valid transactions that existed at the time transaction is opened + ValidTxnList validTransactions = client.client.getValidTxns(); + List tableValidWriteIdsList = client.client.getValidWriteIds( + Collections.singletonList(fullTableName), validTransactions.toString()); + if (tableValidWriteIdsList.size() != 1) { + throw new Exception("tableValidWriteIdsList's size should be 1"); + } + ValidTxnWriteIdList validTxnWriteIdList = 
TxnUtils.createValidTxnWriteIdList(currentTransactionId, + tableValidWriteIdsList); + ValidWriteIdList writeIdList = validTxnWriteIdList.getTableValidWriteIdList(fullTableName); + return writeIdList; + }); } catch (Exception e) { client.setThrowable(e); throw e; @@ -491,7 +516,7 @@ public ValidWriteIdList getValidWriteIds(String fullTableName, long currentTrans private LockResponse checkLock(long lockId) { try (ThriftHMSClient client = getClient()) { try { - return client.client.checkLock(lockId); + return ugiDoAs(() -> client.client.checkLock(lockId)); } catch (Exception e) { client.setThrowable(e); throw e; @@ -566,7 +591,7 @@ private ThriftHMSClient getClient() throws MetaException { synchronized (clientPool) { ThriftHMSClient client = clientPool.poll(); if (client == null) { - return new ThriftHMSClient(hiveConf); + return ugiDoAs(() -> new ThriftHMSClient(hiveConf)); } return client; } @@ -579,7 +604,7 @@ private ThriftHMSClient getClient() throws MetaException { public String getCatalogLocation(String catalogName) { try (ThriftHMSClient client = getClient()) { try { - Catalog catalog = client.client.getCatalog(catalogName); + Catalog catalog = ugiDoAs(() -> client.client.getCatalog(catalogName)); return catalog.getLocationUri(); } catch (Exception e) { client.setThrowable(e); @@ -589,4 +614,8 @@ public String getCatalogLocation(String catalogName) { throw new HMSClientException("failed to get location for %s from hms client", e, catalogName); } } + + private T ugiDoAs(PrivilegedExceptionAction action) { + return HiveMetaStoreClientHelper.ugiDoAs(hiveConf, action); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java index b133da2b176027..f791d5bf83ab42 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java @@ -17,43 +17,17 @@ package org.apache.doris.datasource.iceberg; -import org.apache.doris.analysis.CreateDbStmt; -import org.apache.doris.analysis.CreateTableStmt; -import org.apache.doris.analysis.DropDbStmt; -import org.apache.doris.analysis.DropTableStmt; -import org.apache.doris.catalog.Column; -import org.apache.doris.catalog.Env; -import org.apache.doris.catalog.StructField; -import org.apache.doris.catalog.StructType; -import org.apache.doris.common.AnalysisException; -import org.apache.doris.common.DdlException; -import org.apache.doris.common.FeNameFormat; -import org.apache.doris.common.UserException; -import org.apache.doris.common.util.Util; +import org.apache.doris.catalog.external.ExternalMetadataOperations; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.SessionContext; -import org.apache.doris.external.iceberg.util.DorisTypeToType; -import org.apache.doris.external.iceberg.util.DorisTypeVisitor; -import org.apache.doris.external.iceberg.util.IcebergUtils; -import org.apache.iceberg.PartitionSpec; -import org.apache.iceberg.Schema; import org.apache.iceberg.catalog.Catalog; -import org.apache.iceberg.catalog.Namespace; -import org.apache.iceberg.catalog.SupportsNamespaces; -import org.apache.iceberg.catalog.TableIdentifier; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import java.util.ArrayList; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; public abstract class IcebergExternalCatalog extends ExternalCatalog { - private static final Logger LOG = LogManager.getLogger(IcebergExternalCatalog.class); public static final String ICEBERG_CATALOG_TYPE = "iceberg.catalog.type"; public static final String ICEBERG_REST = "rest"; public static final String ICEBERG_HMS = "hms"; @@ -62,7 +36,6 @@ public 
abstract class IcebergExternalCatalog extends ExternalCatalog { public static final String ICEBERG_DLF = "dlf"; protected String icebergCatalogType; protected Catalog catalog; - protected SupportsNamespaces nsCatalog; public IcebergExternalCatalog(long catalogId, String name, String comment) { super(catalogId, name, InitCatalogLog.Type.ICEBERG, comment); @@ -70,18 +43,13 @@ public IcebergExternalCatalog(long catalogId, String name, String comment) { @Override protected void init() { - nsCatalog = (SupportsNamespaces) catalog; super.init(); + metadataOps = ExternalMetadataOperations.newIcebergMetadataOps(this, catalog); } public Catalog getCatalog() { makeSureInitialized(); - return catalog; - } - - public SupportsNamespaces getNsCatalog() { - makeSureInitialized(); - return nsCatalog; + return ((IcebergMetadataOps) metadataOps).getCatalog(); } public String getIcebergCatalogType() { @@ -89,87 +57,15 @@ public String getIcebergCatalogType() { return icebergCatalogType; } - protected List listDatabaseNames() { - return nsCatalog.listNamespaces().stream() - .map(e -> { - String dbName = e.toString(); - try { - FeNameFormat.checkDbName(dbName); - } catch (AnalysisException ex) { - Util.logAndThrowRuntimeException(LOG, - String.format("Not a supported namespace name format: %s", dbName), ex); - } - return dbName; - }) - .collect(Collectors.toList()); - } - @Override public boolean tableExist(SessionContext ctx, String dbName, String tblName) { makeSureInitialized(); - return catalog.tableExists(TableIdentifier.of(dbName, tblName)); + return metadataOps.tableExist(dbName, tblName); } @Override public List listTableNames(SessionContext ctx, String dbName) { makeSureInitialized(); - List tableIdentifiers = catalog.listTables(Namespace.of(dbName)); - return tableIdentifiers.stream().map(TableIdentifier::name).collect(Collectors.toList()); - } - - public org.apache.iceberg.Table getIcebergTable(String dbName, String tblName) { - makeSureInitialized(); - return 
Env.getCurrentEnv() - .getExtMetaCacheMgr() - .getIcebergMetadataCache() - .getIcebergTable(this, dbName, tblName); - } - - @Override - public void createDb(CreateDbStmt stmt) throws DdlException { - makeSureInitialized(); - SupportsNamespaces nsCatalog = (SupportsNamespaces) catalog; - String dbName = stmt.getFullDbName(); - Map properties = stmt.getProperties(); - nsCatalog.createNamespace(Namespace.of(dbName), properties); - // TODO 增加刷新流程,否则create之后,show不出来,只能refresh之后才能show出来 - } - - @Override - public void dropDb(DropDbStmt stmt) throws DdlException { - makeSureInitialized(); - SupportsNamespaces nsCatalog = (SupportsNamespaces) catalog; - String dbName = stmt.getDbName(); - if (dbNameToId.containsKey(dbName)) { - Long aLong = dbNameToId.get(dbName); - idToDb.remove(aLong); - dbNameToId.remove(dbName); - } - nsCatalog.dropNamespace(Namespace.of(dbName)); - } - - @Override - public void createTable(CreateTableStmt stmt) throws UserException { - makeSureInitialized(); - String dbName = stmt.getDbName(); - String tableName = stmt.getTableName(); - List columns = stmt.getColumns(); - List collect = columns.stream() - .map(col -> new StructField(col.getName(), col.getType(), col.getComment(), col.isAllowNull())) - .collect(Collectors.toList()); - StructType structType = new StructType(new ArrayList<>(collect)); - org.apache.iceberg.types.Type visit = DorisTypeVisitor.visit(structType, new DorisTypeToType(structType)); - Schema schema = new Schema(visit.asNestedType().asStructType().fields()); - Map properties = stmt.getProperties(); - PartitionSpec partitionSpec = IcebergUtils.solveIcebergPartitionSpec(properties, schema); - catalog.createTable(TableIdentifier.of(dbName, tableName), schema, partitionSpec, properties); - } - - @Override - public void dropTable(DropTableStmt stmt) throws DdlException { - makeSureInitialized(); - String dbName = stmt.getDbName(); - String tableName = stmt.getTableName(); - catalog.dropTable(TableIdentifier.of(dbName, tableName)); + 
return metadataOps.listTableNames(dbName); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java index 8ea86fdf904ef0..6875364437b8e1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java @@ -46,27 +46,4 @@ public List getTablesOnIdOrder() { // Sort the name instead, because the id may change. return getTables().stream().sorted(Comparator.comparing(TableIf::getName)).collect(Collectors.toList()); } - - @Override - public void removeMemoryTable(String tableName) { - if (LOG.isDebugEnabled()) { - LOG.debug("drop table [{}]", tableName); - } - Long tableId = tableNameToId.remove(tableName); - if (tableId == null) { - LOG.warn("drop table [{}] failed", tableName); - } - idToTbl.remove(tableId); - } - - @Override - public void addMemoryTable(String tableName, long tableId) { - if (LOG.isDebugEnabled()) { - LOG.debug("create table [{}]", tableName); - } - tableNameToId.put(tableName, tableId); - IcebergExternalTable table = new IcebergExternalTable(tableId, tableName, name, - (IcebergExternalCatalog) extCatalog); - idToTbl.put(tableId, table); - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java index 21f7c1d3d21cf8..eaf7d80c183185 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java @@ -35,6 +35,7 @@ import java.util.Optional; public class IcebergExternalTable extends ExternalTable { + public IcebergExternalTable(long id, String name, String dbName, IcebergExternalCatalog catalog) { super(id, name, catalog, 
dbName, TableType.ICEBERG_EXTERNAL_TABLE); } @@ -52,7 +53,6 @@ protected synchronized void makeSureInitialized() { @Override public List initSchema() { - makeSureInitialized(); return IcebergUtils.getSchema(catalog, dbName, name); } @@ -79,7 +79,7 @@ public Optional getColumnStatistic(String colName) { makeSureInitialized(); return HiveMetaStoreClientHelper.ugiDoAs(catalog.getConfiguration(), () -> StatisticsUtil.getIcebergColumnStats(colName, - ((IcebergExternalCatalog) catalog).getIcebergTable(dbName, name))); + IcebergUtils.getIcebergTable(catalog, dbName, name))); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java index a1be776292da41..6b00330d0fd15a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java @@ -26,6 +26,7 @@ import org.apache.iceberg.aws.glue.GlueCatalog; import org.apache.iceberg.aws.s3.S3FileIOProperties; import org.apache.iceberg.catalog.Namespace; +import org.apache.iceberg.catalog.SupportsNamespaces; import java.util.List; import java.util.Map; @@ -63,8 +64,6 @@ protected void initLocalObjectsImpl() { @Override protected List listDatabaseNames() { - return nsCatalog.listNamespaces().stream() - .map(Namespace::toString) - .collect(Collectors.toList()); + return metadataOps.listDatabaseNames(); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java index 2c0e18e56f5935..bb3c8c4d83a41b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java @@ -23,7 
+23,7 @@ import org.apache.doris.datasource.CatalogProperty; import org.apache.doris.datasource.HMSClientException; import org.apache.doris.datasource.hive.HMSCachedClient; -import org.apache.doris.datasource.hive.HMSCachedClientFactory; +import org.apache.doris.datasource.hive.HiveMetadataOps; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.datasource.property.constants.HMSProperties; @@ -78,7 +78,7 @@ protected void initLocalObjectsImpl() { throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName()); } } - HMSCachedClient cachedClient = HMSCachedClientFactory.createCachedClient(hiveConf, 1, null); + HMSCachedClient cachedClient = HiveMetadataOps.createCachedClient(hiveConf, 1, null); String location = cachedClient.getCatalogLocation("hive"); catalogProperties.put(CatalogProperties.WAREHOUSE_LOCATION, location); hiveCatalog.initialize(icebergCatalogType, catalogProperties); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java index aebc385f50f670..10fa205b095133 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java @@ -102,14 +102,6 @@ public Table getIcebergTable(CatalogIf catalog, String dbName, String tbName) { return icebergTable; } - private Table getIcebergTable(Catalog catalog, long catalogId, String dbName, String tbName, - Map props) { - Table table = HiveMetaStoreClientHelper.ugiDoAs(catalogId, - () -> catalog.loadTable(TableIdentifier.of(dbName, tbName))); - initIcebergTableFileIO(table, props); - return table; - } - public void invalidateCatalogCache(long catalogId) { snapshotListCache.asMap().keySet().stream() .filter(key -> key.catalogId == catalogId) diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java new file mode 100644 index 00000000000000..ed35e613146e93 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java @@ -0,0 +1,138 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +import org.apache.doris.analysis.CreateDbStmt; +import org.apache.doris.analysis.CreateTableStmt; +import org.apache.doris.analysis.DropDbStmt; +import org.apache.doris.analysis.DropTableStmt; +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; +import org.apache.doris.catalog.StructField; +import org.apache.doris.catalog.StructType; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.DdlException; +import org.apache.doris.common.FeNameFormat; +import org.apache.doris.common.UserException; +import org.apache.doris.common.util.Util; +import org.apache.doris.datasource.hive.ExternalMetadataOps; +import org.apache.doris.external.iceberg.util.DorisTypeToType; +import org.apache.doris.external.iceberg.util.DorisTypeVisitor; +import org.apache.doris.external.iceberg.util.IcebergUtils; + +import org.apache.iceberg.PartitionSpec; +import org.apache.iceberg.Schema; +import org.apache.iceberg.catalog.Catalog; +import org.apache.iceberg.catalog.Namespace; +import org.apache.iceberg.catalog.SupportsNamespaces; +import org.apache.iceberg.catalog.TableIdentifier; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class IcebergMetadataOps implements ExternalMetadataOps { + + private static final Logger LOG = LogManager.getLogger(IcebergMetadataOps.class); + protected Catalog catalog; + protected IcebergExternalCatalog dorisCatalog; + protected SupportsNamespaces nsCatalog; + + public IcebergMetadataOps(IcebergExternalCatalog dorisCatalog, Catalog catalog) { + this.dorisCatalog = dorisCatalog; + this.catalog = catalog; + nsCatalog = (SupportsNamespaces) catalog; + } + + public Catalog getCatalog() { + return catalog; + } + + @Override + public boolean tableExist(String dbName, String tblName) { + return 
catalog.tableExists(TableIdentifier.of(dbName, tblName)); + } + + public List listDatabaseNames() { + return nsCatalog.listNamespaces().stream() + .map(e -> { + String dbName = e.toString(); + try { + FeNameFormat.checkDbName(dbName); + } catch (AnalysisException ex) { + Util.logAndThrowRuntimeException(LOG, + String.format("Not a supported namespace name format: %s", dbName), ex); + } + return dbName; + }) + .collect(Collectors.toList()); + } + + @Override + public List listTableNames(String dbName) { + List tableIdentifiers = catalog.listTables(Namespace.of(dbName)); + return tableIdentifiers.stream().map(TableIdentifier::name).collect(Collectors.toList()); + } + + @Override + public void createDb(CreateDbStmt stmt) throws DdlException { + SupportsNamespaces nsCatalog = (SupportsNamespaces) catalog; + String dbName = stmt.getFullDbName(); + Map properties = stmt.getProperties(); + nsCatalog.createNamespace(Namespace.of(dbName), properties); + // TODO: add a refresh step here; otherwise the newly created database is not visible in SHOW until the catalog is refreshed + } + + @Override + public void dropDb(DropDbStmt stmt) throws DdlException { + SupportsNamespaces nsCatalog = (SupportsNamespaces) catalog; + String dbName = stmt.getDbName(); + if (dorisCatalog.getDbNameToId().containsKey(dbName)) { + Long aLong = dorisCatalog.getDbNameToId().get(dbName); + dorisCatalog.getIdToDb().remove(aLong); + dorisCatalog.getDbNameToId().remove(dbName); + } + nsCatalog.dropNamespace(Namespace.of(dbName)); + } + + @Override + public void createTable(CreateTableStmt stmt) throws UserException { + String dbName = stmt.getDbName(); + String tableName = stmt.getTableName(); + List columns = stmt.getColumns(); + List collect = columns.stream() + .map(col -> new StructField(col.getName(), col.getType(), col.getComment(), col.isAllowNull())) + .collect(Collectors.toList()); + StructType structType = new StructType(new ArrayList<>(collect)); + org.apache.iceberg.types.Type visit = DorisTypeVisitor.visit(structType, new DorisTypeToType(structType)); +
Schema schema = new Schema(visit.asNestedType().asStructType().fields()); + Map properties = stmt.getProperties(); + PartitionSpec partitionSpec = IcebergUtils.solveIcebergPartitionSpec(properties, schema); + catalog.createTable(TableIdentifier.of(dbName, tableName), schema, partitionSpec, properties); + } + + @Override + public void dropTable(DropTableStmt stmt) throws DdlException { + String dbName = stmt.getDbName(); + String tableName = stmt.getTableName(); + catalog.dropTable(TableIdentifier.of(dbName, tableName)); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java index 261730d4963dc4..ffb17ea852f8e7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java @@ -17,7 +17,6 @@ package org.apache.doris.datasource.iceberg; - import org.apache.doris.analysis.BinaryPredicate; import org.apache.doris.analysis.BoolLiteral; import org.apache.doris.analysis.CastExpr; @@ -33,9 +32,17 @@ import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.StringLiteral; import org.apache.doris.analysis.Subquery; +import org.apache.doris.catalog.ArrayType; +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.catalog.Type; import org.apache.doris.common.util.TimeUtils; +import org.apache.doris.datasource.ExternalCatalog; +import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper; import org.apache.doris.thrift.TExprOpcode; +import com.google.common.collect.Lists; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.expressions.Expression; @@ -46,6 +53,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.Map; import
java.util.regex.Matcher; import java.util.regex.Pattern; @@ -63,6 +71,9 @@ public Integer initialValue() { }; static long MILLIS_TO_NANO_TIME = 1000; private static final Pattern PARTITION_REG = Pattern.compile("(\\w+)\\((\\d+)?,?(\\w+)\\)"); + // https://iceberg.apache.org/spec/#schemas-and-data-types + // All time and timestamp values are stored with microsecond precision + private static final int ICEBERG_DATETIME_SCALE_MS = 6; public static Expression convertToIcebergExpr(Expr expr, Schema schema) { if (expr == null) { @@ -211,7 +222,11 @@ private static Object extractDorisLiteral(Expr expr) { return boolLiteral.getValue(); } else if (expr instanceof DateLiteral) { DateLiteral dateLiteral = (DateLiteral) expr; - return dateLiteral.unixTimestamp(TimeUtils.getTimeZone()) * MILLIS_TO_NANO_TIME; + if (dateLiteral.isDateType()) { + return dateLiteral.getStringValue(); + } else { + return dateLiteral.unixTimestamp(TimeUtils.getTimeZone()) * MILLIS_TO_NANO_TIME; + } } else if (expr instanceof DecimalLiteral) { DecimalLiteral decimalLiteral = (DecimalLiteral) expr; return decimalLiteral.getValue(); @@ -291,4 +306,78 @@ public static PartitionSpec solveIcebergPartitionSpec(Map proper return PartitionSpec.unpartitioned(); } } + + private static Type icebergPrimitiveTypeToDorisType(org.apache.iceberg.types.Type.PrimitiveType primitive) { + switch (primitive.typeId()) { + case BOOLEAN: + return Type.BOOLEAN; + case INTEGER: + return Type.INT; + case LONG: + return Type.BIGINT; + case FLOAT: + return Type.FLOAT; + case DOUBLE: + return Type.DOUBLE; + case STRING: + case BINARY: + case UUID: + return Type.STRING; + case FIXED: + Types.FixedType fixed = (Types.FixedType) primitive; + return ScalarType.createCharType(fixed.length()); + case DECIMAL: + Types.DecimalType decimal = (Types.DecimalType) primitive; + return ScalarType.createDecimalV3Type(decimal.precision(), decimal.scale()); + case DATE: + return ScalarType.createDateV2Type(); + case TIMESTAMP: + return 
ScalarType.createDatetimeV2Type(ICEBERG_DATETIME_SCALE_MS); + case TIME: + return Type.UNSUPPORTED; + default: + throw new IllegalArgumentException("Cannot transform unknown type: " + primitive); + } + } + + public static Type icebergTypeToDorisType(org.apache.iceberg.types.Type type) { + if (type.isPrimitiveType()) { + return icebergPrimitiveTypeToDorisType((org.apache.iceberg.types.Type.PrimitiveType) type); + } + switch (type.typeId()) { + case LIST: + Types.ListType list = (Types.ListType) type; + return ArrayType.create(icebergTypeToDorisType(list.elementType()), true); + case MAP: + case STRUCT: + return Type.UNSUPPORTED; + default: + throw new IllegalArgumentException("Cannot transform unknown type: " + type); + } + } + + public static org.apache.iceberg.Table getIcebergTable(ExternalCatalog catalog, String dbName, String tblName) { + return Env.getCurrentEnv() + .getExtMetaCacheMgr() + .getIcebergMetadataCache() + .getIcebergTable(catalog, dbName, tblName); + } + + /** + * Get iceberg schema from catalog and convert them to doris schema + */ + public static List getSchema(ExternalCatalog catalog, String dbName, String name) { + return HiveMetaStoreClientHelper.ugiDoAs(catalog.getConfiguration(), () -> { + org.apache.iceberg.Table icebergTable = getIcebergTable(catalog, dbName, name); + Schema schema = icebergTable.schema(); + List columns = schema.columns(); + List tmpSchema = Lists.newArrayListWithCapacity(columns.size()); + for (Types.NestedField field : columns) { + tmpSchema.add(new Column(field.name().toLowerCase(Locale.ROOT), + IcebergUtils.icebergTypeToDorisType(field.type()), true, null, true, field.doc(), true, + schema.caseInsensitiveFindField(field.name()).fieldId())); + } + return tmpSchema; + }); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java index e4c33de60ec858..afbfda4c7bdf0e 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java @@ -17,17 +17,17 @@ package org.apache.doris.datasource.paimon; +import org.apache.doris.catalog.TableIf; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.InitDatabaseLog; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import java.util.Comparator; +import java.util.List; +import java.util.stream.Collectors; public class PaimonExternalDatabase extends ExternalDatabase { - private static final Logger LOG = LogManager.getLogger(PaimonExternalDatabase.class); - public PaimonExternalDatabase(ExternalCatalog extCatalog, Long id, String name) { super(extCatalog, id, name, InitDatabaseLog.Type.PAIMON); } @@ -41,27 +41,4 @@ public List getTablesOnIdOrder() { // Sort the name instead, because the id may change. 
return getTables().stream().sorted(Comparator.comparing(TableIf::getName)).collect(Collectors.toList()); } - - @Override - public void removeMemoryTable(String tableName) { - if (LOG.isDebugEnabled()) { - LOG.debug("drop table [{}]", tableName); - } - Long tableId = tableNameToId.remove(tableName); - if (tableId == null) { - LOG.warn("drop table [{}] failed", tableName); - } - idToTbl.remove(tableId); - } - - @Override - public void addMemoryTable(String tableName, long tableId) { - if (LOG.isDebugEnabled()) { - LOG.debug("create table [{}]", tableName); - } - tableNameToId.put(tableName, tableId); - PaimonExternalTable table = new PaimonExternalTable(tableId, tableName, name, - (PaimonExternalCatalog) extCatalog); - idToTbl.put(tableId, table); - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java index f9b315cc5c09d6..33859dc5f30627 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java @@ -264,7 +264,7 @@ private static void setS3FsAccess(Map s3Properties, Map mysqlProp = Maps.newHashMap(); @@ -279,7 +279,7 @@ public static Database mockDb() throws UserException { } catch (DdlException e) { e.printStackTrace(); } - db.addMemoryTable(mysqlTable); + db.registerTable(mysqlTable); // 3. 
range partition olap table MaterializedIndex baseIndexP1 = new MaterializedIndex(TEST_TBL2_ID, IndexState.NORMAL); @@ -387,7 +387,7 @@ public static Database mockDb() throws UserException { olapTable2.setIndexMeta(TEST_ROLLUP_ID, TEST_ROLLUP_NAME, TEST_ROLLUP_SCHEMA, 0, ROLLUP_SCHEMA_HASH, (short) 1, TStorageType.COLUMN, KeysType.AGG_KEYS); - db.addMemoryTable(olapTable2); + db.registerTable(olapTable2); return db; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java index 7079f727a6d90e..ff60a6e8b9063b 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/backup/RestoreJobTest.java @@ -244,7 +244,7 @@ boolean await(long timeout, TimeUnit unit) { } // drop this table, cause we want to try restoring this table - db.removeMemoryTable(expectedRestoreTbl.getName()); + db.unregisterTable(expectedRestoreTbl.getName()); job = new RestoreJob(label, "2018-01-01 01:01:01", db.getId(), db.getFullName(), jobInfo, false, new ReplicaAllocation((short) 3), 100000, -1, false, false, false, env, repo.getId()); diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java index 5aa0a9ad17320d..93b7d95dbfc645 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/CatalogTestUtil.java @@ -230,7 +230,7 @@ public static Database createSimpleDb(long dbId, long tableId, long partitionId, table.setBaseIndexId(indexId); // db Database db = new Database(dbId, testDb1); - db.addMemoryTable(table); + db.registerTable(table); // add a es table to catalog try { @@ -288,7 +288,7 @@ public static void createDupTable(Database db) { TStorageType.COLUMN, KeysType.DUP_KEYS); table.setBaseIndexId(testIndexId2); // db - db.addMemoryTable(table); + 
db.registerTable(table); } public static void createEsTable(Database db) throws DdlException { @@ -319,7 +319,7 @@ public static void createEsTable(Database db) throws DdlException { properties.put(EsResource.KEYWORD_SNIFF, "true"); EsTable esTable = new EsTable(testEsTableId1, testEsTable1, columns, properties, partitionInfo); - db.addMemoryTable(esTable); + db.registerTable(esTable); } public static Backend createBackend(long id, String host, int heartPort, int bePort, int httpPort) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java index f18b195f61dd18..07ce2f9229b8de 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/DatabaseTest.java @@ -122,8 +122,8 @@ public void getTablesOnIdOrderOrThrowExceptionTest() throws MetaNotFoundExceptio List baseSchema2 = new LinkedList<>(); OlapTable table2 = new OlapTable(2001L, "baseTable2", baseSchema2, KeysType.DUP_KEYS, new SinglePartitionInfo(), new RandomDistributionInfo(10)); - db.addMemoryTable(table1); - db.addMemoryTable(table2); + db.registerTable(table1); + db.registerTable(table2); List tableIdList = Lists.newArrayList(2001L, 2000L); List
tableList = db.getTablesOnIdOrderOrThrowException(tableIdList); Assert.assertEquals(2, tableList.size()); @@ -138,7 +138,7 @@ public void getTableOrThrowExceptionTest() throws MetaNotFoundException { List baseSchema = new LinkedList<>(); OlapTable table = new OlapTable(2000L, "baseTable", baseSchema, KeysType.AGG_KEYS, new SinglePartitionInfo(), new RandomDistributionInfo(10)); - db.addMemoryTable(table); + db.registerTable(table); Table resultTable1 = db.getTableOrMetaException(2000L, Table.TableType.OLAP); Table resultTable2 = db.getTableOrMetaException("baseTable", Table.TableType.OLAP); Assert.assertEquals(table, resultTable1); @@ -168,9 +168,9 @@ public void createAndDropPartitionTest() { table.addPartition(partition); // create - Assert.assertTrue(db.addMemoryTable(table)); + Assert.assertTrue(db.registerTable(table)); // duplicate - Assert.assertFalse(db.addMemoryTable(table)); + Assert.assertFalse(db.registerTable(table)); Assert.assertEquals(table, db.getTableNullable(table.getId())); Assert.assertEquals(table, db.getTableNullable(table.getName())); @@ -185,11 +185,11 @@ public void createAndDropPartitionTest() { // drop // drop not exist tableFamily - db.removeMemoryTable("invalid"); + db.unregisterTable("invalid"); Assert.assertEquals(1, db.getTables().size()); - db.addMemoryTable(table); - db.removeMemoryTable(table.getName()); + db.registerTable(table); + db.unregisterTable(table.getName()); Assert.assertEquals(0, db.getTables().size()); } @@ -234,7 +234,7 @@ public void testSerialization() throws Exception { table.setIndexMeta(1L, "test", column, 1, 1, shortKeyColumnCount, TStorageType.COLUMN, KeysType.AGG_KEYS); Deencapsulation.setField(table, "baseIndexId", 1); table.addPartition(partition); - db2.addMemoryTable(table); + db2.registerTable(table); db2.write(dos); dos.flush(); diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java index 
6b78249babd8f7..7b1d595c45f166 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/InfoSchemaDbTest.java @@ -29,9 +29,9 @@ public class InfoSchemaDbTest { public void testNormal() throws IOException, DdlException { Database db = new InfoSchemaDb(); - Assert.assertFalse(db.addMemoryTable(null)); + Assert.assertFalse(db.registerTable(null)); Assert.assertFalse(db.createTableWithLock(null, false, false).first); - db.removeMemoryTable("authors"); + db.unregisterTable("authors"); Assert.assertThrows(IOException.class, () -> db.write(null)); Assert.assertNull(db.getTableNullable("authors")); } diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java index 941f96875ec985..6e17867b4d3b2b 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/MysqlDbTest.java @@ -29,9 +29,9 @@ public class MysqlDbTest { public void testNormal() throws IOException, DdlException { Database db = new MysqlDb(); - Assert.assertFalse(db.addMemoryTable(null)); + Assert.assertFalse(db.registerTable(null)); Assert.assertFalse(db.createTableWithLock(null, false, false).first); - db.removeMemoryTable("authors"); + db.unregisterTable("authors"); Assert.assertThrows(IOException.class, () -> db.write(null)); Assert.assertNull(db.getTableNullable("authors")); } diff --git a/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java b/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java index 64cbd93ae5e516..62ba34cf4e3d47 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/clone/DiskRebalanceTest.java @@ -171,7 +171,7 @@ public void testDiskRebalancerWithSameUsageDisk() { olapTable = new OlapTable(2, "fake table", new ArrayList<>(), 
KeysType.DUP_KEYS, new RangePartitionInfo(), new HashDistributionInfo()); - db.addMemoryTable(olapTable); + db.registerTable(olapTable); // 1 table, 3 partitions p0,p1,p2 MaterializedIndex materializedIndex = new MaterializedIndex(olapTable.getId(), null); @@ -214,7 +214,7 @@ public void testDiskRebalancerWithDiffUsageDisk() { olapTable = new OlapTable(2, "fake table", new ArrayList<>(), KeysType.DUP_KEYS, new RangePartitionInfo(), new HashDistributionInfo()); - db.addMemoryTable(olapTable); + db.registerTable(olapTable); // 1 table, 3 partitions p0,p1,p2 MaterializedIndex materializedIndex = new MaterializedIndex(olapTable.getId(), null); diff --git a/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java b/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java index 35c6a47701c6d0..52ccb90a12c778 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/clone/RebalanceTest.java @@ -152,7 +152,7 @@ long ignored() { olapTable = new OlapTable(2, "fake table", new ArrayList<>(), KeysType.DUP_KEYS, new RangePartitionInfo(), new HashDistributionInfo()); - db.addMemoryTable(olapTable); + db.registerTable(olapTable); // 1 table, 3 partitions p0,p1,p2 MaterializedIndex materializedIndex = new MaterializedIndex(olapTable.getId(), null); diff --git a/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java b/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java index 00ce1be8d49c36..b16879625cdea9 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java +++ b/fe/fe-core/src/test/java/org/apache/doris/common/util/UnitTestUtil.java @@ -123,7 +123,7 @@ public static Database createDb(long dbId, long tableId, long partitionId, long // db Database db = new Database(dbId, DB_NAME); - db.addMemoryTable(table); + db.registerTable(table); return db; } diff --git 
a/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java b/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java index edeb764c6af7ee..5144ddb5a1a5c4 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java +++ b/fe/fe-core/src/test/java/org/apache/doris/http/DorisHttpTestCase.java @@ -216,11 +216,11 @@ private static Env newDelegateCatalog() { //EasyMock.expect(catalog.getAuth()).andReturn(paloAuth).anyTimes(); Database db = new Database(testDbId, "testDb"); OlapTable table = newTable(TABLE_NAME); - db.addMemoryTable(table); + db.registerTable(table); OlapTable table1 = newTable(TABLE_NAME + 1); - db.addMemoryTable(table1); + db.registerTable(table1); EsTable esTable = newEsTable("es_table"); - db.addMemoryTable(esTable); + db.registerTable(esTable); InternalCatalog internalCatalog = Deencapsulation.newInstance(InternalCatalog.class); new Expectations(internalCatalog) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java index 4b322d92058906..359775a8009646 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java @@ -268,22 +268,22 @@ Env getCurrentEnv() { db = ((InternalCatalog) env.getCurrentCatalog()).getDbNullable(fullDbName); // table and view init use analyzer, should init after analyzer build OlapTable tbl1 = createOrderTable(); - db.addMemoryTable(tbl1); + db.registerTable(tbl1); OlapTable tbl2 = createProfileTable(); - db.addMemoryTable(tbl2); + db.registerTable(tbl2); OlapTable tbl3 = createEventTable(); - db.addMemoryTable(tbl3); + db.registerTable(tbl3); // build view meta inline sql and create view directly, the originStmt from inline sql // should be analyzed by create view statement analyzer and then to sql View view1 = createEventView1(); - db.addMemoryTable(view1); + db.registerTable(view1); 
View view2 = createEventView2(); - db.addMemoryTable(view2); + db.registerTable(view2); View view3 = createEventView3(); - db.addMemoryTable(view3); + db.registerTable(view3); View view4 = createEventNestedView(); - db.addMemoryTable(view4); + db.registerTable(view4); } private OlapTable createOrderTable() { From 3c97cc9c9abf2781828e362376d27290006c772e Mon Sep 17 00:00:00 2001 From: slothever Date: Wed, 21 Feb 2024 15:29:15 +0800 Subject: [PATCH 04/16] fix rebase compile --- .../doris/analysis/CreateTableStmt.java | 2 +- .../org/apache/doris/catalog/DatabaseIf.java | 9 +++ .../apache/doris/datasource/CatalogMgr.java | 8 +- ...logDatabase.java => DatabaseMetadata.java} | 2 +- .../{CatalogTable.java => TableMetadata.java} | 2 +- .../datasource/hive/ExternalMetadataOps.java | 38 +++++++++ .../datasource/hive/HMSCachedClient.java | 8 +- ...atabase.java => HiveDatabaseMetadata.java} | 4 +- .../datasource/hive/HiveMetadataOps.java | 7 +- ...talogTable.java => HiveTableMetadata.java} | 34 ++++---- .../hive/PostgreSQLJdbcHMSCachedClient.java | 8 +- .../hive/ThriftHMSCachedClient.java | 20 ++--- .../hive/event/AlterTableEvent.java | 4 +- .../hive/event/CreateTableEvent.java | 2 +- .../iceberg/IcebergExternalDatabase.java | 15 ---- .../iceberg/IcebergGlueExternalCatalog.java | 3 - .../iceberg/IcebergHMSExternalCatalog.java | 14 ++-- .../iceberg/IcebergMetadataOps.java | 2 - .../datasource/iceberg/IcebergUtils.java | 2 +- .../paimon/PaimonExternalDatabase.java | 10 --- .../iceberg/util/DorisTypeVisitor.java | 79 +++++++++++++++++++ 21 files changed, 184 insertions(+), 89 deletions(-) rename fe/fe-core/src/main/java/org/apache/doris/datasource/{CatalogDatabase.java => DatabaseMetadata.java} (96%) rename fe/fe-core/src/main/java/org/apache/doris/datasource/{CatalogTable.java => TableMetadata.java} (96%) rename fe/fe-core/src/main/java/org/apache/doris/datasource/hive/{HiveCatalogDatabase.java => HiveDatabaseMetadata.java} (89%) rename
fe/fe-core/src/main/java/org/apache/doris/datasource/hive/{HiveCatalogTable.java => HiveTableMetadata.java} (67%) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java index cb00a037331916..70fbbf3543284b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java @@ -39,7 +39,7 @@ import org.apache.doris.common.util.ParseUtil; import org.apache.doris.common.util.PrintableMap; import org.apache.doris.common.util.PropertyAnalyzer; -import org.apache.doris.external.elasticsearch.EsUtil; +import org.apache.doris.datasource.es.EsUtil; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.qe.ConnectContext; diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java index 031989ec43baec..6c0d46ffe98824 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/DatabaseIf.java @@ -263,8 +263,17 @@ default OlapTable getOlapTableOrAnalysisException(String tableName) throws Analy return (OlapTable) table; } + /** + * register table to memory + * @param table created table + * @return true if add to memory + */ boolean registerTable(TableIf table); + /** + * unregister table from memory + * @param tableName table name + */ void unregisterTable(String tableName); CatalogIf getCatalog(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index e97647d00a8f4c..7e14302714f493 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -789,9 +789,9 @@ public boolean externalTableExistInLocal(String dbName, String tableName, String return ((ExternalCatalog) catalog).tableExistInLocal(dbName, tableName); } - public void loadExternalTableFromEvent(String dbName, String tableName, - String catalogName, long updateTime, - boolean ignoreIfExists) throws DdlException { + public void registerExternalTableFromEvent(String dbName, String tableName, + String catalogName, long updateTime, + boolean ignoreIfExists) throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { throw new DdlException("No catalog found with name: " + catalogName); @@ -814,7 +814,7 @@ public void loadExternalTableFromEvent(String dbName, String tableName, } return; } - // TODO:防止event和catalog建表的tableID冲突 + // TODO:avoid tableID conflict when use event or catalog to create table long tblId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(catalog.getId(), dbName, tableName); // -1L means it will be dropped later, ignore if (tblId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/DatabaseMetadata.java similarity index 96% rename from fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/DatabaseMetadata.java index 73305d099a7774..97905ce40ea3c8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/DatabaseMetadata.java @@ -17,6 +17,6 @@ package org.apache.doris.datasource; -public interface CatalogDatabase { +public interface DatabaseMetadata { String getDbName(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/TableMetadata.java similarity index 96% rename from fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/TableMetadata.java index cae613d3c919ed..e266c519e4241e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/TableMetadata.java @@ -19,7 +19,7 @@ import java.util.Map; -public interface CatalogTable { +public interface TableMetadata { String getDbName(); String getTableName(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java index 66fb9b994b5065..5a95b86372c658 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java @@ -26,19 +26,57 @@ import java.util.List; +/** + * all external metadata operations use this interface + */ public interface ExternalMetadataOps { + /** + * create db in external metastore + * @param stmt + * @throws DdlException + */ void createDb(CreateDbStmt stmt) throws DdlException; + /** + * drop db in external metastore + * @param stmt + * @throws DdlException + */ void dropDb(DropDbStmt stmt) throws DdlException; + /** + * + * @param stmt + * @throws UserException + */ void createTable(CreateTableStmt stmt) throws UserException; + /** + * + * @param stmt + * @throws DdlException + */ void dropTable(DropTableStmt stmt) throws DdlException; + /** + * + * @return + */ List listDatabaseNames(); + /** + * + * @param db + * @return + */ List listTableNames(String db); + /** + * + * @param dbName + * @param tblName + * @return + */ boolean tableExist(String dbName, String tblName); } diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java index 297248e7a48d96..c26de66058bcbd 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSCachedClient.java @@ -18,8 +18,8 @@ package org.apache.doris.datasource.hive; import org.apache.doris.analysis.TableName; -import org.apache.doris.datasource.CatalogDatabase; -import org.apache.doris.datasource.CatalogTable; +import org.apache.doris.datasource.DatabaseMetadata; +import org.apache.doris.datasource.TableMetadata; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; import org.apache.hadoop.hive.common.ValidWriteIdList; @@ -83,11 +83,11 @@ void acquireSharedLock(String queryId, long txnId, String user, TableName tblNam String getCatalogLocation(String catalogName); - void createDatabase(CatalogDatabase catalogDatabase); + void createDatabase(DatabaseMetadata catalogDatabase); void dropDatabase(String dbName); void dropTable(String dbName, String tableName); - void createTable(CatalogTable catalogTable, boolean ignoreIfExists); + void createTable(TableMetadata catalogTable, boolean ignoreIfExists); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java similarity index 89% rename from fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java index f2917996fba376..50a80db3962497 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java @@ -17,14 +17,14 @@ package org.apache.doris.datasource.hive; -import 
org.apache.doris.datasource.CatalogDatabase; +import org.apache.doris.datasource.DatabaseMetadata; import lombok.Data; import java.util.Map; @Data -public class HiveCatalogDatabase implements CatalogDatabase { +public class HiveDatabaseMetadata implements DatabaseMetadata { private String dbName; private String locationUri; private Map properties; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index 54b5ebf4de0f81..a5fb8df58fef8c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -23,13 +23,12 @@ import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.JdbcResource; -import org.apache.doris.catalog.external.ExternalDatabase; import org.apache.doris.catalog.external.NamedExternalTable; import org.apache.doris.common.Config; import org.apache.doris.common.DdlException; import org.apache.doris.common.UserException; +import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.ExternalMetaIdMgr; -import org.apache.doris.datasource.HMSExternalCatalog; import org.apache.doris.datasource.jdbc.client.JdbcClient; import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; @@ -83,7 +82,7 @@ public void createDb(CreateDbStmt stmt) throws DdlException { Map properties = stmt.getProperties(); long id = Env.getCurrentEnv().getNextId(); try { - HiveCatalogDatabase catalogDatabase = new HiveCatalogDatabase(); + HiveDatabaseMetadata catalogDatabase = new HiveDatabaseMetadata(); catalogDatabase.setDbName(fullDbName); catalogDatabase.setProperties(properties); if (properties.containsKey("location_uri")) { @@ -123,7 +122,7 @@ public void createTable(CreateTableStmt stmt) throws UserException { "org.apache.hadoop.mapred.TextInputFormat"); // 
定义成可配置的常量 String outputFormat = props.getOrDefault("output_format", "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"); // 定义成可配置的常量 - HiveCatalogTable catalogTable = HiveCatalogTable.of(dbName, + HiveTableMetadata catalogTable = HiveTableMetadata.of(dbName, tblName, stmt.getColumns(), parsePartitionKeys(props), diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java similarity index 67% rename from fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java index e799400bef0924..fa3c35e6ec380b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveCatalogTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java @@ -18,14 +18,14 @@ package org.apache.doris.datasource.hive; import org.apache.doris.catalog.Column; -import org.apache.doris.datasource.CatalogTable; +import org.apache.doris.datasource.TableMetadata; import org.apache.hadoop.hive.metastore.api.FieldSchema; import java.util.List; import java.util.Map; -public class HiveCatalogTable implements CatalogTable { +public class HiveTableMetadata implements TableMetadata { private String dbName; private String tableName; private List columns; @@ -35,13 +35,13 @@ public class HiveCatalogTable implements CatalogTable { private Map properties; // private String viewSql; - public HiveCatalogTable(String dbName, - String tblName, - List columns, - List partitionKeys, - Map props, - String inputFormat, - String outputFormat) { + public HiveTableMetadata(String dbName, + String tblName, + List columns, + List partitionKeys, + Map props, + String inputFormat, + String outputFormat) { this.dbName = dbName; this.tableName = tblName; this.columns = columns; @@ -82,13 +82,13 @@ public String getOutputFormat() { return 
outputFormat; } - public static HiveCatalogTable of(String dbName, - String tblName, - List columns, - List partitionKeys, - Map props, - String inputFormat, - String outputFormat) { - return new HiveCatalogTable(dbName, tblName, columns, partitionKeys, props, inputFormat, outputFormat); + public static HiveTableMetadata of(String dbName, + String tblName, + List columns, + List partitionKeys, + Map props, + String inputFormat, + String outputFormat) { + return new HiveTableMetadata(dbName, tblName, columns, partitionKeys, props, inputFormat, outputFormat); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java index 69ef45b0f6847f..e587debdb3553f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/PostgreSQLJdbcHMSCachedClient.java @@ -20,8 +20,8 @@ import org.apache.doris.analysis.TableName; import org.apache.doris.catalog.JdbcTable; import org.apache.doris.catalog.Type; -import org.apache.doris.datasource.CatalogDatabase; -import org.apache.doris.datasource.CatalogTable; +import org.apache.doris.datasource.DatabaseMetadata; +import org.apache.doris.datasource.TableMetadata; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; import org.apache.doris.thrift.TOdbcTableType; @@ -507,7 +507,7 @@ protected Type jdbcTypeToDoris(JdbcFieldSchema fieldSchema) { throw new HMSClientException("Do not support in PostgreSQLJdbcHMSCachedClient."); } - public void createDatabase(CatalogDatabase database) { + public void createDatabase(DatabaseMetadata database) { throw new NotImplementedException("PostgreSQL createDatabase not implemented"); } @@ -515,7 +515,7 @@ public void dropDatabase(String dbName) { throw new 
NotImplementedException("PostgreSQL dropDatabase not implemented"); } - public void createTable(CatalogTable hiveTable, boolean ignoreIfExists) { + public void createTable(TableMetadata hiveTable, boolean ignoreIfExists) { throw new NotImplementedException("PostgreSQL createTable not implemented"); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index eb4fcfe272b496..648a68ae5d76ae 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -19,8 +19,8 @@ import org.apache.doris.analysis.TableName; import org.apache.doris.common.Config; -import org.apache.doris.datasource.CatalogDatabase; -import org.apache.doris.datasource.CatalogTable; +import org.apache.doris.datasource.DatabaseMetadata; +import org.apache.doris.datasource.TableMetadata; import org.apache.doris.datasource.hive.event.MetastoreNotificationFetchException; import org.apache.doris.datasource.property.constants.HMSProperties; @@ -125,11 +125,11 @@ public List getAllTables(String dbName) { } @Override - public void createDatabase(CatalogDatabase db) { + public void createDatabase(DatabaseMetadata db) { try (ThriftHMSClient client = getClient()) { try { - if (db instanceof HiveCatalogDatabase) { - HiveCatalogDatabase hiveDb = (HiveCatalogDatabase) db; + if (db instanceof HiveDatabaseMetadata) { + HiveDatabaseMetadata hiveDb = (HiveDatabaseMetadata) db; ugiDoAs(() -> { client.client.createDatabase(toHiveDatabase(hiveDb)); return null; @@ -145,7 +145,7 @@ public void createDatabase(CatalogDatabase db) { } @NotNull - private static Database toHiveDatabase(HiveCatalogDatabase hiveDb) { + private static Database toHiveDatabase(HiveDatabaseMetadata hiveDb) { Database database = new Database(); database.setName(hiveDb.getDbName()); if 
(StringUtils.isNotEmpty(hiveDb.getLocationUri())) { @@ -157,7 +157,7 @@ private static Database toHiveDatabase(HiveCatalogDatabase hiveDb) { } @Override - public void createTable(CatalogTable tbl, boolean ignoreIfExists) { + public void createTable(TableMetadata tbl, boolean ignoreIfExists) { if (tableExists(tbl.getDbName(), tbl.getTableName())) { return; } @@ -169,9 +169,9 @@ public void createTable(CatalogTable tbl, boolean ignoreIfExists) { // String outputFormat, // Map parameters // parameters.put("", "doris created") - if (tbl instanceof HiveCatalogTable) { + if (tbl instanceof HiveTableMetadata) { ugiDoAs(() -> { - client.client.createTable(toHiveTable((HiveCatalogTable) tbl)); + client.client.createTable(toHiveTable((HiveTableMetadata) tbl)); return null; }); } @@ -184,7 +184,7 @@ public void createTable(CatalogTable tbl, boolean ignoreIfExists) { } } - private static Table toHiveTable(HiveCatalogTable hiveTable) { + private static Table toHiveTable(HiveTableMetadata hiveTable) { Table table = new Table(); table.setDbName(hiveTable.getDbName()); table.setTableName(hiveTable.getTableName()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java index 89d5c4a65270a9..222689075fc59a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java @@ -100,7 +100,7 @@ private void processRecreateTable() throws DdlException { Env.getCurrentEnv().getCatalogMgr() .unloadExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); Env.getCurrentEnv().getCatalogMgr() - .loadExternalTableFromEvent( + .registerExternalTableFromEvent( tableAfter.getDbName(), tableAfter.getTableName(), catalogName, eventTime, true); } @@ -119,7 +119,7 @@ private void processRename() throws DdlException { 
Env.getCurrentEnv().getCatalogMgr() .unloadExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); Env.getCurrentEnv().getCatalogMgr() - .loadExternalTableFromEvent( + .registerExternalTableFromEvent( tableAfter.getDbName(), tableAfter.getTableName(), catalogName, eventTime, true); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java index 2d8967dbaf5f04..e6c3e2e7eae9d0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateTableEvent.java @@ -80,7 +80,7 @@ protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}],tableName:[{}]", catalogName, dbName, tblName); Env.getCurrentEnv().getCatalogMgr() - .loadExternalTableFromEvent(dbName, hmsTbl.getTableName(), catalogName, eventTime, true); + .registerExternalTableFromEvent(dbName, hmsTbl.getTableName(), catalogName, eventTime, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java index 6875364437b8e1..5bc31e31cf41f3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalDatabase.java @@ -17,22 +17,12 @@ package org.apache.doris.datasource.iceberg; -import org.apache.doris.catalog.TableIf; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.InitDatabaseLog; -import org.apache.logging.log4j.LogManager; -import 
org.apache.logging.log4j.Logger; - -import java.util.Comparator; -import java.util.List; -import java.util.stream.Collectors; - public class IcebergExternalDatabase extends ExternalDatabase { - private static final Logger LOG = LogManager.getLogger(IcebergExternalDatabase.class); - public IcebergExternalDatabase(ExternalCatalog extCatalog, Long id, String name) { super(extCatalog, id, name, InitDatabaseLog.Type.ICEBERG); } @@ -41,9 +31,4 @@ public IcebergExternalDatabase(ExternalCatalog extCatalog, Long id, String name) protected IcebergExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new IcebergExternalTable(tblId, tableName, name, (IcebergExternalCatalog) extCatalog); } - - public List getTablesOnIdOrder() { - // Sort the name instead, because the id may change. - return getTables().stream().sorted(Comparator.comparing(TableIf::getName)).collect(Collectors.toList()); - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java index 6b00330d0fd15a..0c5bd6e1ba5948 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java @@ -25,12 +25,9 @@ import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.aws.glue.GlueCatalog; import org.apache.iceberg.aws.s3.S3FileIOProperties; -import org.apache.iceberg.catalog.Namespace; -import org.apache.iceberg.catalog.SupportsNamespaces; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; public class IcebergGlueExternalCatalog extends IcebergExternalCatalog { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java 
index bb3c8c4d83a41b..f8b8e6e17fdfb0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java @@ -17,12 +17,12 @@ package org.apache.doris.datasource.iceberg; -import org.apache.doris.catalog.AuthType; -import org.apache.doris.catalog.HdfsResource; import org.apache.doris.common.Config; +import org.apache.doris.common.security.authentication.AuthType; +import org.apache.doris.common.security.authentication.AuthenticationConfig; import org.apache.doris.datasource.CatalogProperty; -import org.apache.doris.datasource.HMSClientException; import org.apache.doris.datasource.hive.HMSCachedClient; +import org.apache.doris.datasource.hive.HMSClientException; import org.apache.doris.datasource.hive.HiveMetadataOps; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.datasource.property.constants.HMSProperties; @@ -61,9 +61,9 @@ protected void initLocalObjectsImpl() { hiveConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(), String.valueOf(Config.hive_metastore_client_timeout_second)); String authentication = catalogProperty.getOrDefault( - HdfsResource.HADOOP_SECURITY_AUTHENTICATION, ""); + AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, ""); if (AuthType.KERBEROS.getDesc().equals(authentication)) { - hiveConf.set(HdfsResource.HADOOP_SECURITY_AUTHENTICATION, authentication); + hiveConf.set(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, authentication); UserGroupInformation.setConfiguration(hiveConf); try { /** @@ -72,8 +72,8 @@ protected void initLocalObjectsImpl() { * it will relogin when TGT is expired, so we don't need to relogin manually. 
*/ UserGroupInformation.loginUserFromKeytab( - catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_PRINCIPAL, ""), - catalogProperty.getOrDefault(HdfsResource.HADOOP_KERBEROS_KEYTAB, "")); + catalogProperty.getOrDefault(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, ""), + catalogProperty.getOrDefault(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, "")); } catch (IOException e) { throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName()); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java index ed35e613146e93..fc8457f4d78a2c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java @@ -22,7 +22,6 @@ import org.apache.doris.analysis.DropDbStmt; import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.Column; -import org.apache.doris.catalog.Env; import org.apache.doris.catalog.StructField; import org.apache.doris.catalog.StructType; import org.apache.doris.common.AnalysisException; @@ -33,7 +32,6 @@ import org.apache.doris.datasource.hive.ExternalMetadataOps; import org.apache.doris.external.iceberg.util.DorisTypeToType; import org.apache.doris.external.iceberg.util.DorisTypeVisitor; -import org.apache.doris.external.iceberg.util.IcebergUtils; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java index ffb17ea852f8e7..e2a45b4db6ef76 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java @@ -36,7 +36,7 @@ import 
org.apache.doris.catalog.Column; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ScalarType; -import org.apache.doris.catalog.Type;w +import org.apache.doris.catalog.Type; import org.apache.doris.common.util.TimeUtils; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java index afbfda4c7bdf0e..fc0b614920f949 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalDatabase.java @@ -17,15 +17,10 @@ package org.apache.doris.datasource.paimon; -import org.apache.doris.catalog.TableIf; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalDatabase; import org.apache.doris.datasource.InitDatabaseLog; -import java.util.Comparator; -import java.util.List; -import java.util.stream.Collectors; - public class PaimonExternalDatabase extends ExternalDatabase { public PaimonExternalDatabase(ExternalCatalog extCatalog, Long id, String name) { @@ -36,9 +31,4 @@ public PaimonExternalDatabase(ExternalCatalog extCatalog, Long id, String name) protected PaimonExternalTable newExternalTable(String tableName, long tblId, ExternalCatalog catalog) { return new PaimonExternalTable(tblId, tableName, name, (PaimonExternalCatalog) extCatalog); } - - public List getTablesOnIdOrder() { - // Sort the name instead, because the id may change. 
- return getTables().stream().sorted(Comparator.comparing(TableIf::getName)).collect(Collectors.toList()); - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java new file mode 100644 index 00000000000000..8393672bb956a6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java @@ -0,0 +1,79 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.external.iceberg.util; + +import org.apache.doris.catalog.ArrayType; +import org.apache.doris.catalog.MapType; +import org.apache.doris.catalog.StructField; +import org.apache.doris.catalog.StructType; +import org.apache.doris.catalog.Type; + +import com.google.common.collect.Lists; + +import java.util.List; + +/** + * Utils to visit doris and iceberg type + * @param + */ +public class DorisTypeVisitor { + public static T visit(Type type, DorisTypeVisitor visitor) { + if (type instanceof StructType) { + List fields = ((StructType) type).getFields(); + List fieldResults = Lists.newArrayListWithExpectedSize(fields.size()); + + for (StructField field : fields) { + fieldResults.add(visitor.field( + field, + visit(field.getType(), visitor))); + } + + return visitor.struct((StructType) type, fieldResults); + } else if (type instanceof MapType) { + return visitor.map((MapType) type, + visit(((MapType) type).getKeyType(), visitor), + visit(((MapType) type).getValueType(), visitor)); + } else if (type instanceof ArrayType) { + return visitor.array( + (ArrayType) type, + visit(((ArrayType) type).getItemType(), visitor)); + } else { + return visitor.atomic(type); + } + } + + public T struct(StructType struct, List fieldResults) { + return null; + } + + public T field(StructField field, T typeResult) { + return null; + } + + public T array(ArrayType array, T elementResult) { + return null; + } + + public T map(MapType map, T keyResult, T valueResult) { + return null; + } + + public T atomic(Type atomic) { + return null; + } +} From f862a4f74f65ee875d6df6fb1272ea2d52e393af Mon Sep 17 00:00:00 2001 From: slothever Date: Wed, 21 Feb 2024 16:55:26 +0800 Subject: [PATCH 05/16] add some config --- .../java/org/apache/doris/common/Config.java | 11 ++++ .../apache/doris/datasource/CatalogMgr.java | 7 ++- .../hive/HiveMetaStoreClientHelper.java | 28 ++++++++++ .../datasource/hive/HiveMetadataOps.java | 8 ++- .../hive/ThriftHMSCachedClient.java | 53 
+++++++++++++------ .../hive/event/AlterDatabaseEvent.java | 4 +- .../hive/event/AlterTableEvent.java | 4 +- .../hive/event/CreateDatabaseEvent.java | 2 +- .../hive/event/DropDatabaseEvent.java | 2 +- .../datasource/hive/event/DropTableEvent.java | 2 +- 10 files changed, 90 insertions(+), 31 deletions(-) diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index 86ed91d3c08dcc..2f3eeb06173f85 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -2201,6 +2201,17 @@ public class Config extends ConfigBase { "Sample size for hive row count estimation."}) public static int hive_stats_partition_sample_size = 3000; + @ConfField(mutable = true, masterOnly = true, description = { + "Hive创建外部表默认指定的input format", + "Default hive input format for creating table."}) + public static String hive_default_input_format = "org.apache.hadoop.mapred.TextInputFormat"; + + @ConfField(mutable = true, masterOnly = true, description = { + "Hive创建外部表默认指定的output format", + "Default hive output format for creating table."}) + public static String hive_default_output_format = "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"; + + @ConfField public static int statistics_sql_parallel_exec_instance_num = 1; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index 7e14302714f493..4bcb029b2b59c2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -745,7 +745,7 @@ public void replayRefreshExternalTable(ExternalObjectLog log) { } } - public void unloadExternalTable(String dbName, String tableName, String catalogName, boolean ignoreIfExists) + public void unregisterExternalTable(String dbName, String 
tableName, String catalogName, boolean ignoreIfExists) throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { @@ -814,7 +814,6 @@ public void registerExternalTableFromEvent(String dbName, String tableName, } return; } - // TODO:avoid tableID conflict when use event or catalog to create table long tblId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(catalog.getId(), dbName, tableName); // -1L means it will be dropped later, ignore if (tblId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { @@ -831,7 +830,7 @@ public void registerExternalTableFromEvent(String dbName, String tableName, } } - public void removeExternalDatabase(String dbName, String catalogName, boolean ignoreIfNotExists) + public void unregisterExternalDatabase(String dbName, String catalogName, boolean ignoreIfNotExists) throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { @@ -850,7 +849,7 @@ public void removeExternalDatabase(String dbName, String catalogName, boolean ig ((HMSExternalCatalog) catalog).unregisterDatabase(dbName); } - public void addExternalDatabase(String dbName, String catalogName, boolean ignoreIfExists) + public void registerExternalDatabase(String dbName, String catalogName, boolean ignoreIfExists) throws DdlException { CatalogIf catalog = nameToCatalog.get(catalogName); if (catalog == null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java index 4343fa1ff5baa1..fc3ac4bf8c4e5e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java @@ -584,6 +584,34 @@ private static int findNextNestedField(String commaSplitFields) { return commaSplitFields.length(); } + /** + * Convert doris type to hive type. 
+ */ + public static String dorisTypeToHiveType(Type dorisType) { + if (dorisType.equals(Type.BOOLEAN)) { + return "boolean"; + } else if (dorisType.equals(Type.TINYINT)) { + return "tinyint"; + } else if (dorisType.equals(Type.SMALLINT)) { + return "smallint"; + } else if (dorisType.equals(Type.INT)) { + return "int"; + } else if (dorisType.equals(Type.BIGINT)) { + return "bigint"; + } else if (dorisType.equals(Type.DATE) || dorisType.equals(Type.DATEV2)) { + return "date"; + } else if (dorisType.equals(Type.DATETIME) || dorisType.equals(Type.DATETIMEV2)) { + return "timestamp"; + } else if (dorisType.equals(Type.FLOAT)) { + return "float"; + } else if (dorisType.equals(Type.DOUBLE)) { + return "double"; + } else if (dorisType.equals(Type.STRING)) { + return "string"; + } + return "string"; + } + /** * Convert hive type to doris type. */ diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index a5fb8df58fef8c..b0debe8744a1c6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -118,17 +118,15 @@ public void createTable(CreateTableStmt stmt) throws UserException { } try { Map props = stmt.getExtProperties(); - String inputFormat = props.getOrDefault("input_format", - "org.apache.hadoop.mapred.TextInputFormat"); // 定义成可配置的常量 - String outputFormat = props.getOrDefault("output_format", - "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"); // 定义成可配置的常量 + String inputFormat = props.getOrDefault("input_format", Config.hive_default_input_format); + String outputFormat = props.getOrDefault("output_format", Config.hive_default_output_format); HiveTableMetadata catalogTable = HiveTableMetadata.of(dbName, tblName, stmt.getColumns(), parsePartitionKeys(props), props, inputFormat, - outputFormat); // use HiveCatalogTable.of 
+ outputFormat); client.createTable(catalogTable, stmt.isSetIfNotExists()); // TODO: need add first, use increased id diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index 648a68ae5d76ae..8099b0e03a9d09 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -18,6 +18,7 @@ package org.apache.doris.datasource.hive; import org.apache.doris.analysis.TableName; +import org.apache.doris.catalog.Column; import org.apache.doris.common.Config; import org.apache.doris.datasource.DatabaseMetadata; import org.apache.doris.datasource.TableMetadata; @@ -59,15 +60,16 @@ import org.apache.hadoop.hive.metastore.txn.TxnUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Queue; @@ -144,7 +146,6 @@ public void createDatabase(DatabaseMetadata db) { } } - @NotNull private static Database toHiveDatabase(HiveDatabaseMetadata hiveDb) { Database database = new Database(); database.setName(hiveDb.getDbName()); @@ -163,12 +164,7 @@ public void createTable(TableMetadata tbl, boolean ignoreIfExists) { } try (ThriftHMSClient client = getClient()) { try { - // sd: List cols, // String location, - // String inputFormat, - // String outputFormat, - // Map parameters - // parameters.put("", "doris created") if (tbl instanceof HiveTableMetadata) { ugiDoAs(() -> { client.client.createTable(toHiveTable((HiveTableMetadata) tbl)); @@ -185,6 +181,8 @@ public 
void createTable(TableMetadata tbl, boolean ignoreIfExists) { } private static Table toHiveTable(HiveTableMetadata hiveTable) { + Objects.requireNonNull(hiveTable.getDbName(), "Hive database name should be not null"); + Objects.requireNonNull(hiveTable.getTableName(), "Hive table name should be not null"); Table table = new Table(); table.setDbName(hiveTable.getDbName()); table.setTableName(hiveTable.getTableName()); @@ -193,14 +191,11 @@ private static Table toHiveTable(HiveTableMetadata hiveTable) { table.setCreateTime(createTime); table.setLastAccessTime(createTime); // table.setRetention(0); - StorageDescriptor sd = new StorageDescriptor(); - // sd.setCols(toHiveColumns(hiveTable.getColumns())); - sd.setInputFormat(hiveTable.getInputFormat()); - sd.setOutputFormat(hiveTable.getOutputFormat()); - Map parameters = new HashMap<>(); - parameters.put("tag", "doris created"); - sd.setParameters(parameters); - table.setSd(sd); + String location = hiveTable.getProperties().get("external_location"); + table.setSd(toHiveStorageDesc(hiveTable.getColumns(), + hiveTable.getInputFormat(), + hiveTable.getOutputFormat(), + location)); table.setPartitionKeys(hiveTable.getPartitionKeys()); // table.setViewOriginalText(hiveTable.getViewSql()); // table.setViewExpandedText(hiveTable.getViewSql()); @@ -209,6 +204,34 @@ private static Table toHiveTable(HiveTableMetadata hiveTable) { return table; } + private static StorageDescriptor toHiveStorageDesc(List columns, String inputFormat, String outputFormat, + String location) { + StorageDescriptor sd = new StorageDescriptor(); + sd.setCols(toHiveColumns(columns)); + sd.setInputFormat(inputFormat); + sd.setOutputFormat(outputFormat); + if (StringUtils.isNotEmpty(location)) { + sd.setLocation(location); + } + Map parameters = new HashMap<>(); + parameters.put("tag", "doris external hive table"); + sd.setParameters(parameters); + return sd; + } + + private static List toHiveColumns(List columns) { + List result = new ArrayList<>(); + for
(Column column : columns) { + FieldSchema hiveFieldSchema = new FieldSchema(); + // TODO: refactor atlr4 file to support hive column type + hiveFieldSchema.setType(HiveMetaStoreClientHelper.dorisTypeToHiveType(column.getType())); + hiveFieldSchema.setName(column.getName()); + hiveFieldSchema.setComment(column.getComment()); + result.add(hiveFieldSchema); + } + return result; + } + @Override public void dropDatabase(String dbName) { try (ThriftHMSClient client = getClient()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java index 79433197332167..c4529d5d48747e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterDatabaseEvent.java @@ -87,8 +87,8 @@ private void processRename() throws DdlException { catalogName, dbAfter.getName()); return; } - Env.getCurrentEnv().getCatalogMgr().removeExternalDatabase(dbBefore.getName(), catalogName, true); - Env.getCurrentEnv().getCatalogMgr().addExternalDatabase(dbAfter.getName(), catalogName, true); + Env.getCurrentEnv().getCatalogMgr().unregisterExternalDatabase(dbBefore.getName(), catalogName, true); + Env.getCurrentEnv().getCatalogMgr().registerExternalDatabase(dbAfter.getName(), catalogName, true); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java index 222689075fc59a..1567960b7f5b58 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/AlterTableEvent.java @@ -98,7 +98,7 @@ private void processRecreateTable() throws DdlException { return; } Env.getCurrentEnv().getCatalogMgr() - 
.unloadExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); + .unregisterExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); Env.getCurrentEnv().getCatalogMgr() .registerExternalTableFromEvent( tableAfter.getDbName(), tableAfter.getTableName(), catalogName, eventTime, true); @@ -117,7 +117,7 @@ private void processRename() throws DdlException { return; } Env.getCurrentEnv().getCatalogMgr() - .unloadExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); + .unregisterExternalTable(tableBefore.getDbName(), tableBefore.getTableName(), catalogName, true); Env.getCurrentEnv().getCatalogMgr() .registerExternalTableFromEvent( tableAfter.getDbName(), tableAfter.getTableName(), catalogName, eventTime, true); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java index 2908cd3286707b..8829d2ae98831a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/CreateDatabaseEvent.java @@ -55,7 +55,7 @@ protected static List getEvents(NotificationEvent event, protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}]", catalogName, dbName); - Env.getCurrentEnv().getCatalogMgr().addExternalDatabase(dbName, catalogName, true); + Env.getCurrentEnv().getCatalogMgr().registerExternalDatabase(dbName, catalogName, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java index f75d3f25888d89..107ce591a42b03 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropDatabaseEvent.java @@ -55,7 +55,7 @@ protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}]", catalogName, dbName); Env.getCurrentEnv().getCatalogMgr() - .removeExternalDatabase(dbName, catalogName, true); + .unregisterExternalDatabase(dbName, catalogName, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java index 335d6e312e2335..6dcb16dedad369 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/event/DropTableEvent.java @@ -78,7 +78,7 @@ protected boolean willChangeTableName() { protected void process() throws MetastoreNotificationException { try { infoLog("catalogName:[{}],dbName:[{}],tableName:[{}]", catalogName, dbName, tableName); - Env.getCurrentEnv().getCatalogMgr().unloadExternalTable(dbName, tableName, catalogName, true); + Env.getCurrentEnv().getCatalogMgr().unregisterExternalTable(dbName, tableName, catalogName, true); } catch (DdlException e) { throw new MetastoreNotificationException( debugString("Failed to process event"), e); From 3d2463d2c21b56dccfe9c8b27566a0a9a6694425 Mon Sep 17 00:00:00 2001 From: wuwenchi Date: Wed, 21 Feb 2024 17:41:14 +0800 Subject: [PATCH 06/16] fix rebase --- .../iceberg/util => datasource}/DorisTypeVisitor.java | 2 +- .../iceberg/DorisTypeToIcebergType.java} | 9 +++++---- .../doris/datasource/iceberg/IcebergMetadataOps.java | 7 +++---- 3 files changed, 9 insertions(+), 9 deletions(-) rename fe/fe-core/src/main/java/org/apache/doris/{external/iceberg/util => 
datasource}/DorisTypeVisitor.java (98%) rename fe/fe-core/src/main/java/org/apache/doris/{external/iceberg/util/DorisTypeToType.java => datasource/iceberg/DorisTypeToIcebergType.java} (95%) diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/DorisTypeVisitor.java similarity index 98% rename from fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/DorisTypeVisitor.java index 8393672bb956a6..54a35acf595a59 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/DorisTypeVisitor.java @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -package org.apache.doris.external.iceberg.util; +package org.apache.doris.datasource; import org.apache.doris.catalog.ArrayType; import org.apache.doris.catalog.MapType; diff --git a/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/DorisTypeToIcebergType.java similarity index 95% rename from fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/DorisTypeToIcebergType.java index 52dd7446cc25ff..d6370c583dafbe 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/external/iceberg/util/DorisTypeToType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/DorisTypeToIcebergType.java @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-package org.apache.doris.external.iceberg.util; +package org.apache.doris.datasource.iceberg; import org.apache.doris.catalog.ArrayType; import org.apache.doris.catalog.MapType; @@ -23,6 +23,7 @@ import org.apache.doris.catalog.ScalarType; import org.apache.doris.catalog.StructField; import org.apache.doris.catalog.StructType; +import org.apache.doris.datasource.DorisTypeVisitor; import com.google.common.collect.Lists; import org.apache.iceberg.types.Type; @@ -34,15 +35,15 @@ /** * Convert Doris type to Iceberg type */ -public class DorisTypeToType extends DorisTypeVisitor { +public class DorisTypeToIcebergType extends DorisTypeVisitor { private final StructType root; private int nextId = 0; - public DorisTypeToType() { + public DorisTypeToIcebergType() { this.root = null; } - public DorisTypeToType(StructType root) { + public DorisTypeToIcebergType(StructType root) { this.root = root; // the root struct's fields use the first ids this.nextId = root.getFields().size(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java index fc8457f4d78a2c..f50045c606e4a5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java @@ -29,9 +29,8 @@ import org.apache.doris.common.FeNameFormat; import org.apache.doris.common.UserException; import org.apache.doris.common.util.Util; +import org.apache.doris.datasource.DorisTypeVisitor; import org.apache.doris.datasource.hive.ExternalMetadataOps; -import org.apache.doris.external.iceberg.util.DorisTypeToType; -import org.apache.doris.external.iceberg.util.DorisTypeVisitor; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; @@ -96,7 +95,6 @@ public void createDb(CreateDbStmt stmt) throws DdlException { String dbName = stmt.getFullDbName(); Map properties 
= stmt.getProperties(); nsCatalog.createNamespace(Namespace.of(dbName), properties); - // TODO 增加刷新流程,否则create之后,show不出来,只能refresh之后才能show出来 } @Override @@ -120,7 +118,8 @@ public void createTable(CreateTableStmt stmt) throws UserException { .map(col -> new StructField(col.getName(), col.getType(), col.getComment(), col.isAllowNull())) .collect(Collectors.toList()); StructType structType = new StructType(new ArrayList<>(collect)); - org.apache.iceberg.types.Type visit = DorisTypeVisitor.visit(structType, new DorisTypeToType(structType)); + org.apache.iceberg.types.Type visit = + DorisTypeVisitor.visit(structType, new DorisTypeToIcebergType(structType)); Schema schema = new Schema(visit.asNestedType().asStructType().fields()); Map properties = stmt.getProperties(); PartitionSpec partitionSpec = IcebergUtils.solveIcebergPartitionSpec(properties, schema); From 2280a6f69f3a7391a8244b04eefb47514291d736 Mon Sep 17 00:00:00 2001 From: slothever Date: Thu, 22 Feb 2024 14:34:43 +0800 Subject: [PATCH 07/16] fix rebase --- .../src/main/java/org/apache/doris/catalog/Env.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index 0c4b38f93a7125..8af10afbba9c1e 100755 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -2966,7 +2966,7 @@ public Frontend getFeByName(String name) { // The interface which DdlExecutor needs. 
public void createDb(CreateDbStmt stmt) throws DdlException { - getInternalCatalog().createDb(stmt); + getCurrentCatalog().createDb(stmt); } // For replay edit log, need't lock metadata @@ -2979,7 +2979,7 @@ public void replayCreateDb(Database db) { } public void dropDb(DropDbStmt stmt) throws DdlException { - getInternalCatalog().dropDb(stmt); + getCurrentCatalog().dropDb(stmt); } public void replayDropDb(String dbName, boolean isForceDrop, Long recycleTime) throws DdlException { @@ -3051,7 +3051,7 @@ public void replayRenameDatabase(String dbName, String newDbName) { * 11. add this table to ColocateGroup if necessary */ public void createTable(CreateTableStmt stmt) throws UserException { - getInternalCatalog().createTable(stmt); + getCurrentCatalog().createTable(stmt); } public void createTableLike(CreateTableLikeStmt stmt) throws DdlException { @@ -3681,7 +3681,7 @@ public void replayAlterExternalTableSchema(String dbName, String tableName, List // Drop table public void dropTable(DropTableStmt stmt) throws DdlException { - getInternalCatalog().dropTable(stmt); + getCurrentCatalog().dropTable(stmt); } public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, boolean isReplay, From e69681b7eb2a00b1b3a1ff25e3519cb0e4e8c29a Mon Sep 17 00:00:00 2001 From: wuwenchi Date: Thu, 22 Feb 2024 18:17:59 +0800 Subject: [PATCH 08/16] add engineName:iceberg add exception add log --- .../doris/datasource/ExternalCatalog.java | 32 ++++++++++++++++--- .../iceberg/IcebergExternalCatalog.java | 2 +- .../datasource/iceberg/IcebergUtils.java | 8 +++-- .../plans/commands/info/CreateTableInfo.java | 2 +- 4 files changed, 35 insertions(+), 9 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index 998b8a0eefee2c..24e7b0938eb2b2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -594,36 +594,60 @@ public void addDatabaseForTest(ExternalDatabase db) { public void createDb(CreateDbStmt stmt) throws DdlException { makeSureInitialized(); if (metadataOps == null) { + LOG.warn("dropDatabase not implemented"); throw new NotImplementedException("dropDatabase not implemented"); } - metadataOps.createDb(stmt); + try { + metadataOps.createDb(stmt); + } catch (Exception e) { + LOG.warn("Failed to create a database.", e); + throw e; + } } @Override public void dropDb(DropDbStmt stmt) throws DdlException { makeSureInitialized(); if (metadataOps == null) { + LOG.warn("dropDatabase not implemented"); throw new NotImplementedException("dropDatabase not implemented"); } - metadataOps.dropDb(stmt); + try { + metadataOps.dropDb(stmt); + } catch (Exception e) { + LOG.warn("Failed to drop a database.", e); + throw e; + } } @Override public void createTable(CreateTableStmt stmt) throws UserException { makeSureInitialized(); if (metadataOps == null) { + LOG.warn("createTable not implemented"); throw new NotImplementedException("createTable not implemented"); } - metadataOps.createTable(stmt); + try { + metadataOps.createTable(stmt); + } catch (Exception e) { + LOG.warn("Failed to create a table.", e); + throw e; + } } @Override public void dropTable(DropTableStmt stmt) throws DdlException { makeSureInitialized(); if (metadataOps == null) { + LOG.warn("dropTable not implemented"); throw new NotImplementedException("dropTable not implemented"); } - metadataOps.dropTable(stmt); + try { + metadataOps.dropTable(stmt); + } catch (Exception e) { + LOG.warn("Failed to drop a table", e); + throw e; + } } public void unregisterDatabase(String dbName) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java index f791d5bf83ab42..31d97174324c34 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java @@ -43,8 +43,8 @@ public IcebergExternalCatalog(long catalogId, String name, String comment) { @Override protected void init() { - super.init(); metadataOps = ExternalMetadataOperations.newIcebergMetadataOps(this, catalog); + super.init(); } public Catalog getCatalog() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java index e2a45b4db6ef76..f88e4de513ec9f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java @@ -37,6 +37,7 @@ import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ScalarType; import org.apache.doris.catalog.Type; +import org.apache.doris.common.UserException; import org.apache.doris.common.util.TimeUtils; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper; @@ -256,7 +257,8 @@ private static SlotRef convertDorisExprToSlotRef(Expr expr) { } // "partition"="c1;day(c1);bucket(4,c3)" - public static PartitionSpec solveIcebergPartitionSpec(Map properties, Schema schema) { + public static PartitionSpec solveIcebergPartitionSpec(Map properties, Schema schema) + throws UserException { if (properties.containsKey("partition")) { PartitionSpec.Builder builder = PartitionSpec.builderFor(schema); String par = properties.get("partition").replaceAll(" ", ""); @@ -291,10 +293,10 @@ public static PartitionSpec solveIcebergPartitionSpec(Map proper builder.truncate(matcher.group(3), Integer.parseInt(matcher.group(2))); break; default: - LOG.warn("unsupported partition for " + matcher.group(1)); + throw new UserException("unsupported partition for " + 
matcher.group(1)); } } else { - LOG.warn("failed to get partition info from " + func); + throw new UserException("failed to get partition info from " + func); } } else { builder.identity(func); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java index 47b731b602f575..3620f2e0413220 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java @@ -622,7 +622,7 @@ private boolean checkPartitionsTypes() { private void checkEngineName() { if (engineName.equals("mysql") || engineName.equals("odbc") || engineName.equals("broker") - || engineName.equals("elasticsearch") || engineName.equals("hive") + || engineName.equals("elasticsearch") || engineName.equals("hive") || engineName.equals("iceberg") || engineName.equals("jdbc")) { if (!isExternal) { // this is for compatibility From e5b30f326b68ee5823054b06fce5a71285e00e22 Mon Sep 17 00:00:00 2001 From: slothever Date: Thu, 22 Feb 2024 18:32:44 +0800 Subject: [PATCH 09/16] fix create db --- .../main/java/org/apache/doris/common/Config.java | 4 ++++ .../doris/datasource/hive/HMSExternalCatalog.java | 2 +- .../doris/datasource/hive/HiveMetadataOps.java | 14 ++++++++++---- .../doris/datasource/hive/HiveTableMetadata.java | 14 +++++++++++--- .../datasource/hive/ThriftHMSCachedClient.java | 7 ++++++- 5 files changed, 32 insertions(+), 9 deletions(-) diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index 2f3eeb06173f85..b8e16435e371cd 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -2211,6 +2211,10 @@ public class Config extends ConfigBase 
{ "Default hive output format for creating table."}) public static String hive_default_output_format = "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"; + @ConfField(mutable = true, masterOnly = true, description = { + "Hive创建外部表默认指定的SerDe类", + "Default hive serde class for creating table."}) + public static String hive_default_serde = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; @ConfField public static int statistics_sql_parallel_exec_instance_num = 1; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index e4581e67ddd3e8..3b6ef978bd98fe 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -200,7 +200,7 @@ public void registerDatabase(long dbId, String dbName) { if (LOG.isDebugEnabled()) { LOG.debug("create database [{}]", dbName); } - dbNameToId.put(dbName, dbId); + dbNameToId.put(ClusterNamespace.getNameFromFullName(dbName), dbId); ExternalDatabase db = getDbForInit(dbName, dbId, logType); idToDb.put(dbId, db); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index b0debe8744a1c6..e2190eb277f78a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -51,6 +51,7 @@ public class HiveMetadataOps implements ExternalMetadataOps { private HMSCachedClient client; public HiveMetadataOps(HiveConf hiveConf, JdbcClientConfig jdbcClientConfig, HMSExternalCatalog catalog) { + this.catalog = catalog; this.hiveConf = hiveConf; this.jdbcClientConfig = jdbcClientConfig; this.client = createCachedClient(hiveConf, @@ -80,7 +81,7 @@ public static 
HMSCachedClient createCachedClient(HiveConf hiveConf, int thriftCl public void createDb(CreateDbStmt stmt) throws DdlException { String fullDbName = stmt.getFullDbName(); Map properties = stmt.getProperties(); - long id = Env.getCurrentEnv().getNextId(); + long dbId = Env.getCurrentEnv().getNextId(); try { HiveDatabaseMetadata catalogDatabase = new HiveDatabaseMetadata(); catalogDatabase.setDbName(fullDbName); @@ -90,11 +91,11 @@ public void createDb(CreateDbStmt stmt) throws DdlException { } catalogDatabase.setComment(properties.getOrDefault("comment", "")); client.createDatabase(catalogDatabase); - catalog.registerDatabase(id, fullDbName); + catalog.registerDatabase(dbId, fullDbName); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } - LOG.info("createDb dbName = " + fullDbName + ", id = " + id); + LOG.info("createDb dbName = " + fullDbName + ", id = " + dbId); } @Override @@ -103,6 +104,7 @@ public void dropDb(DropDbStmt stmt) throws DdlException { try { client.dropDatabase(dbName); catalog.unregisterDatabase(dbName); + catalog.onRefresh(true); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } @@ -120,13 +122,15 @@ public void createTable(CreateTableStmt stmt) throws UserException { Map props = stmt.getExtProperties(); String inputFormat = props.getOrDefault("input_format", Config.hive_default_input_format); String outputFormat = props.getOrDefault("output_format", Config.hive_default_output_format); + String serDe = props.getOrDefault("serde", Config.hive_default_serde); HiveTableMetadata catalogTable = HiveTableMetadata.of(dbName, tblName, stmt.getColumns(), parsePartitionKeys(props), props, inputFormat, - outputFormat); + outputFormat, + serDe); client.createTable(catalogTable, stmt.isSetIfNotExists()); // TODO: need add first, use increased id @@ -135,6 +139,7 @@ public void createTable(CreateTableStmt stmt) throws UserException { return; } db.registerTable(NamedExternalTable.of(tableId, tblName, dbName, 
catalog)); + catalog.onRefresh(true); } catch (Exception e) { throw new UserException(e.getMessage(), e); } @@ -161,6 +166,7 @@ public void dropTable(DropTableStmt stmt) throws DdlException { try { client.dropTable(dbName, stmt.getTableName()); db.unregisterTable(stmt.getTableName()); + catalog.onRefresh(true); } catch (Exception e) { throw new DdlException(e.getMessage(), e); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java index fa3c35e6ec380b..8edd3033187a6f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java @@ -32,6 +32,7 @@ public class HiveTableMetadata implements TableMetadata { private List partitionKeys; private String inputFormat; private String outputFormat; + private String serDe; private Map properties; // private String viewSql; @@ -41,13 +42,15 @@ public HiveTableMetadata(String dbName, List partitionKeys, Map props, String inputFormat, - String outputFormat) { + String outputFormat, + String serDe) { this.dbName = dbName; this.tableName = tblName; this.columns = columns; this.partitionKeys = partitionKeys; this.inputFormat = inputFormat; this.outputFormat = outputFormat; + this.serDe = serDe; this.properties = props; } @@ -82,13 +85,18 @@ public String getOutputFormat() { return outputFormat; } + public String getSerDe() { + return serDe; + } + public static HiveTableMetadata of(String dbName, String tblName, List columns, List partitionKeys, Map props, String inputFormat, - String outputFormat) { - return new HiveTableMetadata(dbName, tblName, columns, partitionKeys, props, inputFormat, outputFormat); + String outputFormat, String serDe) { + return new HiveTableMetadata(dbName, tblName, columns, partitionKeys, props, + inputFormat, outputFormat, serDe); } } diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index 8099b0e03a9d09..68eae63c168105 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -54,6 +54,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.TableValidWriteIds; @@ -195,6 +196,7 @@ private static Table toHiveTable(HiveTableMetadata hiveTable) { table.setSd(toHiveStorageDesc(hiveTable.getColumns(), hiveTable.getInputFormat(), hiveTable.getOutputFormat(), + hiveTable.getSerDe(), location)); table.setPartitionKeys(hiveTable.getPartitionKeys()); // table.setViewOriginalText(hiveTable.getViewSql()); @@ -205,9 +207,12 @@ private static Table toHiveTable(HiveTableMetadata hiveTable) { } private static StorageDescriptor toHiveStorageDesc(List columns, String inputFormat, String outputFormat, - String location) { + String serDe, String location) { StorageDescriptor sd = new StorageDescriptor(); sd.setCols(toHiveColumns(columns)); + SerDeInfo serDeInfo = new SerDeInfo(); + serDeInfo.setSerializationLib(serDe); + sd.setSerdeInfo(serDeInfo); sd.setInputFormat(inputFormat); sd.setOutputFormat(outputFormat); if (StringUtils.isNotEmpty(location)) { From 3099f22c87207ba8bb9dcf97235bf0cd15b44573 Mon Sep 17 00:00:00 2001 From: slothever Date: Thu, 22 Feb 2024 20:54:52 +0800 Subject: [PATCH 10/16] fix refresh --- .../org/apache/doris/datasource/hive/HiveMetadataOps.java | 8 ++++---- 
.../doris/datasource/hive/ThriftHMSCachedClient.java | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index e2190eb277f78a..f9f87052b64828 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -91,6 +91,7 @@ public void createDb(CreateDbStmt stmt) throws DdlException { } catalogDatabase.setComment(properties.getOrDefault("comment", "")); client.createDatabase(catalogDatabase); + catalog.onRefresh(true); catalog.registerDatabase(dbId, fullDbName); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); @@ -103,8 +104,8 @@ public void dropDb(DropDbStmt stmt) throws DdlException { String dbName = stmt.getDbName(); try { client.dropDatabase(dbName); - catalog.unregisterDatabase(dbName); catalog.onRefresh(true); + catalog.unregisterDatabase(dbName); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } @@ -133,13 +134,12 @@ public void createTable(CreateTableStmt stmt) throws UserException { serDe); client.createTable(catalogTable, stmt.isSetIfNotExists()); - // TODO: need add first, use increased id + catalog.onRefresh(true); long tableId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(catalog.getId(), dbName, tblName); if (tableId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { return; } db.registerTable(NamedExternalTable.of(tableId, tblName, dbName, catalog)); - catalog.onRefresh(true); } catch (Exception e) { throw new UserException(e.getMessage(), e); } @@ -165,8 +165,8 @@ public void dropTable(DropTableStmt stmt) throws DdlException { } try { client.dropTable(dbName, stmt.getTableName()); - db.unregisterTable(stmt.getTableName()); catalog.onRefresh(true); + db.unregisterTable(stmt.getTableName()); } catch (Exception e) { 
throw new DdlException(e.getMessage(), e); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index 68eae63c168105..7f00c48ebbc0a3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -161,7 +161,8 @@ private static Database toHiveDatabase(HiveDatabaseMetadata hiveDb) { @Override public void createTable(TableMetadata tbl, boolean ignoreIfExists) { if (tableExists(tbl.getDbName(), tbl.getTableName())) { - return; + throw new HMSClientException("Table '" + tbl.getTableName() + + "' has existed in '" + tbl.getDbName() + "'."); } try (ThriftHMSClient client = getClient()) { try { From 16c88a86d59251d2b2d97694cd56610e3b7588f7 Mon Sep 17 00:00:00 2001 From: slothever Date: Fri, 23 Feb 2024 11:32:23 +0800 Subject: [PATCH 11/16] fix --- .../doris/analysis/CreateTableStmt.java | 5 ++++ .../apache/doris/datasource/CatalogMgr.java | 2 +- .../doris/datasource/ExternalCatalog.java | 2 +- .../datasource/hive/HMSExternalCatalog.java | 4 +-- .../datasource/hive/HiveMetadataOps.java | 15 +++-------- .../iceberg/IcebergExternalCatalog.java | 2 +- .../iceberg/IcebergHMSExternalCatalog.java | 26 +++---------------- .../iceberg/IcebergMetadataOps.java | 2 +- .../ExternalMetadataOperations.java | 2 +- .../ExternalMetadataOps.java | 2 +- .../operations}/NamedExternalTable.java | 2 +- 11 files changed, 21 insertions(+), 43 deletions(-) rename fe/fe-core/src/main/java/org/apache/doris/{catalog/external => datasource/operations}/ExternalMetadataOperations.java (97%) rename fe/fe-core/src/main/java/org/apache/doris/datasource/{hive => operations}/ExternalMetadataOps.java (97%) rename fe/fe-core/src/main/java/org/apache/doris/{catalog/external => datasource/operations}/NamedExternalTable.java (97%) diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java index 70fbbf3543284b..22d8f8a9193e2f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java +++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java @@ -276,6 +276,11 @@ public List getIndexes() { @Override public void analyze(Analyzer analyzer) throws UserException { + if (Config.isCloudMode() && properties != null + && properties.containsKey(PropertyAnalyzer.ENABLE_UNIQUE_KEY_MERGE_ON_WRITE)) { + // FIXME: MOW is not supported in cloud mode yet. + properties.put(PropertyAnalyzer.ENABLE_UNIQUE_KEY_MERGE_ON_WRITE, "false"); + } if (Strings.isNullOrEmpty(engineName) || engineName.equalsIgnoreCase(DEFAULT_ENGINE_NAME)) { this.properties = maybeRewriteByAutoBucket(distributionDesc, properties); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index 4bcb029b2b59c2..868ed0c2e14c04 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -31,7 +31,6 @@ import org.apache.doris.catalog.Resource; import org.apache.doris.catalog.Resource.ReferenceType; import org.apache.doris.catalog.TableIf; -import org.apache.doris.catalog.external.NamedExternalTable; import org.apache.doris.cluster.ClusterNamespace; import org.apache.doris.common.AnalysisException; import org.apache.doris.common.CaseSensibility; @@ -47,6 +46,7 @@ import org.apache.doris.common.util.TimeUtils; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.hive.HMSExternalTable; +import org.apache.doris.datasource.operations.NamedExternalTable; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.persist.OperationType; import 
org.apache.doris.persist.gson.GsonPostProcessable; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index 24e7b0938eb2b2..d9548b34c2a3af 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -34,13 +34,13 @@ import org.apache.doris.common.io.Writable; import org.apache.doris.common.util.Util; import org.apache.doris.datasource.es.EsExternalDatabase; -import org.apache.doris.datasource.hive.ExternalMetadataOps; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.hive.HMSExternalDatabase; import org.apache.doris.datasource.iceberg.IcebergExternalDatabase; import org.apache.doris.datasource.infoschema.ExternalInfoSchemaDatabase; import org.apache.doris.datasource.jdbc.JdbcExternalDatabase; import org.apache.doris.datasource.maxcompute.MaxComputeExternalDatabase; +import org.apache.doris.datasource.operations.ExternalMetadataOps; import org.apache.doris.datasource.paimon.PaimonExternalDatabase; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.datasource.test.TestExternalDatabase; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java index 3b6ef978bd98fe..11c5ad72c2b5e3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java @@ -19,7 +19,6 @@ import org.apache.doris.catalog.Env; import org.apache.doris.catalog.HdfsResource; -import org.apache.doris.catalog.external.ExternalMetadataOperations; import org.apache.doris.cluster.ClusterNamespace; import org.apache.doris.common.Config; import 
org.apache.doris.common.DdlException; @@ -32,6 +31,7 @@ import org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.SessionContext; import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; +import org.apache.doris.datasource.operations.ExternalMetadataOperations; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.datasource.property.constants.HMSProperties; @@ -200,7 +200,7 @@ public void registerDatabase(long dbId, String dbName) { if (LOG.isDebugEnabled()) { LOG.debug("create database [{}]", dbName); } - dbNameToId.put(ClusterNamespace.getNameFromFullName(dbName), dbId); + dbNameToId.put(dbName, dbId); ExternalDatabase db = getDbForInit(dbName, dbId, logType); idToDb.put(dbId, db); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java index f9f87052b64828..6779a602cbfbae 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java @@ -23,14 +23,13 @@ import org.apache.doris.analysis.DropTableStmt; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.JdbcResource; -import org.apache.doris.catalog.external.NamedExternalTable; import org.apache.doris.common.Config; import org.apache.doris.common.DdlException; import org.apache.doris.common.UserException; import org.apache.doris.datasource.ExternalDatabase; -import org.apache.doris.datasource.ExternalMetaIdMgr; import org.apache.doris.datasource.jdbc.client.JdbcClient; import org.apache.doris.datasource.jdbc.client.JdbcClientConfig; +import org.apache.doris.datasource.operations.ExternalMetadataOps; import com.google.common.base.Preconditions; import org.apache.hadoop.hive.conf.HiveConf; @@ -92,7 +91,6 @@ public void createDb(CreateDbStmt stmt) throws DdlException { 
catalogDatabase.setComment(properties.getOrDefault("comment", "")); client.createDatabase(catalogDatabase); catalog.onRefresh(true); - catalog.registerDatabase(dbId, fullDbName); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } @@ -105,7 +103,6 @@ public void dropDb(DropDbStmt stmt) throws DdlException { try { client.dropDatabase(dbName); catalog.onRefresh(true); - catalog.unregisterDatabase(dbName); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } @@ -134,12 +131,7 @@ public void createTable(CreateTableStmt stmt) throws UserException { serDe); client.createTable(catalogTable, stmt.isSetIfNotExists()); - catalog.onRefresh(true); - long tableId = Env.getCurrentEnv().getExternalMetaIdMgr().getTblId(catalog.getId(), dbName, tblName); - if (tableId == ExternalMetaIdMgr.META_ID_FOR_NOT_EXISTS) { - return; - } - db.registerTable(NamedExternalTable.of(tableId, tblName, dbName, catalog)); + db.setUnInitialized(true); } catch (Exception e) { throw new UserException(e.getMessage(), e); } @@ -165,8 +157,7 @@ public void dropTable(DropTableStmt stmt) throws DdlException { } try { client.dropTable(dbName, stmt.getTableName()); - catalog.onRefresh(true); - db.unregisterTable(stmt.getTableName()); + db.setUnInitialized(true); } catch (Exception e) { throw new DdlException(e.getMessage(), e); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java index 31d97174324c34..7c35839eb31de9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java @@ -17,10 +17,10 @@ package org.apache.doris.datasource.iceberg; -import org.apache.doris.catalog.external.ExternalMetadataOperations; import org.apache.doris.datasource.ExternalCatalog; import 
org.apache.doris.datasource.InitCatalogLog; import org.apache.doris.datasource.SessionContext; +import org.apache.doris.datasource.operations.ExternalMetadataOperations; import org.apache.iceberg.catalog.Catalog; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java index f8b8e6e17fdfb0..b68e598566339f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java @@ -18,21 +18,18 @@ package org.apache.doris.datasource.iceberg; import org.apache.doris.common.Config; -import org.apache.doris.common.security.authentication.AuthType; import org.apache.doris.common.security.authentication.AuthenticationConfig; +import org.apache.doris.common.security.authentication.HadoopUGI; import org.apache.doris.datasource.CatalogProperty; import org.apache.doris.datasource.hive.HMSCachedClient; -import org.apache.doris.datasource.hive.HMSClientException; import org.apache.doris.datasource.hive.HiveMetadataOps; import org.apache.doris.datasource.property.PropertyConverter; import org.apache.doris.datasource.property.constants.HMSProperties; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.security.UserGroupInformation; import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.hive.HiveCatalog; -import java.io.IOException; import java.util.HashMap; import java.util.Map; @@ -60,24 +57,9 @@ protected void initLocalObjectsImpl() { } hiveConf.set(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.name(), String.valueOf(Config.hive_metastore_client_timeout_second)); - String authentication = catalogProperty.getOrDefault( - AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, ""); - if (AuthType.KERBEROS.getDesc().equals(authentication)) { - 
hiveConf.set(AuthenticationConfig.HADOOP_SECURITY_AUTHENTICATION, authentication); - UserGroupInformation.setConfiguration(hiveConf); - try { - /** - * Because metastore client is created by using - * {@link org.apache.hadoop.hive.metastore.RetryingMetaStoreClient#getProxy} - * it will relogin when TGT is expired, so we don't need to relogin manually. - */ - UserGroupInformation.loginUserFromKeytab( - catalogProperty.getOrDefault(AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, ""), - catalogProperty.getOrDefault(AuthenticationConfig.HADOOP_KERBEROS_KEYTAB, "")); - } catch (IOException e) { - throw new HMSClientException("login with kerberos auth failed for catalog %s", e, this.getName()); - } - } + HadoopUGI.tryKrbLogin(this.getName(), AuthenticationConfig.getKerberosConfig(hiveConf, + AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL, + AuthenticationConfig.HADOOP_KERBEROS_KEYTAB)); HMSCachedClient cachedClient = HiveMetadataOps.createCachedClient(hiveConf, 1, null); String location = cachedClient.getCatalogLocation("hive"); catalogProperties.put(CatalogProperties.WAREHOUSE_LOCATION, location); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java index f50045c606e4a5..27669baf91feee 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java @@ -30,7 +30,7 @@ import org.apache.doris.common.UserException; import org.apache.doris.common.util.Util; import org.apache.doris.datasource.DorisTypeVisitor; -import org.apache.doris.datasource.hive.ExternalMetadataOps; +import org.apache.doris.datasource.operations.ExternalMetadataOps; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/ExternalMetadataOperations.java similarity index 97% rename from fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/operations/ExternalMetadataOperations.java index fa393673c2d054..4a2757f918f294 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/external/ExternalMetadataOperations.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/ExternalMetadataOperations.java @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -package org.apache.doris.catalog.external; +package org.apache.doris.datasource.operations; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.hive.HiveMetadataOps; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/ExternalMetadataOps.java similarity index 97% rename from fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/operations/ExternalMetadataOps.java index 5a95b86372c658..2d9498d0b91885 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ExternalMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/ExternalMetadataOps.java @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-package org.apache.doris.datasource.hive; +package org.apache.doris.datasource.operations; import org.apache.doris.analysis.CreateDbStmt; import org.apache.doris.analysis.CreateTableStmt; diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java similarity index 97% rename from fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java rename to fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java index 1d51380f4643ad..8802a34e74f924 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/external/NamedExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -package org.apache.doris.catalog.external; +package org.apache.doris.datasource.operations; import org.apache.doris.datasource.ExternalCatalog; import org.apache.doris.datasource.ExternalTable; From f42b9f2b1c54e89332aea57b2781d572fb8b8cb2 Mon Sep 17 00:00:00 2001 From: slothever Date: Fri, 23 Feb 2024 11:32:23 +0800 Subject: [PATCH 12/16] fix --- .../apache/doris/datasource/CatalogMgr.java | 2 +- .../doris/datasource/ExternalTable.java | 4 ++ .../hive/HiveMetaStoreClientHelper.java | 2 +- .../hive/ThriftHMSCachedClient.java | 2 +- .../operations/NamedExternalTable.java | 49 ------------------- 5 files changed, 7 insertions(+), 52 deletions(-) delete mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index 868ed0c2e14c04..4c2c4d40b27eb8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -822,7 +822,7 @@ public void registerExternalTableFromEvent(String dbName, String tableName, db.writeLock(); try { - NamedExternalTable namedTable = NamedExternalTable.of(tblId, tableName, dbName, (ExternalCatalog) catalog); + HMSExternalTable namedTable = new HMSExternalTable(tblId, tableName, dbName, (HMSExternalCatalog) catalog); namedTable.setUpdateTime(updateTime); db.registerTable(namedTable); } finally { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java index 8f7fada5f614c6..c396b3910956de 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalTable.java @@ -327,6 +327,10 @@ public long getUpdateTime() { return this.schemaUpdateTime; } + public void setUpdateTime(long schemaUpdateTime) { + this.schemaUpdateTime = schemaUpdateTime; + } + @Override public long getLastCheckTime() { return 0; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java index fc3ac4bf8c4e5e..037fbe02d68b4b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java @@ -609,7 +609,7 @@ public static String dorisTypeToHiveType(Type dorisType) { } else if (dorisType.equals(Type.STRING)) { return "string"; } - return "string"; + throw new HMSClientException("Unsupported type conversion of " + dorisType.toSql()); } /** diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java index 7f00c48ebbc0a3..c8207906624eee 100644 
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/ThriftHMSCachedClient.java @@ -229,7 +229,7 @@ private static List toHiveColumns(List columns) { List result = new ArrayList<>(); for (Column column : columns) { FieldSchema hiveFieldSchema = new FieldSchema(); - // TODO: refactor atlr4 file to support hive column type + // TODO: add doc, just support doris type hiveFieldSchema.setType(HiveMetaStoreClientHelper.dorisTypeToHiveType(column.getType())); hiveFieldSchema.setName(column.getName()); hiveFieldSchema.setComment(column.getComment()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java deleted file mode 100644 index 8802a34e74f924..00000000000000 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/operations/NamedExternalTable.java +++ /dev/null @@ -1,49 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.datasource.operations; - -import org.apache.doris.datasource.ExternalCatalog; -import org.apache.doris.datasource.ExternalTable; - -/** - * use to save table info. - */ -public class NamedExternalTable extends ExternalTable { - - private NamedExternalTable(long id, String name, String dbName, ExternalCatalog catalog) { - super(id, name, catalog, dbName, TableType.HMS_EXTERNAL_TABLE); - } - - public void setUpdateTime(long updateTime) { - schemaUpdateTime = updateTime; - } - - /** - * - * @param id id - * @param tableName table name - * @param dbName db name - * @param catalog catalog - * @return NamedExternalTable external table name info - */ - public static NamedExternalTable of(long id, String tableName, String dbName, ExternalCatalog catalog) { - return new NamedExternalTable(id, tableName, dbName, catalog); - } -} - - From f54dc701db48b94450742af6615fe3cc02fc56d9 Mon Sep 17 00:00:00 2001 From: wuwenchi Date: Fri, 23 Feb 2024 14:29:58 +0800 Subject: [PATCH 13/16] fix --- .../datasource/iceberg/IcebergDLFExternalCatalog.java | 2 +- .../datasource/iceberg/IcebergExternalCatalog.java | 10 +++++++++- .../datasource/iceberg/IcebergGlueExternalCatalog.java | 2 +- .../datasource/iceberg/IcebergHMSExternalCatalog.java | 2 +- .../iceberg/IcebergHadoopExternalCatalog.java | 2 +- .../datasource/iceberg/IcebergRestExternalCatalog.java | 2 +- 6 files changed, 14 insertions(+), 6 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergDLFExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergDLFExternalCatalog.java index a243a17b31a979..e4d8b2f55c43f1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergDLFExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergDLFExternalCatalog.java @@ -37,7 +37,7 @@ public IcebergDLFExternalCatalog(long catalogId, String name, String resource, M } @Override - protected 
void initLocalObjectsImpl() { + protected void initCatalog() { icebergCatalogType = ICEBERG_DLF; DLFCatalog dlfCatalog = new DLFCatalog(); dlfCatalog.setConf(getConfiguration()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java index 7c35839eb31de9..426657bc539d86 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java @@ -43,10 +43,18 @@ public IcebergExternalCatalog(long catalogId, String name, String comment) { @Override protected void init() { - metadataOps = ExternalMetadataOperations.newIcebergMetadataOps(this, catalog); super.init(); } + // Create catalog based on catalog type + protected abstract void initCatalog(); + + @Override + protected void initLocalObjectsImpl() { + initCatalog(); + metadataOps = ExternalMetadataOperations.newIcebergMetadataOps(this, catalog); + } + public Catalog getCatalog() { makeSureInitialized(); return ((IcebergMetadataOps) metadataOps).getCatalog(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java index 0c5bd6e1ba5948..08e7fe044ba4d4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergGlueExternalCatalog.java @@ -42,7 +42,7 @@ public IcebergGlueExternalCatalog(long catalogId, String name, String resource, } @Override - protected void initLocalObjectsImpl() { + protected void initCatalog() { icebergCatalogType = ICEBERG_GLUE; GlueCatalog glueCatalog = new GlueCatalog(); glueCatalog.setConf(getConfiguration()); diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java index b68e598566339f..875633da2933e0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHMSExternalCatalog.java @@ -43,7 +43,7 @@ public IcebergHMSExternalCatalog(long catalogId, String name, String resource, M } @Override - protected void initLocalObjectsImpl() { + protected void initCatalog() { icebergCatalogType = ICEBERG_HMS; HiveCatalog hiveCatalog = new org.apache.iceberg.hive.HiveCatalog(); hiveCatalog.setConf(getConfiguration()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java index 97de2bfd55c194..68aa92f4c130c0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java @@ -51,7 +51,7 @@ public IcebergHadoopExternalCatalog(long catalogId, String name, String resource } @Override - protected void initLocalObjectsImpl() { + protected void initCatalog() { icebergCatalogType = ICEBERG_HADOOP; HadoopCatalog hadoopCatalog = new HadoopCatalog(); Configuration conf = getConfiguration(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergRestExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergRestExternalCatalog.java index aefdfb65ceaadb..e839b9a00177c2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergRestExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergRestExternalCatalog.java @@ -42,7 +42,7 @@ public 
IcebergRestExternalCatalog(long catalogId, String name, String resource, } @Override - protected void initLocalObjectsImpl() { + protected void initCatalog() { icebergCatalogType = ICEBERG_REST; Configuration conf = replaceS3Properties(getConfiguration()); From a0e2eef45f37179ece1da2942570b782b7d0565a Mon Sep 17 00:00:00 2001 From: slothever Date: Fri, 23 Feb 2024 14:46:46 +0800 Subject: [PATCH 14/16] fix iceberg refresh --- .../org/apache/doris/datasource/CatalogMgr.java | 1 - .../datasource/iceberg/IcebergMetadataOps.java | 13 +++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index 4c2c4d40b27eb8..c8416370b3269c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -46,7 +46,6 @@ import org.apache.doris.common.util.TimeUtils; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.hive.HMSExternalTable; -import org.apache.doris.datasource.operations.NamedExternalTable; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.persist.OperationType; import org.apache.doris.persist.gson.GsonPostProcessable; diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java index 27669baf91feee..85ca2c957c0ee8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java @@ -30,6 +30,7 @@ import org.apache.doris.common.UserException; import org.apache.doris.common.util.Util; import org.apache.doris.datasource.DorisTypeVisitor; +import org.apache.doris.datasource.ExternalDatabase; import 
org.apache.doris.datasource.operations.ExternalMetadataOps; import org.apache.iceberg.PartitionSpec; @@ -95,6 +96,7 @@ public void createDb(CreateDbStmt stmt) throws DdlException { String dbName = stmt.getFullDbName(); Map properties = stmt.getProperties(); nsCatalog.createNamespace(Namespace.of(dbName), properties); + dorisCatalog.onRefresh(true); } @Override @@ -107,11 +109,16 @@ public void dropDb(DropDbStmt stmt) throws DdlException { dorisCatalog.getDbNameToId().remove(dbName); } nsCatalog.dropNamespace(Namespace.of(dbName)); + dorisCatalog.onRefresh(true); } @Override public void createTable(CreateTableStmt stmt) throws UserException { String dbName = stmt.getDbName(); + ExternalDatabase db = dorisCatalog.getDbNullable(dbName); + if (db == null) { + throw new UserException("Failed to get database: '" + dbName + "' in catalog: " + dorisCatalog.getName()); + } String tableName = stmt.getTableName(); List columns = stmt.getColumns(); List collect = columns.stream() @@ -124,12 +131,18 @@ public void createTable(CreateTableStmt stmt) throws UserException { Map properties = stmt.getProperties(); PartitionSpec partitionSpec = IcebergUtils.solveIcebergPartitionSpec(properties, schema); catalog.createTable(TableIdentifier.of(dbName, tableName), schema, partitionSpec, properties); + db.setUnInitialized(true); } @Override public void dropTable(DropTableStmt stmt) throws DdlException { String dbName = stmt.getDbName(); + ExternalDatabase db = dorisCatalog.getDbNullable(dbName); + if (db == null) { + throw new DdlException("Failed to get database: '" + dbName + "' in catalog: " + dorisCatalog.getName()); + } String tableName = stmt.getTableName(); catalog.dropTable(TableIdentifier.of(dbName, tableName)); + db.setUnInitialized(true); } } From 696f569807b61676432c9e54ea24de729fb14287 Mon Sep 17 00:00:00 2001 From: slothever Date: Fri, 23 Feb 2024 16:26:48 +0800 Subject: [PATCH 15/16] support orc db/tbl --- .../src/main/java/org/apache/doris/common/Config.java | 6 +++--- 1 
file changed, 3 insertions(+), 3 deletions(-) diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index b8e16435e371cd..4914f1e76a5bf4 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -2204,17 +2204,17 @@ public class Config extends ConfigBase { @ConfField(mutable = true, masterOnly = true, description = { "Hive创建外部表默认指定的input format", "Default hive input format for creating table."}) - public static String hive_default_input_format = "org.apache.hadoop.mapred.TextInputFormat"; + public static String hive_default_input_format = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"; @ConfField(mutable = true, masterOnly = true, description = { "Hive创建外部表默认指定的output format", "Default hive output format for creating table."}) - public static String hive_default_output_format = "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat"; + public static String hive_default_output_format = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"; @ConfField(mutable = true, masterOnly = true, description = { "Hive创建外部表默认指定的SerDe类", "Default hive serde class for creating table."}) - public static String hive_default_serde = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"; + public static String hive_default_serde = "org.apache.hadoop.hive.ql.io.orc.OrcSerde"; @ConfField public static int statistics_sql_parallel_exec_instance_num = 1; From 392b0a1827f8fc52edf8855de5bee046adc51b2a Mon Sep 17 00:00:00 2001 From: slothever Date: Sun, 25 Feb 2024 20:11:28 +0800 Subject: [PATCH 16/16] fix ut --- .../java/org/apache/doris/datasource/ExternalCatalog.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index 
d9548b34c2a3af..0eabffce4457a3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -595,7 +595,7 @@ public void createDb(CreateDbStmt stmt) throws DdlException { makeSureInitialized(); if (metadataOps == null) { LOG.warn("dropDatabase not implemented"); - throw new NotImplementedException("dropDatabase not implemented"); + return; } try { metadataOps.createDb(stmt); @@ -610,7 +610,7 @@ public void dropDb(DropDbStmt stmt) throws DdlException { makeSureInitialized(); if (metadataOps == null) { LOG.warn("dropDatabase not implemented"); - throw new NotImplementedException("dropDatabase not implemented"); + return; } try { metadataOps.dropDb(stmt); @@ -625,7 +625,7 @@ public void createTable(CreateTableStmt stmt) throws UserException { makeSureInitialized(); if (metadataOps == null) { LOG.warn("createTable not implemented"); - throw new NotImplementedException("createTable not implemented"); + return; } try { metadataOps.createTable(stmt); @@ -640,7 +640,7 @@ public void dropTable(DropTableStmt stmt) throws DdlException { makeSureInitialized(); if (metadataOps == null) { LOG.warn("dropTable not implemented"); - throw new NotImplementedException("dropTable not implemented"); + return; } try { metadataOps.dropTable(stmt);