diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
index ad34eddbc1ae..841723c38226 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
@@ -243,12 +243,27 @@ tableName
 @init { gParent.pushMsg("table name", state); }
 @after { gParent.popMsg(state); }
     :
-    db=identifier DOT tab=identifier (DOT meta=identifier)?
-    {tables.add(new ImmutablePair<>($db.text, $tab.text));}
-    -> ^(TOK_TABNAME $db $tab $meta?)
-    |
-    tab=identifier
-    {tables.add(new ImmutablePair<>(null, $tab.text));}
+    // case 1: catalog.db.table(.meta)?
+    (cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?)
+    =>
+    cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?
+    {
+      tables.add(new ImmutablePair<>($cat.text + "." + $db.text, $tab.text));
+    }
+    -> ^(TOK_TABNAME $cat $db $tab $meta?)
+
+    // case 2: db.table
+    | db=identifier DOT tab=identifier
+    {
+      tables.add(new ImmutablePair<>($db.text, $tab.text));
+    }
+    -> ^(TOK_TABNAME $db $tab)
+
+    // case 3: table
+    | tab=identifier
+    {
+      tables.add(new ImmutablePair<>(null, $tab.text));
+    }
     -> ^(TOK_TABNAME $tab)
     ;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java
index 74273f780cf4..a2fd8d773afa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/CreateTableAnalyzer.java
@@ -233,12 +233,13 @@ private static boolean isIcebergTable(Map<String, String> tblProps) {
         .equalsIgnoreCase(tblProps.get(META_TABLE_STORAGE));
   }
 
-  private String getDefaultLocation(String dbName, String tableName, boolean isExt)
-      throws SemanticException {
+  private String getDefaultLocation(TableName qualifiedTabName, boolean isExt)
+      throws SemanticException {
     String tblLocation;
     try {
       Warehouse wh = new Warehouse(conf);
-      tblLocation = wh.getDefaultTablePath(db.getDatabase(dbName), tableName, isExt).toUri().getPath();
+      tblLocation = wh.getDefaultTablePath(db.getDatabase(qualifiedTabName.getCat(),
+          qualifiedTabName.getDb()), qualifiedTabName.getTable(), isExt).toUri().getPath();
     } catch (MetaException | HiveException e) {
       throw new SemanticException(e);
     }
@@ -255,7 +256,7 @@
    */
   private Map<String, String> validateAndAddDefaultProperties(Map<String, String> tblProp, boolean isExt,
       StorageFormat storageFormat, String qualifiedTableName, List<Order> sortCols, boolean isMaterialization,
-      boolean isTemporaryTable, boolean isTransactional, boolean isManaged, String[] qualifiedTabName,
+      boolean isTemporaryTable, boolean isTransactional, boolean isManaged, TableName qualifiedTabName,
       boolean isTableTypeChanged) throws SemanticException {
     Map<String, String> retValue = Optional.ofNullable(tblProp).orElseGet(HashMap::new);
@@ -316,7 +317,7 @@ private Map<String, String> validateAndAddDefaultProperties(
     if (isIcebergTable(retValue)) {
       SessionStateUtil.addResourceOrThrow(conf, SessionStateUtil.DEFAULT_TABLE_LOCATION,
-          getDefaultLocation(qualifiedTabName[0], qualifiedTabName[1], true));
+          getDefaultLocation(qualifiedTabName, true));
     }
     return retValue;
   }
@@ -348,7 +349,8 @@ private void updateDefaultTblProps(Map<String, String> source, Map<String, String> target,
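For orientation: the new first alternative in the grammar uses an ANTLR syntactic predicate, so the parser only commits to the catalog path when it actually sees three dot-separated identifiers. The pair-building actions reduce to the following plain-Java mapping (a minimal sketch; the class and method names are illustrative, only the pairing rules come from the grammar actions):

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class TableRefSketch {
  // Mirrors how the three grammar alternatives populate the (db, table)
  // pair list, with the catalog folded into the db slot as "cat.db".
  static Pair<String, String> resolve(String... parts) {
    switch (parts.length) {
      case 1:  return new ImmutablePair<>(null, parts[0]);                      // table
      case 2:  return new ImmutablePair<>(parts[0], parts[1]);                  // db.table
      case 3:  return new ImmutablePair<>(parts[0] + "." + parts[1], parts[2]); // catalog.db.table
      default: throw new IllegalArgumentException("expected 1 to 3 name parts");
    }
  }

  public static void main(String[] args) {
    System.out.println(resolve("testcat", "testdb1", "test2")); // (testcat.testdb1,test2)
  }
}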
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
-    Map.Entry<String, String> tableIdentifier = getDbTableNamePair((ASTNode) root.getChild(0));
-    if (tableIdentifier.getValue().contains(".")) {
+    Triple<String, String, String> tableIdentifier =
+        getCatDbTableNameTriple((ASTNode) root.getChild(0));
+    if (tableIdentifier.getRight().contains(".")) {
       throw new SemanticException("The SHOW CREATE TABLE command is not supported for metadata tables.");
     }
-    Table table = getTable(tableIdentifier.getKey(), tableIdentifier.getValue(), true);
+    String catName = tableIdentifier.getLeft();
+    String dbName = tableIdentifier.getMiddle();
+    String tblName = tableIdentifier.getRight();
+    TableName tableName = new TableName(catName, dbName, tblName);
+    Table table = getTable(tableName, true);
     inputs.add(new ReadEntity(table));
 
     // If no DB was specified in statement, do not include it in the final output
-    ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getDbName(), table.getTableName(),
-        ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getKey()));
+    ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getCatName(), table.getDbName(), table.getTableName(),
+        ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getMiddle()));
     Task<?> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
     rootTasks.add(task);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
index cd580b7f70fd..878fdae6cecc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
@@ -33,12 +33,14 @@ public class ShowCreateTableDesc implements DDLDesc, Serializable {
   public static final String SCHEMA = "createtab_stmt#string";
 
+  private final String catalogName;
   private final String databaseName;
   private final String tableName;
   private final String resFile;
   private final boolean isRelative;
 
-  public ShowCreateTableDesc(String databaseName, String tableName, String resFile, boolean isRelative) {
+  public ShowCreateTableDesc(String catalogName, String databaseName, String tableName, String resFile, boolean isRelative) {
+    this.catalogName = catalogName;
     this.databaseName = databaseName;
     this.tableName = tableName;
     this.resFile = resFile;
@@ -60,6 +62,11 @@ public String getDatabaseName() {
     return databaseName;
   }
 
+  @Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+
   @Explain(displayName = "relative table location", explainLevels = { Level.EXTENDED })
   public boolean isRelative() {
     return isRelative;
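Note on the analyzer hunk above: getCatDbTableNameTriple (added later in this patch) folds a four-part name's metadata ref into the table slot, so checking the right component for a dot is exactly what rejects metadata tables. A small sketch of that guard with illustrative values:

import org.apache.commons.lang3.tuple.Triple;

public class MetaRefGuardSketch {
  public static void main(String[] args) {
    // A 4-part name such as testcat.testdb1.t1.files comes back with the
    // metadata ref folded into the table slot...
    Triple<String, String, String> id = Triple.of("testcat", "testdb1", "t1.files");
    // ...so the SHOW CREATE TABLE guard only has to inspect the right component.
    System.out.println(id.getRight().contains(".")); // true -> statement is rejected
  }
}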
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
index 7b8cc1f2ba49..12229b2f4052 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
@@ -23,13 +23,10 @@
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.ddl.ShowUtils;
@@ -50,7 +47,8 @@ public ShowCreateTableOperation(DDLOperationContext context, ShowCreateTableDesc desc) {
   public int execute() throws HiveException {
     // get the create table statement for the table and populate the output
     try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
-      Table table = context.getDb().getTable(desc.getDatabaseName(), desc.getTableName());
+      TableName tn = new TableName(desc.getCatalogName(), desc.getDatabaseName(), desc.getTableName());
+      Table table = context.getDb().getTable(tn, true);
       DDLPlanUtils ddlObj = new DDLPlanUtils();
       String command;
       if (table.isView()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java
index 826cb299e918..649c9ddf3b85 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesAnalyzer.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -49,6 +50,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
     ctx.setResFile(ctx.getLocalTmpPath());
 
+    String catName = HiveUtils.getCurrentCatalogOrDefault(conf);
     String dbName = SessionState.get().getCurrentDatabase();
     String tableNames = null;
     TableType tableTypeFilter = null;
@@ -57,7 +59,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
       ASTNode child = (ASTNode) root.getChild(i);
       if (child.getType() == HiveParser.TOK_FROM) { // Specifies a DB
         dbName = unescapeIdentifier(root.getChild(++i).getText());
-        db.validateDatabaseExists(dbName);
+        db.validateDatabaseExists(catName, dbName);
       } else if (child.getType() == HiveParser.TOK_TABLE_TYPE) { // Filter on table type
         String tableType = unescapeIdentifier(child.getChild(0).getText());
         if (!"table_type".equalsIgnoreCase(tableType)) {
@@ -73,7 +75,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
     inputs.add(new ReadEntity(getDatabase(dbName)));
 
-    ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames, tableTypeFilter, isExtended);
+    ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), catName, dbName, tableNames, tableTypeFilter, isExtended);
     Task<?> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
     rootTasks.add(task);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java
index 99c1118f9f98..c1be3301540c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesDesc.java
@@ -37,13 +37,15 @@ public class ShowTablesDesc implements DDLDesc, Serializable {
   private static final String EXTENDED_TABLES_SCHEMA = "tab_name,table_type#string,string";
 
   private final String resFile;
+  private final String catName;
   private final String dbName;
   private final String pattern;
   private final TableType typeFilter;
   private final boolean isExtended;
 
-  public ShowTablesDesc(Path resFile, String dbName, String pattern, TableType typeFilter, boolean isExtended) {
+  public ShowTablesDesc(Path resFile, String catName, String dbName, String pattern, TableType typeFilter, boolean isExtended) {
     this.resFile = resFile.toString();
+    this.catName = catName;
     this.dbName = dbName;
     this.pattern = pattern;
     this.typeFilter = typeFilter;
@@ -60,6 +62,11 @@ public String getResFile() {
     return resFile;
   }
 
+  @Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getCatName() {
+    return catName;
+  }
+
   @Explain(displayName = "database name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getDbName() {
     return dbName;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java
index 40f7b767e88d..551d4e6b477b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/show/tables/ShowTablesOperation.java
@@ -45,7 +45,7 @@ public ShowTablesOperation(DDLOperationContext context, ShowTablesDesc desc) {
   @Override
   public int execute() throws HiveException {
-    if (!context.getDb().databaseExists(desc.getDbName())) {
+    if (!context.getDb().databaseExists(desc.getCatName(), desc.getDbName())) {
       throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, desc.getDbName());
     }
 
@@ -61,7 +61,7 @@ private void showTables() throws HiveException {
     String pattern = UDFLike.likePatternToRegExp(desc.getPattern(), false, true);
     List<String> tableNames = new ArrayList<>(
-        context.getDb().getTablesByType(desc.getDbName(), pattern, desc.getTypeFilter()));
+        context.getDb().getTablesByType(desc.getCatName(), desc.getDbName(), pattern, desc.getTypeFilter()));
     Collections.sort(tableNames);
     LOG.debug("Found {} table(s) matching the SHOW TABLES statement.", tableNames.size());
@@ -79,7 +79,7 @@ private void showTablesExtended() throws HiveException {
     TableType typeFilter = desc.getTypeFilter();
     TableType[] tableTypes = typeFilter == null ? TableType.values() : new TableType[]{typeFilter};
     for (TableType tableType : tableTypes) {
-      List<String> tables = context.getDb().getTablesByType(desc.getDbName(), pattern, tableType);
+      List<String> tables = context.getDb().getTablesByType(desc.getCatName(), desc.getDbName(), pattern, tableType);
       tables.forEach(name -> tableNameToType.put(name, tableType.toString()));
     }
     LOG.debug("Found {} table(s) matching the SHOW EXTENDED TABLES statement.", tableNameToType.size());
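The operation now scopes every listing call to a (catalog, database) pair. A sketch of the extended flow, using the getTablesByType overload the patch itself calls; the helper name is hypothetical, and the assumption that a null pattern matches every table follows the pre-existing behavior of the database-only overload:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ShowTablesFlowSketch {
  // One metastore call per table type, all scoped to (catalog, database).
  static List<String> listAllTables(Hive db, String catName, String dbName) throws HiveException {
    List<String> names = new ArrayList<>();
    for (TableType type : TableType.values()) {
      names.addAll(db.getTablesByType(catName, dbName, null, type)); // null pattern assumed to match all
    }
    Collections.sort(names);
    return names;
  }
}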
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index b3977b8c9578..e1d476fc8ecc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1668,8 +1668,20 @@ public Table getTable(final String dbName, final String tableName) throws HiveException {
    * if there's an internal error or if the table doesn't exist
    */
   public Table getTable(TableName tableName) throws HiveException {
-    return this.getTable(ObjectUtils.firstNonNull(tableName.getDb(), SessionState.get().getCurrentDatabase()),
-        tableName.getTable(), tableName.getTableMetaRef(), true);
+    return getTable(tableName, true);
+  }
+
+  /**
+   * Returns metadata of the table. We should prioritize this method and phase out the other getTable overloads.
+   *
+   * @param tableName the TableName object
+   * @param throwException
+   *          controls whether an exception is thrown or null is returned when the table does not exist
+   * @exception HiveException
+   *          if there's an internal error or if the table doesn't exist
+   */
+  public Table getTable(TableName tableName, boolean throwException) throws HiveException {
+    return this.getTable(tableName, throwException, false, false);
   }
 
   /**
@@ -1767,10 +1779,23 @@ public Table getTable(final String dbName, final String tableName, String tableMetaRef,
    *          get column statistics if available
    * @return the table or if throwException is false a null value.
    * @throws HiveException
+   *
+   * @deprecated use {@link #getTable(TableName, boolean, boolean, boolean)}
    */
   public Table getTable(final String dbName, final String tableName, String tableMetaRef, boolean throwException,
                         boolean checkTransactional, boolean getColumnStats) throws HiveException {
+    TableName table = new TableName(getDefaultCatalog(conf), dbName, tableName, tableMetaRef);
+    return getTable(table, throwException, checkTransactional, getColumnStats);
+  }
+
+  public Table getTable(final TableName table, boolean throwException,
+      boolean checkTransactional, boolean getColumnStats) throws HiveException {
+
+    String catName = table.getCat() != null ? table.getCat() : HiveUtils.getCurrentCatalogOrDefault(conf);
+    String dbName = table.getDb() != null ? table.getDb() : SessionState.get().getCurrentDatabase();
+    String tableName = table.getTable();
+    String tableMetaRef = table.getTableMetaRef();
     if (tableName == null || tableName.equals("")) {
       throw new HiveException("empty table creation??");
     }
@@ -1780,7 +1805,7 @@ public Table getTable(final String dbName, final String tableName, String tableMetaRef,
     try {
       // Note: this is currently called w/true from StatsOptimizer only.
       GetTableRequest request = new GetTableRequest(dbName, tableName);
-      request.setCatName(getDefaultCatalog(conf));
+      request.setCatName(catName);
       request.setGetColumnStats(getColumnStats);
       request.setEngine(Constants.HIVE_ENGINE);
       if (checkTransactional) {
@@ -2554,6 +2579,9 @@ public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
     }
   }
 
+  /**
+   * @deprecated use {@link #validateDatabaseExists(String, String)}
+   */
   public void validateDatabaseExists(String databaseName) throws SemanticException {
     boolean exists;
     try {
@@ -2567,6 +2595,19 @@ public void validateDatabaseExists(String databaseName) throws SemanticException {
     }
   }
 
+  public void validateDatabaseExists(String catalogName, String databaseName) throws SemanticException {
+    boolean exists;
+    try {
+      exists = databaseExists(catalogName, databaseName);
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(databaseName), e);
+    }
+
+    if (!exists) {
+      throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(databaseName));
+    }
+  }
+
   public Catalog getCatalog(String catName) throws HiveException {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG);
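Net effect of the Hive.java changes: a TableName may now carry an explicit catalog, and anything left null falls back to the session. A hedged sketch of the fallback rules with hard-coded session values (the real code reads them from HiveUtils and SessionState):

public class CatalogFallbackSketch {
  // Explicit values on the TableName win; otherwise fall back to the session.
  static String[] resolve(String cat, String db, String table) {
    String sessionCat = "testcat"; // HiveUtils.getCurrentCatalogOrDefault(conf) in the patch
    String sessionDb = "testdb1";  // SessionState.get().getCurrentDatabase() in the patch
    return new String[] {cat != null ? cat : sessionCat, db != null ? db : sessionDb, table};
  }

  public static void main(String[] args) {
    System.out.println(String.join(".", resolve(null, null, "events")));      // testcat.testdb1.events
    System.out.println(String.join(".", resolve(null, "logs", "events")));    // testcat.logs.events
    System.out.println(String.join(".", resolve("spark", "logs", "events"))); // spark.logs.events
  }
}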
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 393006bd8f06..c67404a84f8d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -40,6 +40,7 @@
 import org.antlr.runtime.tree.Tree;
 import org.apache.calcite.sql.SqlKind;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.commons.lang3.tuple.Triple;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
@@ -124,6 +125,7 @@
 import com.google.common.collect.ImmutableList;
 
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_LOAD_DATA_USE_NATIVE_API;
+import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
 import static org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder.NULLS_FIRST;
 import static org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder.NULLS_LAST;
 import static org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.ASC;
@@ -431,6 +433,9 @@ public static String getUnescapedName(ASTNode tableOrColumnNode) throws SemanticException {
     return getUnescapedName(tableOrColumnNode, null);
   }
 
+  /**
+   * @deprecated use {@link #getCatDbTableNameTriple(ASTNode)} instead.
+   */
   public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) throws SemanticException {
     if (tableNameNode.getType() != HiveParser.TOK_TABNAME ||
@@ -459,6 +464,29 @@ public static Map.Entry<String, String> getDbTableNamePair(ASTNode tableNameNode) throws SemanticException {
     }
   }
 
+  public static Triple<String, String, String> getCatDbTableNameTriple(ASTNode tableNameNode) throws SemanticException {
+    if (tableNameNode.getType() != HiveParser.TOK_TABNAME
+        || tableNameNode.getChildCount() < 1 || tableNameNode.getChildCount() > 4) {
+      throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.INVALID_TABLE_NAME.getMsg(), tableNameNode));
+    }
+
+    List<String> parts = new ArrayList<>();
+    for (int i = 0; i < tableNameNode.getChildCount(); i++) {
+      String part = unescapeIdentifier(tableNameNode.getChild(i).getText());
+      if (part != null && part.contains(".")) {
+        throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tableNameNode));
+      }
+      parts.add(part);
+    }
+
+    return switch (parts.size()) {
+      case 1 -> Triple.of(null, null, parts.get(0));
+      case 2 -> Triple.of(null, parts.get(0), parts.get(1));
+      case 3 -> Triple.of(parts.get(0), parts.get(1), parts.get(2));
+      case 4 -> Triple.of(parts.get(0), parts.get(1), parts.get(2) + "." + parts.get(3));
+      default -> throw new SemanticException(ASTErrorUtils.getMsg(ErrorMsg.INVALID_TABLE_NAME.getMsg(), tableNameNode));
+    };
+  }
+
   public static String getUnescapedName(ASTNode tableOrColumnNode, String currentDatabase) throws SemanticException {
     int tokenType = tableOrColumnNode.getToken().getType();
     if (tokenType == HiveParser.TOK_TABNAME) {
@@ -508,13 +536,21 @@ public static TableName getQualifiedTableName(ASTNode tabNameNode, String catalogName) throws SemanticException {
           ErrorMsg.INVALID_TABLE_NAME.getMsg(), tabNameNode));
     }
 
-    if (tabNameNode.getChildCount() == 3) {
-      final String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText());
-      final String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText());
-      final String tableMetaRef = unescapeIdentifier(tabNameNode.getChild(2).getText());
+    if (tabNameNode.getChildCount() == 4) {
+      catalogName = unescapeIdentifier(tabNameNode.getChild(0).getText());
+      final String dbName = unescapeIdentifier(tabNameNode.getChild(1).getText());
+      final String tableName = unescapeIdentifier(tabNameNode.getChild(2).getText());
+      final String tableMetaRef = unescapeIdentifier(tabNameNode.getChild(3).getText());
       return HiveTableName.fromString(tableName, catalogName, dbName, tableMetaRef);
     }
 
+    if (tabNameNode.getChildCount() == 3) {
+      catalogName = unescapeIdentifier(tabNameNode.getChild(0).getText());
+      final String dbName = unescapeIdentifier(tabNameNode.getChild(1).getText());
+      final String tableName = unescapeIdentifier(tabNameNode.getChild(2).getText());
+      return HiveTableName.fromString(tableName, catalogName, dbName);
+    }
+
     if (tabNameNode.getChildCount() == 2) {
       final String dbName = unescapeIdentifier(tabNameNode.getChild(0).getText());
       final String tableName = unescapeIdentifier(tabNameNode.getChild(1).getText());
       if (dbName.contains(".") || tableName.contains(".")) {
         throw new SemanticException(ASTErrorUtils.getMsg(
             ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode));
       }
-      return HiveTableName.ofNullable(tableName, dbName);
+      return HiveTableName.fromString(tableName, catalogName, dbName);
     }
 
     final String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
     if (tableName.contains(".")) {
       throw new SemanticException(ASTErrorUtils.getMsg(
           ErrorMsg.OBJECTNAME_CONTAINS_DOT.getMsg(), tabNameNode));
     }
-    return HiveTableName.ofNullable(tableName);
+    return HiveTableName.fromString(tableName, catalogName, SessionState.get().getCurrentDatabase());
   }
 
   /**
@@ -1959,7 +1995,27 @@ protected Table getTable(TableName tn) throws SemanticException {
   }
 
   protected Table getTable(TableName tn, boolean throwException) throws SemanticException {
-    return getTable(tn.getDb(), tn.getTable(), tn.getTableMetaRef(), throwException);
+    String catName = tn.getCat();
+    String dbName = tn.getDb();
+    String tblName = tn.getTable();
+
+    Table tab;
+    try {
+      tab = db.getTable(tn, false);
+    }
+    catch (InvalidTableException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName,
+          catName, dbName).getNotEmptyDbTable()), e);
+    }
+    catch (Exception e) {
+      throw new SemanticException(e.getMessage(), e);
+    }
+    if (tab == null && throwException) {
+      // getTable needs a refactor with all ~50 occurrences
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName,
+          catName, dbName).getNotEmptyDbTable()));
+    }
+    return tab;
   }
 
   protected Table getTable(String tblName) throws SemanticException {
@@ -1974,25 +2030,14 @@ protected Table getTable(String database, String tblName, boolean throwException) throws SemanticException {
     return getTable(database, tblName, null, throwException);
   }
 
+  /**
+   * @deprecated use {@link #getTable(TableName, boolean)} instead.
+   * Since this is a protected method, can we remove it directly?
+   */
   protected Table getTable(String database, String tblName, String tableMetaRef, boolean throwException)
       throws SemanticException {
-    Table tab;
-    try {
-      String tableName = tableMetaRef == null ? tblName : tblName + "." + tableMetaRef;
-      tab = database == null ? db.getTable(tableName, false)
-          : db.getTable(database, tblName, tableMetaRef, false);
-    }
-    catch (InvalidTableException e) {
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()), e);
-    }
-    catch (Exception e) {
-      throw new SemanticException(e.getMessage(), e);
-    }
-    if (tab == null && throwException) {
-      // getTable needs a refactor with all ~50 occurrences
-      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(TableName.fromString(tblName, null, database).getNotEmptyDbTable()));
-    }
-    return tab;
+    TableName table = new TableName(getDefaultCatalog(conf), database, tblName, tableMetaRef);
+    return getTable(table, throwException);
   }
 
   public List<Task<?>> getAllRootTasks() {
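The arity rules of getCatDbTableNameTriple, extracted from the AST plumbing for quick reference (sketch only; the class name is illustrative, the switch body is the one added above):

import java.util.List;

import org.apache.commons.lang3.tuple.Triple;

public class TabNameAritySketch {
  // The child count of TOK_TABNAME decides which slots are filled; a 4-part
  // name folds the metadata ref (e.g. Iceberg's "files") into the table slot.
  static Triple<String, String, String> map(List<String> parts) {
    return switch (parts.size()) {
      case 1 -> Triple.of(null, null, parts.get(0));                                     // table
      case 2 -> Triple.of(null, parts.get(0), parts.get(1));                             // db.table
      case 3 -> Triple.of(parts.get(0), parts.get(1), parts.get(2));                     // catalog.db.table
      case 4 -> Triple.of(parts.get(0), parts.get(1), parts.get(2) + "." + parts.get(3)); // catalog.db.table.meta
      default -> throw new IllegalArgumentException("expected 1 to 4 name parts");
    };
  }

  public static void main(String[] args) {
    System.out.println(map(List.of("testcat", "testdb1", "t", "files"))); // (testcat,testdb1,t.files)
  }
}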
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index f08808b01415..e0c7572d7461 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -13888,12 +13888,13 @@ protected void validateStorageFormat(
   }
 
   /** Adds entities for create table/create view. */
-  protected void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type,
+  protected void addDbAndTabToOutputs(TableName qualifiedTabName, TableType type,
       boolean isTemporary, Map<String, String> tblProps, StorageFormat storageFormat) throws SemanticException {
-    Database database = getDatabase(qualifiedTabName[0]);
+    Database database = getDatabase(qualifiedTabName.getCat(), qualifiedTabName.getDb(), true);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
 
-    Table t = new Table(qualifiedTabName[0], qualifiedTabName[1]);
+    Table t = new Table(qualifiedTabName.getDb(), qualifiedTabName.getTable());
+    t.setCatalogName(qualifiedTabName.getCat());
     t.setParameters(tblProps);
     t.setTableType(type);
     t.setTemporary(isTemporary);
@@ -14071,7 +14072,7 @@ protected ASTNode analyzeCreateView(ASTNode ast, QB qb, PlannerContext plannerCtx)
         storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location, storageFormat.getSerde(),
         storageFormat.getStorageHandler(), storageFormat.getSerdeProps());
 
-    addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, TableType.MATERIALIZED_VIEW,
+    addDbAndTabToOutputs(qualTabName, TableType.MATERIALIZED_VIEW,
         false, tblProps, storageFormat);
     queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW);
     qb.setViewDesc(createVwDesc);
diff --git a/ql/src/test/queries/clientpositive/catalog_database_table.q b/ql/src/test/queries/clientpositive/catalog_database_table.q
new file mode 100644
index 000000000000..24f06a6d50b2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/catalog_database_table.q
@@ -0,0 +1,24 @@
+-- SORT_QUERY_RESULTS
+
+create catalog testcat location '/tmp/testcat' comment 'Hive test catalog';
+
+-- create a database in the new catalog testcat using the catalog.db pattern
+create database testcat.testdb1;
+
+-- switch the current database to testcat.testdb1
+use testcat.testdb1;
+
+-- create a table in the current database testcat.testdb1
+create table test1(id int);
+
+-- create a table in database testcat.testdb1 using the cat.db.tbl syntax
+create table testcat.testdb1.test2(id int);
+
+-- show tables in the current database testcat.testdb1
+show tables;
+
+-- show create table in the current database testcat.testdb1
+show create table test1;
+
+-- show create table using the cat.db.tbl syntax
+show create table testcat.testdb1.test2;
diff --git a/ql/src/test/results/clientpositive/llap/catalog_database_table.q.out b/ql/src/test/results/clientpositive/llap/catalog_database_table.q.out
new file mode 100644
index 000000000000..7e63ec304950
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/catalog_database_table.q.out
@@ -0,0 +1,81 @@
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:testcat
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:testcat
+#### A masked pattern was here ####
+PREHOOK: query: create database testcat.testdb1
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:testdb1
+POSTHOOK: query: create database testcat.testdb1
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:testdb1
+PREHOOK: query: use testcat.testdb1
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:testdb1
+POSTHOOK: query: use testcat.testdb1
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:testdb1
+PREHOOK: query: create table test1(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:testdb1
+PREHOOK: Output: testdb1@test1
+POSTHOOK: query: create table test1(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:testdb1
+POSTHOOK: Output: testdb1@test1
+PREHOOK: query: create table testcat.testdb1.test2(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:testdb1
+PREHOOK: Output: testdb1@test2
+POSTHOOK: query: create table testcat.testdb1.test2(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:testdb1
+POSTHOOK: Output: testdb1@test2
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:testdb1
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:testdb1
+test1
+test2
+PREHOOK: query: show create table test1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: testdb1@test1
+POSTHOOK: query: show create table test1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: testdb1@test1
+CREATE TABLE `test1`(
+  `id` int)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'bucketing_version'='2',
+#### A masked pattern was here ####
+PREHOOK: query: show create table testcat.testdb1.test2
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: testdb1@test2
+POSTHOOK: query: show create table testcat.testdb1.test2
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: testdb1@test2
+CREATE TABLE `testdb1`.`test2`(
+  `id` int)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+STORED AS INPUTFORMAT
+  'org.apache.hadoop.mapred.TextInputFormat'
+OUTPUTFORMAT
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'bucketing_version'='2',
+#### A masked pattern was here ####