27 changes: 21 additions & 6 deletions parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
@@ -243,12 +243,27 @@ tableName
@init { gParent.pushMsg("table name", state); }
@after { gParent.popMsg(state); }
:
db=identifier DOT tab=identifier (DOT meta=identifier)?
{tables.add(new ImmutablePair<>($db.text, $tab.text));}
-> ^(TOK_TABNAME $db $tab $meta?)
|
tab=identifier
{tables.add(new ImmutablePair<>(null, $tab.text));}
// case 1: catalog.db.table(.meta)?
(cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?)
=>
cat=identifier DOT db=identifier DOT tab=identifier (DOT meta=identifier)?
{
tables.add(new ImmutablePair<>($cat.text + "." + $db.text, $tab.text));
}
-> ^(TOK_TABNAME $cat $db $tab $meta?)

// case 2: db.table
Aggarwal-Raghav (Contributor) commented on Dec 29, 2025:
@zhangbutao, for Iceberg meta tables such as snapshots, files, etc., select * from db.tbl.meta should keep working, which is the existing behaviour; meaning case 2 should also accept meta?, IMO. Won't such a select be misinterpreted as case 1?

zhangbutao (Contributor, Author) replied:
Yes, there will be a breaking change here. We cannot distinguish between the semantic expressions cat.db.tbl and db.tbl.meta, so for Iceberg's meta tables I will switch to the format cat.db.tbl.meta, or follow Trino's approach as shown at https://trino.io/docs/current/connector/iceberg.html#metadata-tables, for example: SELECT * FROM "test_table$properties". Under the new grammar, select * from db1.tbl1.snapshots matches case 1 and resolves to table snapshots in catalog db1, database tbl1, rather than the snapshots meta table of db1.tbl1. Either way, this causes backward compatibility issues.

Regarding this breaking change, I will start a discussion on the dev community mailing list to see whether anyone has better suggestions.

| db=identifier DOT tab=identifier
{
tables.add(new ImmutablePair<>($db.text, $tab.text));
}
-> ^(TOK_TABNAME $db $tab)

// case 3: table
| tab=identifier
{
tables.add(new ImmutablePair<>(null, $tab.text));
}
-> ^(TOK_TABNAME $tab)
;
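Taken together, the rule dispatches on the number of dotted parts, and a three-part name is now always read as catalog.db.table, which is exactly the ambiguity discussed in the thread above. A minimal Java sketch of the same dispatch, splitting on '.' purely for illustration (the real parser works on identifier tokens; meta handling is elided, and all class and table names here are hypothetical):

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public final class TableNameDispatch {
    // Mirrors the rule's alternatives: catalog.db.table(.meta)?, db.table, table.
    static Pair<String, String> resolve(String dotted) {
        String[] p = dotted.split("\\.");
        if (p.length >= 3) {
            // case 1: the first two parts form the "cat.db" qualifier
            return new ImmutablePair<>(p[0] + "." + p[1], p[2]);
        }
        if (p.length == 2) {
            // case 2: db.table; a meta suffix is no longer accepted here
            return new ImmutablePair<>(p[0], p[1]);
        }
        // case 3: bare table name; the database is resolved later from the session
        return new ImmutablePair<>(null, p[0]);
    }

    public static void main(String[] args) {
        System.out.println(resolve("hive.sales.orders"));  // (hive.sales,orders)
        System.out.println(resolve("sales.orders"));       // (sales,orders)
        System.out.println(resolve("orders"));             // (null,orders)
        // the breaking case from the review thread:
        System.out.println(resolve("db1.tbl1.snapshots")); // (db1.tbl1,snapshots), no longer a meta table
    }
}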

ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -233,12 +233,13 @@ private static boolean isIcebergTable(Map<String, String> tblProps) {
.equalsIgnoreCase(tblProps.get(META_TABLE_STORAGE));
}

private String getDefaultLocation(String dbName, String tableName, boolean isExt)
throws SemanticException {
private String getDefaultLocation(TableName qualifiedTabName, boolean isExt)
throws SemanticException {
String tblLocation;
try {
Warehouse wh = new Warehouse(conf);
tblLocation = wh.getDefaultTablePath(db.getDatabase(dbName), tableName, isExt).toUri().getPath();
tblLocation = wh.getDefaultTablePath(db.getDatabase(qualifiedTabName.getCat(),
qualifiedTabName.getDb()), qualifiedTabName.getTable(), isExt).toUri().getPath();
} catch (MetaException | HiveException e) {
throw new SemanticException(e);
}
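The signature change above swaps the (dbName, tableName) string pair for org.apache.hadoop.hive.common.TableName, which also carries the catalog. A minimal sketch of the shape the new code relies on; the constructor and getters are the ones called in this diff, while the concrete names are hypothetical:

import org.apache.hadoop.hive.common.TableName;

public class TableNameDemo {
    public static void main(String[] args) {
        // the (catalog, database, table) triple that now flows through analyzeCreateTable
        TableName t = new TableName("hive", "sales", "orders");
        System.out.println(t.getCat());   // hive
        System.out.println(t.getDb());    // sales
        System.out.println(t.getTable()); // orders
    }
}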
@@ -255,7 +256,7 @@ private String getDefaultLocation(String dbName, String tableName, boolean isExt
*/
private Map<String, String> validateAndAddDefaultProperties(Map<String, String> tblProp, boolean isExt,
StorageFormat storageFormat, String qualifiedTableName, List<Order> sortCols, boolean isMaterialization,
boolean isTemporaryTable, boolean isTransactional, boolean isManaged, String[] qualifiedTabName,
boolean isTemporaryTable, boolean isTransactional, boolean isManaged, TableName qualifiedTabName,
boolean isTableTypeChanged)
throws SemanticException {
Map<String, String> retValue = Optional.ofNullable(tblProp).orElseGet(HashMap::new);
@@ -316,7 +317,7 @@ private Map<String, String> validateAndAddDefaultProperties(Map<String, String>

if (isIcebergTable(retValue)) {
SessionStateUtil.addResourceOrThrow(conf, SessionStateUtil.DEFAULT_TABLE_LOCATION,
getDefaultLocation(qualifiedTabName[0], qualifiedTabName[1], true));
getDefaultLocation(qualifiedTabName, true));
}
return retValue;
}
@@ -348,7 +349,8 @@ private void updateDefaultTblProps(Map<String, String> source, Map<String, Strin
*/
ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
throws SemanticException {
TableName qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
String currentCatalog = HiveUtils.getCurrentCatalogOrDefault(conf);
TableName qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0), currentCatalog);
final String dbDotTab = qualifiedTabName.getNotEmptyDbTable();

String likeTableName = null;
@@ -636,11 +638,9 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
}
tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary, isTransactional, isManaged,
new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()}, isDefaultTableTypeChanged);
isTemporary, isTransactional, isManaged, qualifiedTabName, isDefaultTableTypeChanged);
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
if (!Strings.isNullOrEmpty(sortOrder)) {
tblProps.put("default-sort-order", sortOrder);
}
@@ -665,7 +665,7 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
if (location != null) {
tblLocation = location;
} else {
tblLocation = getDefaultLocation(qualifiedTabName.getDb(), qualifiedTabName.getTable(), isExt);
tblLocation = getDefaultLocation(qualifiedTabName, isExt);
}
boolean isNativeColumnDefaultSupported = false;
try {
@@ -699,11 +699,9 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
}
tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary, isTransactional, isManaged,
new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()}, isDefaultTableTypeChanged);
isTemporary, isTransactional, isManaged, qualifiedTabName, isDefaultTableTypeChanged);
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, false, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, false, tblProps, storageFormat);

CreateTableDesc crtTranTblDesc =
new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets,
@@ -726,14 +724,10 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)

tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary,

isTransactional, isManaged, new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
isDefaultTableTypeChanged);
isTemporary, isTransactional, isManaged, qualifiedTabName, isDefaultTableTypeChanged);
tblProps.put(hive_metastoreConstants.TABLE_IS_CTLT, "true");
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);

Table likeTable = getTable(likeTableName, false);
if (likeTable != null) {
@@ -753,7 +747,7 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
new CreateTableLikeDesc(dbDotTab, isExt, isTemporary, storageFormat.getInputFormat(),
storageFormat.getOutputFormat(), location, storageFormat.getSerde(), storageFormat.getSerdeProps(),
tblProps, ifNotExists, likeTableName, isUserStorageFormat);
tblLocation = getDefaultLocation(qualifiedTabName.getDb(), qualifiedTabName.getTable(), isExt);
tblLocation = getDefaultLocation(qualifiedTabName, isExt);
SessionStateUtil.addResource(conf, META_TABLE_LOCATION, tblLocation);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc)));
break;
@@ -815,7 +809,7 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
}
tblLocation = location;
} else {
tblLocation = getDefaultLocation(qualifiedTabName.getDb(), qualifiedTabName.getTable(), isExt);
tblLocation = getDefaultLocation(qualifiedTabName, isExt);
}
SessionStateUtil.addResource(conf, META_TABLE_LOCATION, tblLocation);
if (!CollectionUtils.isEmpty(partCols)) {
@@ -825,11 +819,10 @@ ASTNode analyzeCreateTable(ASTNode ast, QB qb, PlannerContext plannerCtx)
tblProps =
validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
isTemporary, isTransactional, isManaged,
new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()}, isDefaultTableTypeChanged);
qualifiedTabName, isDefaultTableTypeChanged);
isExt = isExternalTableChanged(tblProps, isTransactional, isExt, isDefaultTableTypeChanged);
tblProps.put(TABLE_IS_CTAS, "true");
addDbAndTabToOutputs(new String[]{qualifiedTabName.getDb(), qualifiedTabName.getTable()},
TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
addDbAndTabToOutputs(qualifiedTabName, TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
tableDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partColNames, bucketCols, sortCols,
numBuckets, rowFormatParams.getFieldDelim(), rowFormatParams.getFieldEscape(),
rowFormatParams.getCollItemDelim(), rowFormatParams.getMapKeyDelim(), rowFormatParams.getLineDelim(),
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableAnalyzer.java
@@ -18,9 +18,9 @@

package org.apache.hadoop.hive.ql.ddl.table.create.show;

import java.util.Map.Entry;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
@@ -46,17 +46,21 @@ public ShowCreateTableAnalyzer(QueryState queryState) throws SemanticException {
public void analyzeInternal(ASTNode root) throws SemanticException {
ctx.setResFile(ctx.getLocalTmpPath());

Entry<String, String> tableIdentifier = getDbTableNamePair((ASTNode) root.getChild(0));
if (tableIdentifier.getValue().contains(".")) {
Triple<String, String, String> tableIdentifier = getCatDbTableNameTriple((ASTNode) root.getChild(0));
if (tableIdentifier.toString().contains(".")) {
throw new SemanticException("The SHOW CREATE TABLE command is not supported for metadata tables.");
}
Table table = getTable(tableIdentifier.getKey(), tableIdentifier.getValue(), true);
String catName = tableIdentifier.getLeft();
String dbName = tableIdentifier.getMiddle();
String tblName = tableIdentifier.getRight();
TableName tableName = new TableName(catName, dbName, tblName);
Table table = getTable(tableName, true);

inputs.add(new ReadEntity(table));

// If no DB was specified in statement, do not include it in the final output
ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getDbName(), table.getTableName(),
ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getKey()));
ShowCreateTableDesc desc = new ShowCreateTableDesc(table.getCatName(), table.getDbName(), table.getTableName(),
ctx.getResFile().toString(), StringUtils.isBlank(tableIdentifier.getMiddle()));
Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
rootTasks.add(task);

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableDesc.java
@@ -33,12 +33,14 @@ public class ShowCreateTableDesc implements DDLDesc, Serializable {

public static final String SCHEMA = "createtab_stmt#string";

private final String catalogName;
private final String databaseName;
private final String tableName;
private final String resFile;
private final boolean isRelative;

public ShowCreateTableDesc(String databaseName, String tableName, String resFile, boolean isRelative) {
public ShowCreateTableDesc(String catalogName, String databaseName, String tableName, String resFile, boolean isRelative) {
this.catalogName = catalogName;
this.databaseName = databaseName;
this.tableName = tableName;
this.resFile = resFile;
@@ -60,6 +62,11 @@ public String getDatabaseName() {
return databaseName;
}

@Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getCatalogName() {
return catalogName;
}

@Explain(displayName = "relative table location", explainLevels = { Level.EXTENDED })
public boolean isRelative() {
return isRelative;
ql/src/java/org/apache/hadoop/hive/ql/ddl/table/create/show/ShowCreateTableOperation.java
@@ -23,13 +23,10 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.ShowUtils;
@@ -50,7 +47,8 @@ public ShowCreateTableOperation(DDLOperationContext context, ShowCreateTableDesc
public int execute() throws HiveException {
// get the create table statement for the table and populate the output
try (DataOutputStream outStream = ShowUtils.getOutputStream(new Path(desc.getResFile()), context)) {
Table table = context.getDb().getTable(desc.getDatabaseName(), desc.getTableName());
TableName tn = new TableName(desc.getCatalogName(), desc.getDatabaseName(), desc.getTableName());
Table table = context.getDb().getTable(tn, true);
DDLPlanUtils ddlObj = new DDLPlanUtils();
String command;
if (table.isView()) {
ShowTablesAnalyzer.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -49,6 +50,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {

ctx.setResFile(ctx.getLocalTmpPath());

String catName = HiveUtils.getCurrentCatalogOrDefault(conf);
String dbName = SessionState.get().getCurrentDatabase();
String tableNames = null;
TableType tableTypeFilter = null;
@@ -57,7 +59,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {
ASTNode child = (ASTNode) root.getChild(i);
if (child.getType() == HiveParser.TOK_FROM) { // Specifies a DB
dbName = unescapeIdentifier(root.getChild(++i).getText());
db.validateDatabaseExists(dbName);
db.validateDatabaseExists(catName, dbName);
} else if (child.getType() == HiveParser.TOK_TABLE_TYPE) { // Filter on table type
String tableType = unescapeIdentifier(child.getChild(0).getText());
if (!"table_type".equalsIgnoreCase(tableType)) {
@@ -73,7 +75,7 @@ public void analyzeInternal(ASTNode root) throws SemanticException {

inputs.add(new ReadEntity(getDatabase(dbName)));

ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), dbName, tableNames, tableTypeFilter, isExtended);
ShowTablesDesc desc = new ShowTablesDesc(ctx.getResFile(), catName, dbName, tableNames, tableTypeFilter, isExtended);
Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
rootTasks.add(task);

ShowTablesDesc.java
@@ -37,13 +37,15 @@ public class ShowTablesDesc implements DDLDesc, Serializable {
private static final String EXTENDED_TABLES_SCHEMA = "tab_name,table_type#string,string";

private final String resFile;
private final String catName;
private final String dbName;
private final String pattern;
private final TableType typeFilter;
private final boolean isExtended;

public ShowTablesDesc(Path resFile, String dbName, String pattern, TableType typeFilter, boolean isExtended) {
public ShowTablesDesc(Path resFile, String catName, String dbName, String pattern, TableType typeFilter, boolean isExtended) {
this.resFile = resFile.toString();
this.catName = catName;
this.dbName = dbName;
this.pattern = pattern;
this.typeFilter = typeFilter;
@@ -60,6 +62,11 @@ public String getResFile() {
return resFile;
}

@Explain(displayName = "catalog name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getCatName() {
return catName;
}

@Explain(displayName = "database name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getDbName() {
return dbName;
ShowTablesOperation.java
@@ -45,7 +45,7 @@ public ShowTablesOperation(DDLOperationContext context, ShowTablesDesc desc) {

@Override
public int execute() throws HiveException {
if (!context.getDb().databaseExists(desc.getDbName())) {
if (!context.getDb().databaseExists(desc.getCatName(), desc.getDbName())) {
throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, desc.getDbName());
}

@@ -61,7 +61,7 @@ public int execute() throws HiveException {
private void showTables() throws HiveException {
String pattern = UDFLike.likePatternToRegExp(desc.getPattern(), false, true);
List<String> tableNames = new ArrayList<>(
context.getDb().getTablesByType(desc.getDbName(), pattern, desc.getTypeFilter()));
context.getDb().getTablesByType(desc.getCatName(), desc.getDbName(), pattern, desc.getTypeFilter()));
Collections.sort(tableNames);
LOG.debug("Found {} table(s) matching the SHOW TABLES statement.", tableNames.size());

@@ -79,7 +79,7 @@ private void showTablesExtended() throws HiveException {
TableType typeFilter = desc.getTypeFilter();
TableType[] tableTypes = typeFilter == null ? TableType.values() : new TableType[]{typeFilter};
for (TableType tableType : tableTypes) {
List<String> tables = context.getDb().getTablesByType(desc.getDbName(), pattern, tableType);
List<String> tables = context.getDb().getTablesByType(desc.getCatName(), desc.getDbName(), pattern, tableType);
tables.forEach(name -> tableNameToType.put(name, tableType.toString()));
}
LOG.debug("Found {} table(s) matching the SHOW EXTENDED TABLES statement.", tableNameToType.size());