diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java index c34124f410c1..1cd99c3daa2b 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java @@ -20,6 +20,9 @@ import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; import static org.apache.hadoop.hive.metastore.MetaStoreUtils.isIndexTable; +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.getDefaultCatalog; +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.prependCatalogToDbName; +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.isCatalogEnabled; import java.io.IOException; import java.lang.reflect.Constructor; @@ -125,6 +128,8 @@ public class HiveMetaStoreClient implements IMetaStoreClient { static final protected Logger LOG = LoggerFactory.getLogger("hive.metastore"); + private String currentCatalog; + public HiveMetaStoreClient(HiveConf conf) throws MetaException { this(conf, null, true); } @@ -136,6 +141,12 @@ public HiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader) throws public HiveMetaStoreClient(HiveConf conf, HiveMetaHookLoader hookLoader, Boolean allowEmbedded) throws MetaException { + if (isCatalogEnabled(conf)) { + this.currentCatalog = getDefaultCatalog(conf); + } else { + this.currentCatalog = ""; + } + this.hookLoader = hookLoader; if (conf == null) { conf = new HiveConf(HiveMetaStoreClient.class); @@ -371,7 +382,7 @@ public void alter_table(String defaultDatabaseName, String tblName, Table table, @Override public void alter_table_with_environmentContext(String dbname, String tbl_name, Table new_tbl, EnvironmentContext envContext) throws InvalidOperationException, MetaException, TException { - client.alter_table_with_environment_context(dbname, tbl_name, new_tbl, envContext); + client.alter_table_with_environment_context(prependCatalogToDbName(dbname, conf, getCurrentCatalog()), tbl_name, new_tbl, envContext); } /** @@ -388,7 +399,7 @@ public void alter_table_with_environmentContext(String dbname, String tbl_name, @Override public void renamePartition(final String dbname, final String name, final List part_vals, final Partition newPart) throws InvalidOperationException, MetaException, TException { - client.rename_partition(dbname, name, part_vals, newPart); + client.rename_partition(prependCatalogToDbName(dbname, conf, getCurrentCatalog()), name, part_vals, newPart); } private void open() throws MetaException { @@ -604,6 +615,7 @@ public Partition add_partition(Partition new_part) public Partition add_partition(Partition new_part, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { + if (!new_part.isSetCatName() && isCatalogEnabled(conf, getCurrentCatalog())) new_part.setCatName(getCurrentCatalog()); Partition p = client.add_partition_with_environment_context(new_part, envContext); return fastpath ? 
p : deepCopy(p); } @@ -620,6 +632,12 @@ public Partition add_partition(Partition new_part, EnvironmentContext envContext public int add_partitions(List new_parts) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { + if (isCatalogEnabled(conf, getCurrentCatalog()) && new_parts != null && !new_parts.isEmpty() && !new_parts.get(0).isSetCatName()) { + final String defaultCat = getCurrentCatalog(); + for(Partition p: new_parts) { + p.setCatName(defaultCat); + } + } return client.add_partitions(new_parts); } @@ -633,6 +651,16 @@ public List add_partitions( Partition part = parts.get(0); AddPartitionsRequest req = new AddPartitionsRequest( part.getDbName(), part.getTableName(), parts, ifNotExists); + + if(isCatalogEnabled(conf, getCurrentCatalog())) { + final String defaultCat = getCurrentCatalog(); + req.setCatName(defaultCat); + // Make sure all of the partitions have the catalog set as well + for(Partition p: parts) { + if (!p.isSetCatName()) p.setCatName(defaultCat); + } + } + req.setNeedResult(needResults); AddPartitionsResult result = client.add_partitions_req(req); return needResults ? filterHook.filterPartitions(result.getPartitions()) : null; @@ -640,6 +668,7 @@ public List add_partitions( @Override public int add_partitions_pspec(PartitionSpecProxy partitionSpec) throws TException { + if (isCatalogEnabled(conf, getCurrentCatalog()) && partitionSpec.getCatName() == null) partitionSpec.setCatName(getCurrentCatalog()); return client.add_partitions_pspec(partitionSpec.toPartitionSpec()); } @@ -665,7 +694,7 @@ public Partition appendPartition(String db_name, String table_name, public Partition appendPartition(String db_name, String table_name, List part_vals, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { - Partition p = client.append_partition_with_environment_context(db_name, table_name, + Partition p = client.append_partition_with_environment_context(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), table_name, part_vals, envContext); return fastpath ? p : deepCopy(p); } @@ -679,8 +708,8 @@ public Partition appendPartition(String dbName, String tableName, String partNam public Partition appendPartition(String dbName, String tableName, String partName, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { - Partition p = client.append_partition_by_name_with_environment_context(dbName, tableName, - partName, envContext); + Partition p = client.append_partition_by_name_with_environment_context(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), + tableName, partName, envContext); return fastpath ? 
p : deepCopy(p); } @@ -696,8 +725,8 @@ public Partition exchange_partition(Map partitionSpecs, String sourceDb, String sourceTable, String destDb, String destinationTableName) throws MetaException, NoSuchObjectException, InvalidObjectException, TException { - return client.exchange_partition(partitionSpecs, sourceDb, sourceTable, - destDb, destinationTableName); + return client.exchange_partition(partitionSpecs, prependCatalogToDbName(sourceDb, conf, getCurrentCatalog()), sourceTable, + prependCatalogToDbName(destDb, conf, getCurrentCatalog()), destinationTableName); } /** @@ -712,8 +741,8 @@ public List exchange_partitions(Map partitionSpecs, String sourceDb, String sourceTable, String destDb, String destinationTableName) throws MetaException, NoSuchObjectException, InvalidObjectException, TException { - return client.exchange_partitions(partitionSpecs, sourceDb, sourceTable, - destDb, destinationTableName); + return client.exchange_partitions(partitionSpecs, prependCatalogToDbName(sourceDb, conf, getCurrentCatalog()), sourceTable, + prependCatalogToDbName(destDb, conf, getCurrentCatalog()), destinationTableName); } @Override @@ -734,6 +763,7 @@ public void validatePartitionNameCharacters(List partVals) @Override public void createDatabase(Database db) throws AlreadyExistsException, InvalidObjectException, MetaException, TException { + if (!db.isSetCatalogName() && isCatalogEnabled(conf, getCurrentCatalog())) db.setCatalogName(getCurrentCatalog()); client.create_database(db); } @@ -747,6 +777,7 @@ public void createDatabase(Database db) @Override public void createTable(Table tbl) throws AlreadyExistsException, InvalidObjectException, MetaException, NoSuchObjectException, TException { + if (!tbl.isSetCatName() && isCatalogEnabled(conf, getCurrentCatalog())) tbl.setCatName(getCurrentCatalog()); createTable(tbl, null); } @@ -875,7 +906,7 @@ public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownD } } } - client.drop_database(name, deleteData, cascade); + client.drop_database(prependCatalogToDbName(name, conf, getCurrentCatalog()), deleteData, cascade); } /** @@ -923,8 +954,8 @@ public boolean dropPartition(String dbName, String tableName, String partName, b public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException, MetaException, TException { - return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName, - deleteData, envContext); + return client.drop_partition_by_name_with_environment_context(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), tableName, + partName, deleteData, envContext); } /** @@ -957,8 +988,8 @@ public boolean dropPartition(String db_name, String tbl_name, public boolean dropPartition(String db_name, String tbl_name, List part_vals, boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException, MetaException, TException { - return client.drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, - envContext); + return client.drop_partition_with_environment_context(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_vals, + deleteData, envContext); } @Override @@ -975,6 +1006,9 @@ public List dropPartitions(String dbName, String tblName, } rps.setExprs(exprs); DropPartitionsRequest req = new DropPartitionsRequest(dbName, tblName, rps); + if(isCatalogEnabled(conf, getCurrentCatalog())) { + req.setCatName(getCurrentCatalog()); + } 
req.setDeleteData(options.deleteData); req.setNeedResult(options.returnResults); req.setIfExists(options.ifExists); @@ -1154,7 +1188,7 @@ public Map getTypeAll(String name) throws MetaException, public List getDatabases(String databasePattern) throws MetaException { try { - return filterHook.filterDatabases(client.get_databases(databasePattern)); + return filterHook.filterDatabases(client.get_databases(prependCatalogToDbName(databasePattern, conf, getCurrentCatalog()))); } catch (Exception e) { MetaStoreUtils.logAndThrowMetaException(e); } @@ -1164,12 +1198,7 @@ public List getDatabases(String databasePattern) /** {@inheritDoc} */ @Override public List getAllDatabases() throws MetaException { - try { - return filterHook.filterDatabases(client.get_all_databases()); - } catch (Exception e) { - MetaStoreUtils.logAndThrowMetaException(e); - } - return null; + return getDatabases("*"); } /** @@ -1184,21 +1213,22 @@ public List getAllDatabases() throws MetaException { @Override public List listPartitions(String db_name, String tbl_name, short max_parts) throws NoSuchObjectException, MetaException, TException { - List parts = client.get_partitions(db_name, tbl_name, max_parts); + List parts = client.get_partitions(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, max_parts); return fastpath ? parts : deepCopyPartitions(filterHook.filterPartitions(parts)); } @Override public PartitionSpecProxy listPartitionSpecs(String dbName, String tableName, int maxParts) throws TException { return PartitionSpecProxy.Factory.get(filterHook.filterPartitionSpecs( - client.get_partitions_pspec(dbName, tableName, maxParts))); + client.get_partitions_pspec(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), tableName, maxParts))); } @Override public List listPartitions(String db_name, String tbl_name, List part_vals, short max_parts) throws NoSuchObjectException, MetaException, TException { - List parts = client.get_partitions_ps(db_name, tbl_name, part_vals, max_parts); + List parts = client.get_partitions_ps(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_vals, + max_parts); return fastpath ? parts : deepCopyPartitions(filterHook.filterPartitions(parts)); } @@ -1206,7 +1236,7 @@ public List listPartitions(String db_name, String tbl_name, public List listPartitionsWithAuthInfo(String db_name, String tbl_name, short max_parts, String user_name, List group_names) throws NoSuchObjectException, MetaException, TException { - List parts = client.get_partitions_with_auth(db_name, tbl_name, max_parts, + List parts = client.get_partitions_with_auth(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, max_parts, user_name, group_names); return fastpath ? parts :deepCopyPartitions(filterHook.filterPartitions(parts)); } @@ -1216,7 +1246,7 @@ public List listPartitionsWithAuthInfo(String db_name, String tbl_name, List part_vals, short max_parts, String user_name, List group_names) throws NoSuchObjectException, MetaException, TException { - List parts = client.get_partitions_ps_with_auth(db_name, + List parts = client.get_partitions_ps_with_auth(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_vals, max_parts, user_name, group_names); return fastpath ? 
parts : deepCopyPartitions(filterHook.filterPartitions(parts)); } @@ -1239,7 +1269,8 @@ public List listPartitionsWithAuthInfo(String db_name, public List listPartitionsByFilter(String db_name, String tbl_name, String filter, short max_parts) throws MetaException, NoSuchObjectException, TException { - List parts = client.get_partitions_by_filter(db_name, tbl_name, filter, max_parts); + List parts = client.get_partitions_by_filter(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, filter, + max_parts); return fastpath ? parts :deepCopyPartitions(filterHook.filterPartitions(parts)); } @@ -1248,7 +1279,7 @@ public PartitionSpecProxy listPartitionSpecsByFilter(String db_name, String tbl_ String filter, int max_parts) throws MetaException, NoSuchObjectException, TException { return PartitionSpecProxy.Factory.get(filterHook.filterPartitionSpecs( - client.get_part_specs_by_filter(db_name, tbl_name, filter, max_parts))); + client.get_part_specs_by_filter(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, filter, max_parts))); } @Override @@ -1297,7 +1328,7 @@ public boolean listPartitionsByExpr(String db_name, String tbl_name, byte[] expr @Override public Database getDatabase(String name) throws NoSuchObjectException, MetaException, TException { - Database d = client.get_database(name); + Database d = client.get_database(prependCatalogToDbName(name, conf, getCurrentCatalog())); return fastpath ? d :deepCopy(filterHook.filterDatabase(d)); } @@ -1314,20 +1345,21 @@ public Database getDatabase(String name) throws NoSuchObjectException, @Override public Partition getPartition(String db_name, String tbl_name, List part_vals) throws NoSuchObjectException, MetaException, TException { - Partition p = client.get_partition(db_name, tbl_name, part_vals); + Partition p = client.get_partition(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_vals); return fastpath ? p : deepCopy(filterHook.filterPartition(p)); } @Override public List getPartitionsByNames(String db_name, String tbl_name, List part_names) throws NoSuchObjectException, MetaException, TException { - List parts = client.get_partitions_by_names(db_name, tbl_name, part_names); + List parts = client.get_partitions_by_names(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_names); return fastpath ? parts : deepCopyPartitions(filterHook.filterPartitions(parts)); } @Override public PartitionValuesResponse listPartitionValues(PartitionValuesRequest request) throws MetaException, TException, NoSuchObjectException { + if (!request.isSetCatName() && isCatalogEnabled(conf, getCurrentCatalog())) request.setCatName(getCurrentCatalog()); return client.get_partition_values(request); } @@ -1336,7 +1368,7 @@ public Partition getPartitionWithAuthInfo(String db_name, String tbl_name, List part_vals, String user_name, List group_names) throws MetaException, UnknownTableException, NoSuchObjectException, TException { - Partition p = client.get_partition_with_auth(db_name, tbl_name, part_vals, user_name, + Partition p = client.get_partition_with_auth(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_vals, user_name, group_names); return fastpath ? 
p : deepCopy(filterHook.filterPartition(p)); } @@ -1355,7 +1387,7 @@ public Partition getPartitionWithAuthInfo(String db_name, String tbl_name, @Override public Table getTable(String dbname, String name) throws MetaException, TException, NoSuchObjectException { - Table t = client.get_table(dbname, name); + Table t = client.get_table(prependCatalogToDbName(dbname, conf, getCurrentCatalog()), name); return fastpath ? t : deepCopy(filterHook.filterTable(t)); } @@ -1372,7 +1404,7 @@ public Table getTable(String tableName) throws MetaException, TException, @Override public List getTableObjectsByName(String dbName, List tableNames) throws MetaException, InvalidOperationException, UnknownDBException, TException { - List
<Table> tabs = client.get_table_objects_by_name(dbName, tableNames); + List
<Table> tabs = client.get_table_objects_by_name(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), tableNames); + return fastpath ? tabs : deepCopyTables(filterHook.filterTables(tabs)); } @@ -1381,7 +1413,7 @@ public List<Table>
getTableObjectsByName(String dbName, List tableNames) public List listTableNamesByFilter(String dbName, String filter, short maxTables) throws MetaException, TException, InvalidOperationException, UnknownDBException { return filterHook.filterTableNames(dbName, - client.get_table_names_by_filter(dbName, filter, maxTables)); + client.get_table_names_by_filter(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), filter, maxTables)); } /** @@ -1400,7 +1432,7 @@ public Type getType(String name) throws NoSuchObjectException, MetaException, TE @Override public List getTables(String dbname, String tablePattern) throws MetaException { try { - return filterHook.filterTableNames(dbname, client.get_tables(dbname, tablePattern)); + return filterHook.filterTableNames(dbname, client.get_tables(prependCatalogToDbName(dbname, conf, getCurrentCatalog()), tablePattern)); } catch (Exception e) { MetaStoreUtils.logAndThrowMetaException(e); } @@ -1411,7 +1443,8 @@ public List getTables(String dbname, String tablePattern) throws MetaExc @Override public List getTables(String dbname, String tablePattern, TableType tableType) throws MetaException { try { - return filterHook.filterTableNames(dbname, client.get_tables_by_type(dbname, tablePattern, tableType.toString())); + return filterHook.filterTableNames(dbname, client.get_tables_by_type(prependCatalogToDbName(dbname, conf, getCurrentCatalog()), + tablePattern, tableType.toString())); } catch (Exception e) { MetaStoreUtils.logAndThrowMetaException(e); } @@ -1422,7 +1455,7 @@ public List getTables(String dbname, String tablePattern, TableType tabl public List getTableMeta(String dbPatterns, String tablePatterns, List tableTypes) throws MetaException { try { - return filterNames(client.get_table_meta(dbPatterns, tablePatterns, tableTypes)); + return filterNames(client.get_table_meta(prependCatalogToDbName(dbPatterns, conf, getCurrentCatalog()), tablePatterns, tableTypes)); } catch (Exception e) { MetaStoreUtils.logAndThrowMetaException(e); } @@ -1453,7 +1486,7 @@ private List filterNames(List metas) throws MetaException @Override public List getAllTables(String dbname) throws MetaException { try { - return filterHook.filterTableNames(dbname, client.get_all_tables(dbname)); + return filterHook.filterTableNames(dbname, client.get_all_tables(prependCatalogToDbName(dbname, conf, getCurrentCatalog()))); } catch (Exception e) { MetaStoreUtils.logAndThrowMetaException(e); } @@ -1464,7 +1497,7 @@ public List getAllTables(String dbname) throws MetaException { public boolean tableExists(String databaseName, String tableName) throws MetaException, TException, UnknownDBException { try { - return filterHook.filterTable(client.get_table(databaseName, tableName)) != null; + return filterHook.filterTable(client.get_table(prependCatalogToDbName(databaseName, conf, getCurrentCatalog()), tableName)) != null; } catch (NoSuchObjectException e) { return false; } @@ -1482,7 +1515,7 @@ public boolean tableExists(String tableName) throws MetaException, public List listPartitionNames(String dbName, String tblName, short max) throws MetaException, TException { return filterHook.filterPartitionNames(dbName, tblName, - client.get_partition_names(dbName, tblName, max)); + client.get_partition_names(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), tblName, max)); } @Override @@ -1490,7 +1523,7 @@ public List listPartitionNames(String db_name, String tbl_name, List part_vals, short max_parts) throws MetaException, TException, NoSuchObjectException { return 
filterHook.filterPartitionNames(db_name, tbl_name, - client.get_partition_names_ps(db_name, tbl_name, part_vals, max_parts)); + client.get_partition_names_ps(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, part_vals, max_parts)); } /** @@ -1509,22 +1542,27 @@ public List listPartitionNames(String db_name, String tbl_name, public int getNumPartitionsByFilter(String db_name, String tbl_name, String filter) throws MetaException, NoSuchObjectException, TException { - return client.get_num_partitions_by_filter(db_name, tbl_name, filter); + return client.get_num_partitions_by_filter(prependCatalogToDbName(db_name, conf, getCurrentCatalog()), tbl_name, filter); } @Override public void alter_partition(String dbName, String tblName, Partition newPart) throws InvalidOperationException, MetaException, TException { client.alter_partition(dbName, tblName, newPart); + // alter_partition(dbName, tblName, newPart, null); } @Override public void alter_partition(String dbName, String tblName, Partition newPart, EnvironmentContext environmentContext) throws InvalidOperationException, MetaException, TException { + if (isCatalogEnabled(conf, getCurrentCatalog()) && !newPart.isSetCatName()) { + newPart.setCatName(getCurrentCatalog()); + } + String dbNameWithCatalog = prependCatalogToDbName(dbName, conf, getCurrentCatalog()); if (environmentContext == null) { - client.alter_partition(dbName, tblName, newPart); + client.alter_partition(dbNameWithCatalog, tblName, newPart); } else { - client.alter_partition_with_environment_context(dbName, tblName, newPart, environmentContext); + client.alter_partition_with_environment_context(dbNameWithCatalog, tblName, newPart, environmentContext); } } @@ -1537,17 +1575,24 @@ public void alter_partitions(String dbName, String tblName, List newP @Override public void alter_partitions(String dbName, String tblName, List newParts, EnvironmentContext environmentContext) throws InvalidOperationException, MetaException, TException { + if (isCatalogEnabled(conf, getCurrentCatalog())) { + final String defaultCat = getCurrentCatalog(); + for(Partition p: newParts) { + if (!p.isSetCatName()) p.setCatName(defaultCat); + } + } + String dbNameWithCatalog = prependCatalogToDbName(dbName, conf, getCurrentCatalog()); if (environmentContext == null) { - client.alter_partitions(dbName, tblName, newParts); + client.alter_partitions(dbNameWithCatalog, tblName, newParts); } else { - client.alter_partitions_with_environment_context(dbName, tblName, newParts, environmentContext); + client.alter_partitions_with_environment_context(dbNameWithCatalog, tblName, newParts, environmentContext); } } @Override public void alterDatabase(String dbName, Database db) throws MetaException, NoSuchObjectException, TException { - client.alter_database(dbName, db); + client.alter_database(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), db); } /** * @param db @@ -1563,7 +1608,7 @@ public void alterDatabase(String dbName, Database db) public List getFields(String db, String tableName) throws MetaException, TException, UnknownTableException, UnknownDBException { - List fields = client.get_fields(db, tableName); + List fields = client.get_fields(prependCatalogToDbName(db, conf, getCurrentCatalog()), tableName); return fastpath ? 
fields : deepCopyFieldSchemas(fields); } @@ -1757,7 +1802,7 @@ public List getSchema(String db, String tableName) envCxt = new EnvironmentContext(props); } - List fields = client.get_schema_with_environment_context(db, tableName, envCxt); + List fields = client.get_schema_with_environment_context(prependCatalogToDbName(db, conf, getCurrentCatalog()), tableName, envCxt); return fastpath ? fields : deepCopyFieldSchemas(fields); } @@ -1770,7 +1815,7 @@ public String getConfigValue(String name, String defaultValue) @Override public Partition getPartition(String db, String tableName, String partName) throws MetaException, TException, UnknownTableException, NoSuchObjectException { - Partition p = client.get_partition_by_name(db, tableName, partName); + Partition p = client.get_partition_by_name(prependCatalogToDbName(db, conf, getCurrentCatalog()), tableName, partName); return fastpath ? p : deepCopy(filterHook.filterPartition(p)); } @@ -1782,8 +1827,8 @@ public Partition appendPartitionByName(String dbName, String tableName, String p public Partition appendPartitionByName(String dbName, String tableName, String partName, EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { - Partition p = client.append_partition_by_name_with_environment_context(dbName, tableName, - partName, envContext); + Partition p = client.append_partition_by_name_with_environment_context(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), + tableName, partName, envContext); return fastpath ? p : deepCopy(p); } @@ -1795,8 +1840,8 @@ public boolean dropPartitionByName(String dbName, String tableName, String partN public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException, MetaException, TException { - return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName, - deleteData, envContext); + return client.drop_partition_by_name_with_environment_context(prependCatalogToDbName(dbName, conf, getCurrentCatalog()), tableName, + partName, deleteData, envContext); } private HiveMetaHook getHook(Table tbl) throws MetaException { @@ -2419,7 +2464,7 @@ protected void create_table_with_environment_context(Table tbl, EnvironmentConte protected void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext envContext) throws MetaException, TException, NoSuchObjectException, UnsupportedOperationException { - client.drop_table_with_environment_context(dbname, name, deleteData, envContext); + client.drop_table_with_environment_context(prependCatalogToDbName(dbname, conf, getCurrentCatalog()), name, deleteData, envContext); } @Override @@ -2564,4 +2609,12 @@ public boolean cacheFileMetadata( CacheFileMetadataResult result = client.cache_file_metadata(req); return result.isIsSupported(); } + + public void setCurrentCatalog(String catalogName) { + this.currentCatalog = catalogName; + } + + public String getCurrentCatalog() { + return this.currentCatalog; + } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index a037de5d7514..7df185243f7c 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -97,6 +97,28 @@ public class MetaStoreUtils { protected static final Logger LOG = 
LoggerFactory.getLogger("hive.log"); + // The following two are public for any external users who wish to use them. + /** + * This character is used to mark a database name as having a catalog name prepended. This + * marker should be placed first in the String to make it easy to determine that this has both + * a catalog and a database name. @ is chosen as it is not used in regular expressions. This + * is only intended for use when making old Thrift calls that do not support catalog names. + */ + public static final char CATALOG_DB_THRIFT_NAME_MARKER = '@'; + + /** + * This String is used to seaprate the catalog name from the database name. This should only + * be used in Strings that are prepended with {@link #CATALOG_DB_THRIFT_NAME_MARKER}. # is + * chosen because it is not used in regular expressions. this is only intended for use when + * making old Thrift calls that do not support catalog names. + */ + public static final String CATALOG_DB_SEPARATOR = "#"; + + /** + * Mark a database as being empty (as distinct from null). + */ + public static final String DB_EMPTY_MARKER = "!"; + public static final String DEFAULT_DATABASE_NAME = "default"; public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database"; public static final String DEFAULT_SERIALIZATION_FORMAT = "1"; @@ -1977,4 +1999,67 @@ public static void mergeColStats(ColumnStatistics csNew, ColumnStatistics csOld) } csNew.setStatsObj(list); } + + /** + * Prepend the default catalog onto the database name if property "metastore.catalog.default" is configured, + * otherwise return the database name. + * @param dbName database name + * @param conf configuration object, used to determine default catalog + * @return a database name with or without the catalog name prepended. + */ + public static String prependCatalogToDbName(String dbName, HiveConf conf, String currentCatalog) { + StringBuilder buf = new StringBuilder(); + + if(isCatalogEnabled(conf, currentCatalog)) { + buf.append(CATALOG_DB_THRIFT_NAME_MARKER) + .append(currentCatalog) + .append(CATALOG_DB_SEPARATOR); + } + + if (dbName != null) { + if (dbName.isEmpty()) buf.append(DB_EMPTY_MARKER); + else buf.append(dbName); + } + return buf.toString(); + } + + public static Boolean isCatalogEnabled(HiveConf conf) { + if (conf == null) { + return false; + } + + String catName = conf.get(CATALOG_DEFAULT); + if (catName == null || "".equals(catName)) return false; + return true; + } + + public static Boolean isCatalogEnabled(HiveConf conf, String currentCatalog) { + if (conf == null) { + return false; + } + + String catName = conf.get(CATALOG_DEFAULT); + if ((catName == null || "".equals(catName)) && "".equals(currentCatalog)) return false; + return true; + } + + public static String getDefaultCatalog(HiveConf conf) { + // If catalog is disabled, we should return an empty string so we don't mistakenly + // use `Warehouse.DEFAULT_CATALOG_NAME` as catalog name. + if (!isCatalogEnabled(conf)) { + return ""; + } + + if (conf == null) { + LOG.warn("Configuration is null, so going with default catalog."); + return Warehouse.DEFAULT_CATALOG_NAME; + } + + String catName = conf.get(CATALOG_DEFAULT); + if (catName == null || "".equals(catName)) catName = Warehouse.DEFAULT_CATALOG_NAME; + return catName; + } + + //"The default catalog to use when a catalog is not specified. Default is 'hive' (the default catalog). 
+ public static final String CATALOG_DEFAULT = "metastore.catalog.default"; } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java index 27283d83e8a2..b56592a0ca23 100755 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java @@ -58,6 +58,8 @@ * This class represents a warehouse where data of Hive tables is stored */ public class Warehouse { + public static final String DEFAULT_CATALOG_NAME = "hive"; + private Path whRoot; private final Configuration conf; private final String whRootString; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java index 7e94e34295b9..23a13f1a14bf 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/CompositePartitionSpecProxy.java @@ -33,6 +33,7 @@ */ public class CompositePartitionSpecProxy extends PartitionSpecProxy { + private String catName; private String dbName; private String tableName; private List partitionSpecs; @@ -42,10 +43,12 @@ public class CompositePartitionSpecProxy extends PartitionSpecProxy { protected CompositePartitionSpecProxy(List partitionSpecs) { this.partitionSpecs = partitionSpecs; if (partitionSpecs.isEmpty()) { + catName = null; dbName = null; tableName = null; } else { + catName = partitionSpecs.get(0).getCatName(); dbName = partitionSpecs.get(0).getDbName(); tableName = partitionSpecs.get(0).getTableName(); this.partitionSpecProxies = new ArrayList(partitionSpecs.size()); @@ -60,6 +63,7 @@ protected CompositePartitionSpecProxy(List partitionSpecs) { } protected CompositePartitionSpecProxy(String dbName, String tableName, List partitionSpecs) { + this.catName = null; this.dbName = dbName; this.tableName = tableName; this.partitionSpecs = partitionSpecs; @@ -147,6 +151,11 @@ public Partition getCurrent() { return iterator.getCurrent(); } + @Override + public String getCatName() { + return composite.getCatName(); + } + @Override public String getDbName() { return composite.dbName; @@ -183,6 +192,14 @@ public void setCreateTime(long time) { } } + @Override + public void setCatName(String catName) { + this.catName = catName; + for (PartitionSpecProxy partSpecProxy : partitionSpecProxies) { + partSpecProxy.setCatName(catName); + } + } + @Override public void setDbName(String dbName) { this.dbName = dbName; @@ -199,6 +216,11 @@ public void setTableName(String tableName) { } } + @Override + public String getCatName() { + return catName; + } + @Override public String getDbName() { return dbName; diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java index 154011ebf5a9..478478df7a1a 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionListComposingSpecProxy.java @@ -39,6 +39,11 @@ protected PartitionListComposingSpecProxy(PartitionSpec partitionSpec) { this.partitionSpec = partitionSpec; } + @Override + public String getCatName() { + return 
partitionSpec.getCatName(); + } + @Override public String getDbName() { return partitionSpec.getDbName(); @@ -64,6 +69,14 @@ public int size() { return partitionSpec.getPartitionList().getPartitionsSize(); } + @Override + public void setCatName(String catName) { + partitionSpec.setCatName(catName); + for (Partition partition : partitionSpec.getPartitionList().getPartitions()) { + partition.setCatName(catName); + } + } + @Override public void setDbName(String dbName) { partitionSpec.setDbName(dbName); @@ -117,6 +130,11 @@ public Partition getCurrent() { return partitionList.get(index); } + @Override + public String getCatName() { + return partitionSpecProxy.getCatName(); + } + @Override public String getDbName() { return partitionSpecProxy.getDbName(); diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java index fdb086749be9..ac531dbe9243 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecProxy.java @@ -36,6 +36,12 @@ public abstract class PartitionSpecProxy { */ public abstract int size(); + /** + * Set catalog name. + * @param catName catalog name. + */ + public abstract void setCatName(String catName); + /** * Setter for name of the DB. * @param dbName The name of the DB. @@ -48,6 +54,12 @@ public abstract class PartitionSpecProxy { */ public abstract void setTableName(String tableName); + /** + * Get catalog name. + * @return catalog name. + */ + public abstract String getCatName(); + /** * Getter for name of the DB. * @return The name of the DB. @@ -130,6 +142,12 @@ public static interface PartitionIterator extends java.util.Iterator */ public Partition getCurrent(); + /** + * Get the catalog name. + * @return catalog name. + */ + String getCatName(); + /** * Getter for the name of the DB. * @return Name of the DB. 
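[Editor's note, not part of the patch] A hedged sketch of how the new catName accessors on PartitionSpecProxy are meant to be used from the client side; it mirrors the defaulting this diff adds to HiveMetaStoreClient.add_partitions_pspec, and the helper method itself is hypothetical.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
import org.apache.thrift.TException;

public class PartitionSpecCatalogSketch {
  // Hypothetical helper: default the catalog on a proxy before shipping it to the metastore.
  static void addWithCatalog(HiveMetaStoreClient msClient, HiveConf conf, PartitionSpecProxy proxy)
      throws TException {
    if (proxy.getCatName() == null
        && MetaStoreUtils.isCatalogEnabled(conf, msClient.getCurrentCatalog())) {
      // setCatName (added above) pushes the catalog down to the wrapped PartitionSpec(s).
      proxy.setCatName(msClient.getCurrentCatalog());
    }
    msClient.add_partitions_pspec(proxy);
  }
}

For list-backed proxies, setCatName also stamps each contained Partition, matching the loop added in PartitionListComposingSpecProxy above.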
@@ -184,6 +202,7 @@ public static class SimplePartitionWrapperIterator implements PartitionIterator public SimplePartitionWrapperIterator(Partition partition) {this.partition = partition;} @Override public Partition getCurrent() { return partition; } + @Override public String getCatName() { return partition.getCatName(); } @Override public String getDbName() { return partition.getDbName(); } @Override public String getTableName() { return partition.getTableName(); } @Override public Map getParameters() { return partition.getParameters(); } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java index 6a3e147eea5a..8ca77bc354ba 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/partition/spec/PartitionSpecWithSharedSDProxy.java @@ -48,6 +48,11 @@ public int size() { return partitionSpec.getSharedSDPartitionSpec().getPartitionsSize(); } + @Override + public void setCatName(String catName) { + partitionSpec.setCatName(catName); + } + @Override public void setDbName(String dbName) { partitionSpec.setDbName(dbName); @@ -58,6 +63,11 @@ public void setTableName(String tableName) { partitionSpec.setTableName(tableName); } + @Override + public String getCatName() { + return partitionSpec.getCatName(); + } + @Override public String getDbName() { return partitionSpec.getDbName(); @@ -121,7 +131,7 @@ public Partition getCurrent() { StorageDescriptor partSD = new StorageDescriptor(pSpec.getSd()); partSD.setLocation(partSD.getLocation() + partWithoutSD.getRelativePath()); - return new Partition( + Partition p = new Partition( partWithoutSD.getValues(), partitionSpecWithSharedSDProxy.partitionSpec.getDbName(), partitionSpecWithSharedSDProxy.partitionSpec.getTableName(), @@ -130,6 +140,13 @@ public Partition getCurrent() { partSD, partWithoutSD.getParameters() ); + p.setCatName(partitionSpecWithSharedSDProxy.partitionSpec.getCatName()); + return p; + } + + @Override + public String getCatName() { + return partitionSpecWithSharedSDProxy.partitionSpec.getCatName(); } @Override
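[Editor's note, not part of the patch] To wrap up, a sketch of the client-side flow this patch enables, assuming "metastore.catalog.default" is set in the client's HiveConf; the database, table, and catalog names are illustrative.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.thrift.TException;

public class CurrentCatalogSketch {
  public static void main(String[] args) throws MetaException, TException {
    HiveConf conf = new HiveConf();
    conf.set(MetaStoreUtils.CATALOG_DEFAULT, "hive");
    // The constructor now seeds currentCatalog from getDefaultCatalog(conf).
    HiveMetaStoreClient msClient = new HiveMetaStoreClient(conf);
    // Old Thrift calls such as getDatabase()/getTable() now send "@hive#sales" under the covers.
    msClient.getDatabase("sales");
    // Switching catalogs only changes the prefix used on subsequent calls.
    msClient.setCurrentCatalog("spark");
    msClient.getTable("sales", "orders");
  }
}

Because the catalog travels inside the database-name string, none of the pre-existing Thrift method signatures have to change.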