HBASE-24507 Remove HTableDescriptor and HColumnDescriptor (#2186)
Signed-off-by: stack <stack@apache.org>
Signed-off-by: Viraj Jasani <vjasani@apache.org>
Signed-off-by: tedyu <yuzhihong@gmail.com>
Apache9 authored Aug 4, 2020
1 parent 148c185 commit d2f5a5f
Showing 182 changed files with 1,960 additions and 5,132 deletions.
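
The change across these files follows one pattern: the deprecated HTableDescriptor/HColumnDescriptor classes (and the internal Modifyable* descriptors the tests had been constructing directly) are replaced by immutable TableDescriptor/ColumnFamilyDescriptor instances assembled through their builders. Below is a minimal before/after sketch of table creation under that pattern; the enclosing class, the table name "t1", and the family name "f1" are illustrative and not part of the commit.

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class DescriptorMigrationSketch {
  // Hypothetical helper; "t1" and "f1" are illustrative names.
  static void createTable(Connection conn) throws IOException {
    try (Admin admin = conn.getAdmin()) {
      // Before this commit (classes now removed):
      //   HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("t1"));
      //   desc.addFamily(new HColumnDescriptor("f1"));
      //   admin.createTable(desc);

      // After: descriptors are immutable and assembled through builders.
      TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("f1")))
        .build();
      admin.createTable(desc);
    }
  }
}
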
@@ -110,10 +110,8 @@ static void createNamespaceAndTable(final Admin admin) throws IOException {
         + "], with one Column Family ["
         + Bytes.toString(MY_COLUMN_FAMILY_NAME) + "].");
 
-    admin.createTable(new TableDescriptorBuilder.ModifyableTableDescriptor(MY_TABLE_NAME)
-      .setColumnFamily(
-        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(
-          MY_COLUMN_FAMILY_NAME)));
+    admin.createTable(TableDescriptorBuilder.newBuilder(MY_TABLE_NAME)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(MY_COLUMN_FAMILY_NAME)).build());
   }
 }
 
@@ -47,13 +47,13 @@
 import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.cleaner.LogCleaner;
 import org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner;
@@ -84,7 +84,7 @@ public class TestBackupBase {
   protected static Configuration conf2;
 
   protected static TableName table1 = TableName.valueOf("table1");
-  protected static TableDescriptorBuilder.ModifyableTableDescriptor table1Desc;
+  protected static TableDescriptor table1Desc;
   protected static TableName table2 = TableName.valueOf("table2");
   protected static TableName table3 = TableName.valueOf("table3");
   protected static TableName table4 = TableName.valueOf("table4");
@@ -428,20 +428,17 @@ protected static void createTables() throws Exception {
     ha.createNamespace(desc3);
     ha.createNamespace(desc4);
 
-    TableDescriptorBuilder.ModifyableTableDescriptor desc =
-      new TableDescriptorBuilder.ModifyableTableDescriptor(table1);
-    ColumnFamilyDescriptor familyDescriptor =
-      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(famName);
-    desc.setColumnFamily(familyDescriptor);
+    TableDescriptor desc = TableDescriptorBuilder.newBuilder(table1)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(famName)).build();
     ha.createTable(desc);
     table1Desc = desc;
     Connection conn = ConnectionFactory.createConnection(conf1);
     Table table = conn.getTable(table1);
     loadTable(table);
     table.close();
     table2 = TableName.valueOf("ns2:test-" + tid + 1);
-    desc = new TableDescriptorBuilder.ModifyableTableDescriptor(table2);
-    desc.setColumnFamily(familyDescriptor);
+    desc = TableDescriptorBuilder.newBuilder(table2)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(famName)).build();
     ha.createTable(desc);
     table = conn.getTable(table2);
     loadTable(table);
@@ -34,6 +34,8 @@
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -82,14 +84,11 @@ public void TestIncBackupRestore() throws Exception {
     final byte[] fam3Name = Bytes.toBytes("f3");
     final byte[] mobName = Bytes.toBytes("mob");
 
-    table1Desc.setColumnFamily(
-      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam3Name));
-    ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor mobHcd =
-      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(mobName);
-    mobHcd.setMobEnabled(true);
-    mobHcd.setMobThreshold(5L);
-    table1Desc.setColumnFamily(mobHcd);
-    HBaseTestingUtility.modifyTableSync(TEST_UTIL.getAdmin(), table1Desc);
+    TableDescriptor newTable1Desc = TableDescriptorBuilder.newBuilder(table1Desc)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam3Name))
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(mobName).setMobEnabled(true)
+        .setMobThreshold(5L).build()).build();
+    TEST_UTIL.getAdmin().modifyTable(newTable1Desc);
 
     try (Connection conn = ConnectionFactory.createConnection(conf1)) {
       int NB_ROWS_FAM3 = 6;
@@ -150,13 +149,12 @@ public void TestIncBackupRestore() throws Exception {
       assertTrue(checkSucceeded(backupIdIncMultiple));
 
       // add column family f2 to table1
-      final byte[] fam2Name = Bytes.toBytes("f2");
-      table1Desc.setColumnFamily(
-        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam2Name));
-
       // drop column family f3
-      table1Desc.removeColumnFamily(fam3Name);
-      HBaseTestingUtility.modifyTableSync(TEST_UTIL.getAdmin(), table1Desc);
+      final byte[] fam2Name = Bytes.toBytes("f2");
+      newTable1Desc = TableDescriptorBuilder.newBuilder(newTable1Desc)
+        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam2Name)).removeColumnFamily(fam3Name)
+        .build();
+      TEST_UTIL.getAdmin().modifyTable(newTable1Desc);
 
       int NB_ROWS_FAM2 = 7;
       Table t3 = insertIntoTable(conn, table1, fam2Name, 2, NB_ROWS_FAM2);
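
Since the builder-produced descriptors are immutable, the tests above no longer mutate table1Desc and call HBaseTestingUtility.modifyTableSync; they derive a new descriptor from the existing one and pass it to Admin.modifyTable. A minimal sketch of that rebuild pattern, reusing the family names and MOB threshold from the hunk above (the class and method names are illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class ModifyTableSketch {
  // Builds a new, immutable descriptor from the current one instead of mutating it in place.
  static void addMobFamilyAndDropF3(Admin admin, TableDescriptor current) throws IOException {
    TableDescriptor updated = TableDescriptorBuilder.newBuilder(current)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("mob"))
        .setMobEnabled(true)
        .setMobThreshold(5L)
        .build())
      .removeColumnFamily(Bytes.toBytes("f3"))
      .build();
    // Admin.modifyTable replaces the HBaseTestingUtility.modifyTableSync helper used before.
    admin.modifyTable(updated);
  }
}
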
@@ -24,7 +24,6 @@
 import java.util.Collection;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
 import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
@@ -37,6 +36,8 @@
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
@@ -76,16 +77,15 @@ public TestIncrementalBackupWithFailures(Boolean b) {
   // implement all test cases in 1 test since incremental backup/restore has dependencies
   @Test
   public void testIncBackupRestore() throws Exception {
-
     int ADD_ROWS = 99;
     // #1 - create full backup for all tables
     LOG.info("create full backup image for all tables");
 
     List<TableName> tables = Lists.newArrayList(table1, table2);
     final byte[] fam3Name = Bytes.toBytes("f3");
-    table1Desc.setColumnFamily(
-      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam3Name));
-    HBaseTestingUtility.modifyTableSync(TEST_UTIL.getAdmin(), table1Desc);
+    TableDescriptor newTable1Desc = TableDescriptorBuilder.newBuilder(table1Desc)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam3Name)).build();
+    TEST_UTIL.getAdmin().modifyTable(newTable1Desc);
 
     Connection conn = ConnectionFactory.createConnection(conf1);
     int NB_ROWS_FAM3 = 6;
@@ -21,7 +21,6 @@
 
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
-
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
@@ -32,18 +31,21 @@
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
 @Category(LargeTests.class)
 public class TestRemoteBackup extends TestBackupBase {
 
@@ -93,16 +95,13 @@ public void testFullBackupRemote() throws Exception {
       }
     });
     t.start();
-
-    table1Desc.setColumnFamily(
-      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam3Name));
     // family 2 is MOB enabled
-    ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
-      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(fam2Name);
-    familyDescriptor.setMobEnabled(true);
-    familyDescriptor.setMobThreshold(0L);
-    table1Desc.setColumnFamily(familyDescriptor);
-    TEST_UTIL.getAdmin().modifyTable(table1Desc);
+    TableDescriptor newTable1Desc = TableDescriptorBuilder.newBuilder(table1Desc)
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam3Name))
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(fam2Name).setMobEnabled(true)
+        .setMobThreshold(0L).build())
+      .build();
+    TEST_UTIL.getAdmin().modifyTable(newTable1Desc);
 
     SnapshotTestingUtils.loadData(TEST_UTIL, table1, 50, fam2Name);
     Table t1 = conn.getTable(table1);