From fb4997a0ec3639d71cb605bee90e752fa29406ed Mon Sep 17 00:00:00 2001
From: Duo Zhang
Date: Fri, 17 Sep 2021 09:40:44 +0800
Subject: [PATCH] HBASE-26280 Use store file tracker when snapshoting (#3685)

Signed-off-by: Wellington Chevreuil
Reviewed-by: Josh Elser

* Fix broken JUnit parameterization

* Also adds a one-liner fix to HBTU to be smarter when dropping tables.

Change-Id: I11bed0526715781d14476207e60ffa13cdbcdeba
---
 .../MergeTableRegionsProcedure.java           |  5 +-
 .../assignment/SplitTableRegionProcedure.java |  5 +-
 .../hbase/regionserver/HRegionFileSystem.java | 10 +---
 .../StoreFileTrackerFactory.java              | 19 +++----
 .../hbase/snapshot/SnapshotManifest.java      | 34 +++++-------
 .../hadoop/hbase/HBaseTestingUtility.java     |  3 ++
 ...apshotFromClientCloneLinksAfterDelete.java |  4 +-
 .../client/TestMobSnapshotFromClient.java     | 13 +++--
 .../hbase/client/TestSnapshotFromClient.java  | 36 ++++++++++---
 ...tSnapshotFromClientWithRegionReplicas.java |  8 +++
 .../hbase/regionserver/TestHStoreFile.java    |  6 +--
 .../snapshot/MobSnapshotTestingUtils.java     | 53 ++++++++++++------
 .../hbase/snapshot/SnapshotTestingUtils.java  |  1 +
 13 files changed, 116 insertions(+), 81 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
index 5a9c5cfc025c..530772a9ade0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -613,9 +613,8 @@ private List<Path> mergeStoreFiles(MasterProcedureEnv env, HRegionFileSystem reg
     List<Path> mergedFiles = new ArrayList<>();
     for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
       String family = hcd.getNameAsString();
-      Configuration trackerConfig =
-        StoreFileTrackerFactory.mergeConfigurations(env.getMasterConfiguration(), htd, hcd);
-      StoreFileTracker tracker = StoreFileTrackerFactory.create(trackerConfig, family, regionFs);
+      StoreFileTracker tracker =
+        StoreFileTrackerFactory.create(env.getMasterConfiguration(), htd, hcd, regionFs);
       final Collection<StoreFileInfo> storeFiles = tracker.load();
       if (storeFiles != null && storeFiles.size() > 0) {
         for (StoreFileInfo storeFileInfo : storeFiles) {
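Reviewer note, not part of the patch: the hunk above (and the split hunk below) replaces the explicit mergeConfigurations() + create(conf, family, regionFs) pair with the new factory overload, so master-side procedures resolve a family's store files through the configured tracker instead of listing the file system. A minimal sketch of the resulting call pattern, with the table/region objects assumed to be supplied by the surrounding procedure:

    import java.io.IOException;
    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
    import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
    import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
    import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

    final class TrackerLoadSketch {
      static List<StoreFileInfo> loadStoreFiles(Configuration masterConf, TableDescriptor htd,
          ColumnFamilyDescriptor hcd, HRegionFileSystem regionFs) throws IOException {
        // The new overload merges table and family overrides internally, so callers no
        // longer invoke mergeConfigurations() themselves.
        StoreFileTracker tracker = StoreFileTrackerFactory.create(masterConf, htd, hcd, regionFs);
        // load() returns whatever the tracker considers the committed store files; for the
        // FILE tracker that is its own manifest rather than a raw directory listing.
        return tracker.load();
      }
    }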
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
index b7a40686423a..f085222ee6ac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
@@ -670,9 +670,8 @@ private Pair<List<Path>, List<Path>> splitStoreFiles(final MasterProcedureEnv en
       new HashMap<String, Collection<StoreFileInfo>>(htd.getColumnFamilyCount());
     for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) {
       String family = cfd.getNameAsString();
-      Configuration trackerConfig = StoreFileTrackerFactory.
-        mergeConfigurations(env.getMasterConfiguration(), htd, htd.getColumnFamily(cfd.getName()));
-      StoreFileTracker tracker = StoreFileTrackerFactory.create(trackerConfig, family, regionFs);
+      StoreFileTracker tracker =
+        StoreFileTrackerFactory.create(env.getMasterConfiguration(), htd, cfd, regionFs);
       Collection<StoreFileInfo> sfis = tracker.load();
       if (sfis == null) {
         continue;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index 3b4ce84ad49a..ade5afea1779 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -598,7 +598,6 @@ void cleanupDaughterRegion(final RegionInfo regionInfo) throws IOException {
    * to the proper location in the filesystem.
    *
    * @param regionInfo daughter {@link org.apache.hadoop.hbase.client.RegionInfo}
-   * @throws IOException
    */
   public Path commitDaughterRegion(final RegionInfo regionInfo, List<Path> allRegionFiles,
       MasterProcedureEnv env) throws IOException {
@@ -625,12 +624,8 @@ private void insertRegionFilesIntoStoreTracker(List<Path> allFiles, MasterProced
     Map<String, List<StoreFileInfo>> fileInfoMap = new HashMap<>();
     for(Path file : allFiles) {
       String familyName = file.getParent().getName();
-      trackerMap.computeIfAbsent(familyName, t -> {
-        Configuration config = StoreFileTrackerFactory.mergeConfigurations(conf, tblDesc,
-          tblDesc.getColumnFamily(Bytes.toBytes(familyName)));
-        return StoreFileTrackerFactory.
-          create(config, familyName, regionFs);
-      });
+      trackerMap.computeIfAbsent(familyName, t -> StoreFileTrackerFactory.create(conf, tblDesc,
+        tblDesc.getColumnFamily(Bytes.toBytes(familyName)), regionFs));
       fileInfoMap.computeIfAbsent(familyName, l -> new ArrayList<>());
       List<StoreFileInfo> infos = fileInfoMap.get(familyName);
       infos.add(new StoreFileInfo(conf, fs, fs.getFileStatus(file)));
@@ -676,7 +671,6 @@ public void createSplitsDir(RegionInfo daughterA, RegionInfo daughterB) throws I
    * this method is invoked on the Master side, then the RegionSplitPolicy will
    * NOT have a reference to a Region.
    * @return Path to created reference.
-   * @throws IOException
    */
  public Path splitStoreFile(RegionInfo hri, String familyName, HStoreFile f, byte[] splitRow,
      boolean top, RegionSplitPolicy splitPolicy) throws IOException {
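Reviewer note, not part of the patch: insertRegionFilesIntoStoreTracker() above groups the committed daughter-region files by family with computeIfAbsent and creates one tracker per family. A standalone JDK-only sketch of that grouping pattern, with illustrative paths and names:

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class GroupByFamilySketch {
      public static void main(String[] args) {
        List<Path> committedFiles = Arrays.asList(
          Paths.get("region1/cf1/file-a"), Paths.get("region1/cf1/file-b"),
          Paths.get("region1/cf2/file-c"));
        Map<String, List<Path>> byFamily = new HashMap<>();
        for (Path file : committedFiles) {
          // computeIfAbsent creates the per-family bucket on first sight, mirroring how the
          // patch lazily creates one StoreFileTracker per column family.
          byFamily.computeIfAbsent(file.getParent().getFileName().toString(),
            family -> new ArrayList<>()).add(file);
        }
        byFamily.forEach((family, files) -> System.out.println(family + " -> " + files));
      }
    }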
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java
index 0de302e8e320..651990615f1f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java
@@ -23,12 +23,10 @@
 import org.apache.hadoop.hbase.CompoundConfiguration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
 import org.apache.hadoop.hbase.regionserver.StoreContext;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -113,18 +111,17 @@ public static StoreFileTracker create(Configuration conf, boolean isPrimaryRepli
    * Used at master side when splitting/merging regions, as we do not have a Store, thus no
    * StoreContext at master side.
    */
-  public static StoreFileTracker create(Configuration conf, String family,
-    HRegionFileSystem regionFs) {
-    ColumnFamilyDescriptorBuilder fDescBuilder =
-      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family));
-    StoreContext ctx = StoreContext.getBuilder().withColumnFamilyDescriptor(fDescBuilder.build())
-      .withRegionFileSystem(regionFs)
-      .withFamilyStoreDirectoryPath(regionFs.getStoreDir(family))
+  public static StoreFileTracker create(Configuration conf, TableDescriptor td,
+    ColumnFamilyDescriptor cfd, HRegionFileSystem regionFs) {
+    StoreContext ctx =
+      StoreContext.getBuilder().withColumnFamilyDescriptor(cfd).withRegionFileSystem(regionFs)
+        .withFamilyStoreDirectoryPath(regionFs.getStoreDir(cfd.getNameAsString()))
+        .withFamilyStoreDirectoryPath(regionFs.getStoreDir(cfd.getNameAsString()))
       .build();
-    return StoreFileTrackerFactory.create(conf, true, ctx);
+    return StoreFileTrackerFactory.create(mergeConfigurations(conf, td, cfd), true, ctx);
   }
 
-  public static Configuration mergeConfigurations(Configuration global, TableDescriptor table,
+  private static Configuration mergeConfigurations(Configuration global, TableDescriptor table,
     ColumnFamilyDescriptor family) {
     return new CompoundConfiguration().add(global).addBytesMap(table.getValues())
       .addStringMap(family.getConfiguration()).addBytesMap(family.getValues());
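Reviewer note, not part of the patch: mergeConfigurations() is now applied inside the factory, which is why it can become private. A rough illustration of the precedence it relies on, assuming that values added later to a CompoundConfiguration override earlier ones (the configuration key is written out only for illustration; in real code use StoreFileTrackerFactory.TRACKER_IMPL):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.CompoundConfiguration;

    public class MergeConfSketch {
      public static void main(String[] args) {
        Configuration global = new Configuration(false);
        global.set("hbase.store.file-tracker.impl", "DEFAULT");

        Map<String, String> familyOverrides = new HashMap<>();
        familyOverrides.put("hbase.store.file-tracker.impl", "FILE");

        Configuration merged = new CompoundConfiguration()
          .add(global)                    // lowest precedence: cluster-wide setting
          .addStringMap(familyOverrides); // highest precedence: per-family override
        // Expected to print FILE under the stated assumption about precedence.
        System.out.println(merged.get("hbase.store.file-tracker.impl"));
      }
    }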
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 79e587ac44b6..bb3d8518181a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -47,7 +47,8 @@
 import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.Threads;
@@ -317,26 +318,19 @@ protected void addRegion(final Path tableDir, final RegionInfo regionInfo, Regio
       // in batches and may miss files being added/deleted. This could be more robust (iteratively
       // checking to see if we have all the files until we are sure), but the limit is currently
       // 1000 files/batch, far more than the number of store files under a single column family.
-      Collection<String> familyNames = regionFs.getFamilies();
-      if (familyNames != null) {
-        for (String familyName: familyNames) {
-          Object familyData = visitor.familyOpen(regionData, Bytes.toBytes(familyName));
-          monitor.rethrowException();
-
-          Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(familyName);
-          if (storeFiles == null) {
-            if (LOG.isDebugEnabled()) {
-              LOG.debug("No files under family: " + familyName);
-            }
-            continue;
-          }
-
-          // 2.1. build the snapshot reference for the store
-          // iterate through all the store's files and create "references".
-          addReferenceFiles(visitor, regionData, familyData, storeFiles, false);
-
-          visitor.familyClose(regionData, familyData);
+      for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) {
+        Object familyData = visitor.familyOpen(regionData, cfd.getName());
+        monitor.rethrowException();
+        StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, htd, cfd, regionFs);
+        List<StoreFileInfo> storeFiles = tracker.load();
+        if (storeFiles.isEmpty()) {
+          LOG.debug("No files under family: {}", cfd.getNameAsString());
+          continue;
         }
+        // 2.1. build the snapshot reference for the store
+        // iterate through all the store's files and create "references".
+        addReferenceFiles(visitor, regionData, familyData, storeFiles, false);
+        visitor.familyClose(regionData, familyData);
       }
       visitor.regionClose(regionData);
     } catch (IOException e) {
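Reviewer note, not part of the patch: with the hunk above, SnapshotManifest.addRegion() walks the table's column families and asks the store file tracker for the committed files, so a table using the FILE tracker can now be snapshotted correctly. A hedged end-to-end sketch of the user-visible effect; the table name, family name, and snapshot name are made up:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

    public class SnapshotFileTrackedTable {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection();
             Admin admin = conn.getAdmin()) {
          TableName tn = TableName.valueOf("demo_table");            // hypothetical table
          TableDescriptor td = TableDescriptorBuilder.newBuilder(tn)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")) // hypothetical family
            // Ask for the FILE-based tracker; DEFAULT keeps the pre-existing behaviour.
            .setValue(StoreFileTrackerFactory.TRACKER_IMPL,
              StoreFileTrackerFactory.Trackers.FILE.name())
            .build();
          admin.createTable(td);
          admin.snapshot("demo_snapshot", tn);                       // hypothetical snapshot name
        }
      }
    }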
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 8c18df73cf3f..ac94572f52c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1950,6 +1950,9 @@ public static void setReplicas(Admin admin, TableName table, int replicaCount)
    * @param tableName existing table
    */
   public void deleteTable(TableName tableName) throws IOException {
+    if (!getAdmin().tableExists(tableName)) {
+      return;
+    }
     try {
       getAdmin().disableTable(tableName);
     } catch (TableNotEnabledException e) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java
index 1cae7a06bcc7..3325e7d6f639 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner;
 import org.apache.hadoop.hbase.mob.MobConstants;
 import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
 import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -91,7 +92,8 @@ public static void setUpBeforeClass() throws Exception {
   @Override
   protected void createTable() throws IOException, InterruptedException {
     MobSnapshotTestingUtils.createMobTable(TEST_UTIL, tableName,
-      SnapshotTestingUtils.getSplitKeys(), getNumReplicas(), DelayFlushCoprocessor.class.getName(),
+      SnapshotTestingUtils.getSplitKeys(), getNumReplicas(),
+      StoreFileTrackerFactory.Trackers.DEFAULT.name(), DelayFlushCoprocessor.class.getName(),
       FAMILY);
   }
 
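Reviewer note, not part of the patch: the HBaseTestingUtility change above is the "one-liner fix to HBTU" from the commit message. A standalone sketch of the idempotent-delete pattern it adopts; the Admin instance is assumed to come from the test's cluster connection:

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.TableNotEnabledException;
    import org.apache.hadoop.hbase.client.Admin;

    public class DeleteTableIfExists {
      public static void deleteTableIfExists(Admin admin, TableName tableName) throws IOException {
        if (!admin.tableExists(tableName)) {
          return; // nothing to drop, e.g. the test never created its table
        }
        try {
          admin.disableTable(tableName);
        } catch (TableNotEnabledException e) {
          // already disabled, fall through and delete
        }
        admin.deleteTable(tableName);
      }
    }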
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java
index cdc41b01e87d..895645fe91e7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java
@@ -20,14 +20,15 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.mob.MobConstants;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
 import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 /**
  * Test create/using/deleting snapshots from the client
@@ -35,13 +36,16 @@
  * This is an end-to-end test for the snapshot utility
  */
 @Category({LargeTests.class, ClientTests.class})
+@RunWith(Parameterized.class)
 public class TestMobSnapshotFromClient extends TestSnapshotFromClient {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestMobSnapshotFromClient.class);
 
-  private static final Logger LOG = LoggerFactory.getLogger(TestMobSnapshotFromClient.class);
+  public TestMobSnapshotFromClient(StoreFileTrackerFactory.Trackers trackerImpl) {
+    super(trackerImpl);
+  }
 
   /**
    * Setup the config for the cluster
@@ -60,6 +64,7 @@ protected static void setupConf(Configuration conf) {
 
   @Override
   protected void createTable() throws Exception {
-    MobSnapshotTestingUtils.createMobTable(UTIL, TABLE_NAME, getNumReplicas(), TEST_FAM);
+    MobSnapshotTestingUtils.createMobTable(UTIL, TABLE_NAME, getNumReplicas(), trackerImpl.name(),
+      TEST_FAM);
   }
 }
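Reviewer note, not part of the patch: the constructor added above is what the "Fix broken JUnit parameterization" bullet referss to; with the Parameterized runner, every subclass of a parameterized base test must declare a matching constructor that forwards the parameters, otherwise the runner cannot instantiate it. A minimal, self-contained JUnit 4 illustration with made-up class names:

    import java.util.Arrays;
    import java.util.List;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;

    @RunWith(Parameterized.class)
    public class BaseTrackerTest {
      protected final String trackerImpl;

      @Parameters(name = "{index}: tracker={0}")
      public static List<Object[]> params() {
        return Arrays.asList(new Object[] { "DEFAULT" }, new Object[] { "FILE" });
      }

      public BaseTrackerTest(String trackerImpl) {
        this.trackerImpl = trackerImpl;
      }

      @Test
      public void testSomething() {
        // Runs once per tracker implementation.
        System.out.println("running with tracker " + trackerImpl);
      }
    }

    // A subclass inherits @RunWith and @Parameters; it only needs the forwarding constructor.
    class MobTrackerTest extends BaseTrackerTest {
      public MobTrackerTest(String trackerImpl) {
        super(trackerImpl);
      }
    }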
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
index 65704ce3f190..a3c9a7831584 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
@@ -23,6 +23,7 @@
 import static org.junit.Assert.fail;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.regex.Pattern;
 import org.apache.hadoop.conf.Configuration;
@@ -33,9 +34,12 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNameTestRule;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
 import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
 import org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException;
 import org.apache.hadoop.hbase.snapshot.SnapshotManifestV1;
@@ -52,7 +56,10 @@
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,6 +73,7 @@
  * This is an end-to-end test for the snapshot utility
  */
 @Category({LargeTests.class, ClientTests.class})
+@RunWith(Parameterized.class)
 public class TestSnapshotFromClient {
 
   @ClassRule
@@ -83,7 +91,19 @@ public class TestSnapshotFromClient {
   private static final Pattern MATCH_ALL = Pattern.compile(".*");
 
   @Rule
-  public TestName name = new TestName();
+  public TableNameTestRule name = new TableNameTestRule();
+
+  StoreFileTrackerFactory.Trackers trackerImpl;
+
+  @Parameters(name = "{index}: tracker={0}")
+  public static List<Object[]> params() {
+    return Arrays.asList(new Object[] { StoreFileTrackerFactory.Trackers.DEFAULT },
+      new Object[] { StoreFileTrackerFactory.Trackers.FILE });
+  }
+
+  public TestSnapshotFromClient(StoreFileTrackerFactory.Trackers tracker) {
+    this.trackerImpl = tracker;
+  }
 
   /**
    * Setup the config for the cluster
@@ -110,7 +130,6 @@ protected static void setupConf(Configuration conf) {
     conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
     conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
       ConstantSizeRegionSplitPolicy.class.getName());
-
   }
 
   @Before
@@ -119,9 +138,10 @@ public void setup() throws Exception {
   }
 
   protected void createTable() throws Exception {
-    HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
-    htd.setRegionReplication(getNumReplicas());
-    UTIL.createTable(htd, new byte[][]{TEST_FAM}, null);
+    TableDescriptor htd =
+      TableDescriptorBuilder.newBuilder(TABLE_NAME).setRegionReplication(getNumReplicas())
+        .setValue(StoreFileTrackerFactory.TRACKER_IMPL, trackerImpl.name()).build();
+    UTIL.createTable(htd, new byte[][] { TEST_FAM }, null);
   }
 
   protected int getNumReplicas() {
@@ -326,7 +346,7 @@ public void testOfflineTableSnapshotWithEmptyRegions() throws Exception {
   @Test
   public void testListTableSnapshots() throws Exception {
     Admin admin = null;
-    final TableName tableName = TableName.valueOf(name.getMethodName());
+    final TableName tableName = name.getTableName();
     try {
       admin = UTIL.getAdmin();
 
@@ -411,7 +431,7 @@ public void testListTableSnapshotsWithRegex() throws Exception {
   @Test
   public void testDeleteTableSnapshots() throws Exception {
     Admin admin = null;
-    final TableName tableName = TableName.valueOf(name.getMethodName());
+    final TableName tableName = name.getTableName();
     try {
       admin = UTIL.getAdmin();
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClientWithRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClientWithRegionReplicas.java
index 6b7f45293503..ab9aa9a798f9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClientWithRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClientWithRegionReplicas.java
@@ -18,18 +18,26 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 @Category({LargeTests.class, ClientTests.class})
+@RunWith(Parameterized.class)
 public class TestSnapshotFromClientWithRegionReplicas extends TestSnapshotFromClient {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestSnapshotFromClientWithRegionReplicas.class);
 
+  public TestSnapshotFromClientWithRegionReplicas(StoreFileTrackerFactory.Trackers trackerImpl) {
+    super(trackerImpl);
+  }
+
   @Override
   protected int getNumReplicas() {
     return 3;
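Reviewer note, not part of the patch: the switch from TestName to TableNameTestRule above appears to exist because parameterized runs append a suffix such as "[1: tracker=FILE]" to the JUnit method name, which is not a legal HBase table name, while name.getTableName() yields a usable per-test name. A small sketch of the rule in isolation, under that assumption:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.TableNameTestRule;
    import org.junit.Rule;
    import org.junit.Test;

    public class TableNamePerTestSketch {
      @Rule
      public TableNameTestRule name = new TableNameTestRule();

      @Test
      public void testUsesRuleDerivedName() {
        // A valid table name derived from the current test, safe to use even when the
        // test is run by the Parameterized runner.
        TableName tableName = name.getTableName();
        System.out.println("would create and snapshot table " + tableName);
      }
    }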
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 8194ed3d208f..eff05c59b36b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -61,6 +61,7 @@
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -1110,10 +1111,9 @@ private Path splitStoreFile(final HRegionFileSystem regionFs, final HRegionInfo
     when(mockEnv.getMasterConfiguration()).thenReturn(new Configuration());
     TableDescriptors mockTblDescs = mock(TableDescriptors.class);
     when(mockServices.getTableDescriptors()).thenReturn(mockTblDescs);
-    TableDescriptor mockTblDesc = mock(TableDescriptor.class);
+    TableDescriptor mockTblDesc = TableDescriptorBuilder.newBuilder(hri.getTable())
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
     when(mockTblDescs.get(any())).thenReturn(mockTblDesc);
-    ColumnFamilyDescriptor mockCfDesc = mock(ColumnFamilyDescriptor.class);
-    when(mockTblDesc.getColumnFamily(any())).thenReturn(mockCfDesc);
     Path regionDir = regionFs.commitDaughterRegion(hri, splitFiles, mockEnv);
     return new Path(new Path(regionDir, family), path.getName());
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
index 9ee433631210..b58f84355194 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java
@@ -37,6 +37,7 @@
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import org.junit.Assert;
@@ -45,29 +46,40 @@ public class MobSnapshotTestingUtils {
   /**
    * Create the Mob Table.
    */
-  public static void createMobTable(final HBaseTestingUtility util,
-      final TableName tableName, int regionReplication,
-      final byte[]... families) throws IOException, InterruptedException {
-    createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(),
-      regionReplication, families);
+  public static void createMobTable(final HBaseTestingUtility util, final TableName tableName,
+    int regionReplication, final byte[]... families) throws IOException, InterruptedException {
+    createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(), regionReplication,
+      StoreFileTrackerFactory.Trackers.DEFAULT.name(), families);
+  }
+
+  public static void createMobTable(final HBaseTestingUtility util, final TableName tableName,
+    int regionReplication, String storeFileTracker, final byte[]... families)
+    throws IOException, InterruptedException {
+    createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(), regionReplication,
+      storeFileTracker, families);
   }
 
-  public static void createPreSplitMobTable(final HBaseTestingUtility util,
-      final TableName tableName, int nRegions, final byte[]... families)
-      throws IOException, InterruptedException {
-    createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(nRegions),
-      1, families);
+  public static void createPreSplitMobTable(final HBaseTestingUtility util, final TableName tableName,
+    int nRegions, final byte[]... families) throws IOException, InterruptedException {
+    createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(nRegions), 1, families);
+  }
+
+  public static void createMobTable(final HBaseTestingUtility util, final TableName tableName,
+    final byte[][] splitKeys, int regionReplication, final byte[]... families)
+    throws IOException, InterruptedException {
+    createMobTable(util, tableName, splitKeys, regionReplication,
+      StoreFileTrackerFactory.Trackers.DEFAULT.name(), families);
   }
 
   public static void createMobTable(final HBaseTestingUtility util, final TableName tableName,
-    final byte[][] splitKeys, int regionReplication, final byte[]... families)
-    throws IOException, InterruptedException {
-    createMobTable(util, tableName, splitKeys, regionReplication, null, families);
+    final byte[][] splitKeys, int regionReplication, String storeFileTracker,
+    final byte[]... families) throws IOException, InterruptedException {
+    createMobTable(util, tableName, splitKeys, regionReplication, storeFileTracker, null, families);
   }
 
-  public static void createMobTable(HBaseTestingUtility util, TableName tableName,
-    byte[][] splitKeys, int regionReplication, String cpClassName, byte[]... families)
-    throws IOException, InterruptedException {
+  public static void createMobTable(HBaseTestingUtility util, TableName tableName, byte[][] splitKeys,
+    int regionReplication, String storeFileTracker, String cpClassName, byte[]... families)
+    throws IOException, InterruptedException {
     TableDescriptorBuilder builder =
       TableDescriptorBuilder.newBuilder(tableName).setRegionReplication(regionReplication);
     for (byte[] family : families) {
@@ -77,6 +89,7 @@ public static void createMobTable(HBaseTestingUtility util, TableName tableName,
     if (!StringUtils.isBlank(cpClassName)) {
       builder.setCoprocessor(cpClassName);
     }
+    builder.setValue(StoreFileTrackerFactory.TRACKER_IMPL, storeFileTracker);
     util.getAdmin().createTable(builder.build(), splitKeys);
     SnapshotTestingUtils.waitForTableToBeOnline(util, tableName);
     assertEquals((splitKeys.length + 1) * regionReplication,
@@ -92,8 +105,8 @@ public static void createMobTable(HBaseTestingUtility util, TableName tableName,
    * @return An Table instance for the created table.
    * @throws IOException
    */
-  public static Table createMobTable(final HBaseTestingUtility util,
-      final TableName tableName, final byte[]... families) throws IOException {
+  public static Table createMobTable(final HBaseTestingUtility util, final TableName tableName,
+    final byte[]... families) throws IOException {
     TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
     for (byte[] family : families) {
       // Disable blooms (they are on by default as of 0.95) but we disable them
@@ -135,8 +148,8 @@ public static int countMobRows(final Table table, final byte[]... families) thro
     }
   }
 
-  public static void verifyMobRowCount(final HBaseTestingUtility util,
-      final TableName tableName, long expectedRows) throws IOException {
+  public static void verifyMobRowCount(final HBaseTestingUtility util, final TableName tableName,
+    long expectedRows) throws IOException {
     Table table =
       ConnectionFactory.createConnection(util.getConfiguration()).getTable(tableName);
     try {
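Reviewer note, not part of the patch: the overloads above let mob tests choose the tracker by name while defaulting to DEFAULT. A usage sketch with a hypothetical test helper; the family and snapshot names are made up, and the HBaseTestingUtility is assumed to be backed by a running mini cluster:

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
    import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MobTrackerSnapshotSketch {
      public static void createFileTrackedMobTable(HBaseTestingUtility util, TableName tableName)
          throws Exception {
        byte[] family = Bytes.toBytes("mobcf"); // illustrative family name
        // regionReplication = 1; tracker name comes from the same enum the tests use.
        MobSnapshotTestingUtils.createMobTable(util, tableName, 1,
          StoreFileTrackerFactory.Trackers.FILE.name(), family);
        util.getAdmin().snapshot("mob_snapshot", tableName); // hypothetical snapshot name
      }
    }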
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 423b33614746..0b127ab17e49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -550,6 +550,7 @@ private Path[] addRegion(final SnapshotProtos.SnapshotDescription desc) throws I
       RegionData regionData = tableRegions[this.snapshotted++];
       ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(desc.getName());
       SnapshotManifest manifest = SnapshotManifest.create(conf, fs, snapshotDir, desc, monitor);
+      manifest.addTableDescriptor(htd);
       manifest.addRegion(regionData.tableDir, regionData.hri);
       return regionData.files;
     }
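Reviewer note, not part of the patch: the one-line test change above matters because addRegion() now iterates the table's column families and builds a tracker per family, so the manifest has to know the TableDescriptor before any region is added. A brief sketch of that ordering; all arguments are assumed to be supplied by the surrounding snapshot or test code:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos;
    import org.apache.hadoop.hbase.snapshot.SnapshotManifest;

    final class ManifestOrderSketch {
      static void addRegionToManifest(Configuration conf, FileSystem fs, Path snapshotDir,
          SnapshotProtos.SnapshotDescription desc, ForeignExceptionDispatcher monitor,
          TableDescriptor htd, Path tableDir, RegionInfo regionInfo) throws IOException {
        SnapshotManifest manifest = SnapshotManifest.create(conf, fs, snapshotDir, desc, monitor);
        manifest.addTableDescriptor(htd);         // must happen before addRegion() after this patch
        manifest.addRegion(tableDir, regionInfo); // consults htd and the per-family tracker
      }
    }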