Skip to content

Commit

Permalink
HBASE-26326 CreateTableProcedure fails when FileBasedStoreFileTracker… (
Browse files Browse the repository at this point in the history
apache#3721)

Amend HBASE-26326 - HBASE-21515 has been challenging to backport into this branch.
For HBASE-26326, we actually only need the changes HBASE-21515 added to ReflectionUtils,
so I had given up on attempting to bring in HBASE-21515 and am adding these required changes
as an amend to HBASE-26326.

Change-Id: I82e9e2f44a2b64fa0423531cd1775ce55a1e1607
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Josh Elser <elserj@apache.org>
  • Loading branch information
wchevreuil authored and Josh Elser committed Dec 22, 2021
1 parent fb4997a commit 4d27053
Show file tree
Hide file tree
Showing 11 changed files with 76 additions and 47 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -83,15 +83,20 @@ public static <T> Constructor<T> findConstructor(Class<T> type, Object... paramT

boolean match = true;
for (int i = 0; i < ctorParamTypes.length && match; ++i) {
Class<?> paramType = paramTypes[i].getClass();
match = (!ctorParamTypes[i].isPrimitive()) ? ctorParamTypes[i].isAssignableFrom(paramType) :
((int.class.equals(ctorParamTypes[i]) && Integer.class.equals(paramType)) ||
(long.class.equals(ctorParamTypes[i]) && Long.class.equals(paramType)) ||
(double.class.equals(ctorParamTypes[i]) && Double.class.equals(paramType)) ||
(char.class.equals(ctorParamTypes[i]) && Character.class.equals(paramType)) ||
(short.class.equals(ctorParamTypes[i]) && Short.class.equals(paramType)) ||
(boolean.class.equals(ctorParamTypes[i]) && Boolean.class.equals(paramType)) ||
(byte.class.equals(ctorParamTypes[i]) && Byte.class.equals(paramType)));
if (paramTypes[i] == null) {
match = !ctorParamTypes[i].isPrimitive();
} else {
Class<?> paramType = paramTypes[i].getClass();
match = (!ctorParamTypes[i].isPrimitive()) ?
ctorParamTypes[i].isAssignableFrom(paramType) :
((int.class.equals(ctorParamTypes[i]) && Integer.class.equals(paramType)) ||
(long.class.equals(ctorParamTypes[i]) && Long.class.equals(paramType)) ||
(double.class.equals(ctorParamTypes[i]) && Double.class.equals(paramType)) ||
(char.class.equals(ctorParamTypes[i]) && Character.class.equals(paramType)) ||
(short.class.equals(ctorParamTypes[i]) && Short.class.equals(paramType)) ||
(boolean.class.equals(ctorParamTypes[i]) && Boolean.class.equals(paramType)) ||
(byte.class.equals(ctorParamTypes[i]) && Byte.class.equals(paramType)));
}
}

if (match) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.MasterFileSystem;
Expand Down Expand Up @@ -272,9 +271,8 @@ private void preCreate(final MasterProcedureEnv env)
(newRegions != null ? newRegions.size() : 0));
}

TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
StoreFileTrackerFactory.persistTrackerConfig(env.getMasterConfiguration(), builder);
tableDescriptor = builder.build();
tableDescriptor = StoreFileTrackerFactory.updateWithTrackerConfigs(env.getMasterConfiguration(),
tableDescriptor);

final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
if (cpHost != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,10 @@
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.CompoundConfiguration;
Expand Down Expand Up @@ -170,6 +173,14 @@ public static int getBytesPerChecksum(Configuration conf) {
HFile.DEFAULT_BYTES_PER_CHECKSUM);
}

/**
 * Builds the effective configuration for a store by layering column-family values on top of
 * table-level values on top of the global configuration.
 * <p/>
 * {@link CompoundConfiguration} resolves keys in reverse order of addition, so the global
 * config is added first, then the table overrides, then the column-family overrides.
 * @param conf the global (cluster-level) configuration
 * @param td descriptor supplying table-level overrides
 * @param cfd descriptor supplying column-family-level overrides
 * @return a layered configuration in which column-family values take precedence
 */
public static Configuration createStoreConfiguration(Configuration conf, TableDescriptor td,
    ColumnFamilyDescriptor cfd) {
  CompoundConfiguration layered = new CompoundConfiguration();
  layered.add(conf);
  layered.addBytesMap(td.getValues());
  layered.addStringMap(cfd.getConfiguration());
  layered.addBytesMap(cfd.getValues());
  return layered;
}

public static List<StoreFileInfo> toStoreFileInfo(Collection<HStoreFile> storefiles) {
return storefiles.stream().map(HStoreFile::getFileInfo).collect(Collectors.toList());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,14 @@ class FileBasedStoreFileTracker extends StoreFileTrackerBase {

/**
 * Creates the file-based tracker.
 * @param conf store configuration
 * @param isPrimaryReplica whether this store belongs to the primary region replica
 * @param ctx the store context; may be {@code null} during table creation (see below)
 */
public FileBasedStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
  super(conf, isPrimaryReplica, ctx);
  // CreateTableProcedure needs to instantiate the configured SFT impl in order to update table
  // descriptors with the SFT-impl-specific configs. By the time this happens, the table has no
  // regions nor stores yet, so it can't create a proper StoreContext and passes null instead.
  if (ctx != null) {
    backedFile = new StoreFileListFile(ctx);
  } else {
    backedFile = null;
  }
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,18 +89,6 @@ void set(List<StoreFileInfo> files) {
"Should not call this method on " + getClass().getSimpleName());
}

/**
 * Persists both the source and destination tracker implementation names into the table
 * descriptor, preserving any values the user has already set explicitly.
 */
@Override
public void persistConfiguration(TableDescriptorBuilder builder) {
  // Let the parent record its own tracker settings first.
  super.persistConfiguration(builder);
  // Snapshot the descriptor so we only fill in values that are not already present.
  TableDescriptor current = builder.build();
  String srcValue = current.getValue(SRC_IMPL);
  if (StringUtils.isEmpty(srcValue)) {
    builder.setValue(SRC_IMPL, src.getTrackerName());
  }
  String dstValue = current.getValue(DST_IMPL);
  if (StringUtils.isEmpty(dstValue)) {
    builder.setValue(DST_IMPL, dst.getTrackerName());
  }
}

static Class<? extends StoreFileTracker> getSrcTrackerClass(Configuration conf) {
return StoreFileTrackerFactory.getStoreFileTrackerClassForMigration(conf, SRC_IMPL);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
Expand Down Expand Up @@ -75,13 +76,13 @@ void replace(Collection<StoreFileInfo> compactedFiles, Collection<StoreFileInfo>
StoreFileWriter createWriter(CreateStoreFileWriterParams params) throws IOException;

/**
 * Adds StoreFileTracker-implementation-specific configurations into the table descriptor.
 * <p/>
 * This is used to avoid accidental data loss when changing the cluster level store file tracker
 * implementation, and also possible misconfiguration between master and region servers.
 * <p/>
 * See HBASE-26246 for more details.
 * @param builder The table descriptor builder for the given table.
 * @return the same builder, updated with the tracker-specific configuration values.
 */
TableDescriptorBuilder updateWithTrackerConfigs(TableDescriptorBuilder builder);
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
Expand Down Expand Up @@ -84,11 +83,9 @@ public final void replace(Collection<StoreFileInfo> compactedFiles,
}

@Override
public void persistConfiguration(TableDescriptorBuilder builder) {
TableDescriptor desc = builder.build();
if (StringUtils.isEmpty(desc.getValue(TRACKER_IMPL))) {
builder.setValue(TRACKER_IMPL, getTrackerName());
}
public TableDescriptorBuilder updateWithTrackerConfigs(TableDescriptorBuilder builder) {
builder.setValue(TRACKER_IMPL, getTrackerName());
return builder;
}

protected final String getTrackerName() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
Expand All @@ -27,6 +29,7 @@
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
Expand Down Expand Up @@ -115,16 +118,13 @@ public static StoreFileTracker create(Configuration conf, TableDescriptor td,
ColumnFamilyDescriptor cfd, HRegionFileSystem regionFs) {
StoreContext ctx =
StoreContext.getBuilder().withColumnFamilyDescriptor(cfd).withRegionFileSystem(regionFs)
.withFamilyStoreDirectoryPath(regionFs.getStoreDir(cfd.getNameAsString()))
.withFamilyStoreDirectoryPath(regionFs.getStoreDir(cfd.getNameAsString()))
.build();
.withFamilyStoreDirectoryPath(regionFs.getStoreDir(cfd.getNameAsString())).build();
return StoreFileTrackerFactory.create(mergeConfigurations(conf, td, cfd), true, ctx);
}

/**
 * Layers table- and column-family-level overrides on top of the global configuration.
 * Delegates to {@link StoreUtils#createStoreConfiguration} so every caller merges configs
 * the same way.
 * @param global the cluster-level configuration
 * @param table descriptor supplying table-level values
 * @param family descriptor supplying column-family-level values
 * @return the merged configuration, with family values taking highest precedence
 */
private static Configuration mergeConfigurations(Configuration global, TableDescriptor table,
    ColumnFamilyDescriptor family) {
  return StoreUtils.createStoreConfiguration(global, table, family);
}

static Class<? extends StoreFileTrackerBase>
Expand Down Expand Up @@ -161,12 +161,18 @@ static StoreFileTrackerBase createForMigration(Configuration conf, String config
return ReflectionUtils.newInstance(tracker, conf, isPrimaryReplica, ctx);
}

public static void persistTrackerConfig(Configuration conf, TableDescriptorBuilder builder) {
TableDescriptor tableDescriptor = builder.build();
ColumnFamilyDescriptor cfDesc = tableDescriptor.getColumnFamilies()[0];
StoreContext context = StoreContext.getBuilder().withColumnFamilyDescriptor(cfDesc).build();
StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, true, context);
tracker.persistConfiguration(builder);
public static TableDescriptor updateWithTrackerConfigs(Configuration conf,
TableDescriptor descriptor) {
//CreateTableProcedure needs to instantiate the configured SFT impl, in order to update table
//descriptors with the SFT impl specific configs. By the time this happens, the table has no
//regions nor stores yet, so it can't create a proper StoreContext.
if (StringUtils.isEmpty(descriptor.getValue(TRACKER_IMPL))) {
StoreFileTracker tracker =
StoreFileTrackerFactory.create(conf, true, null);
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(descriptor);
return tracker.updateWithTrackerConfigs(builder).build();
}
return descriptor;
}

// should not use MigrationStoreFileTracker for new family
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
Expand Down Expand Up @@ -105,6 +106,21 @@ public void testCreateWithTrackImpl() throws Exception {
assertEquals(trackerName, htd.getValue(TRACKER_IMPL));
}

// Verifies that creating a table with the FILE-based store file tracker configured at the
// cluster level succeeds and that the tracker implementation name ends up recorded in the
// table descriptor.
@Test
public void testCreateWithFileBasedStoreTrackerImpl() throws Exception {
  final ProcedureExecutor<MasterProcedureEnv> executor = getMasterProcedureExecutor();
  final String fileTracker = StoreFileTrackerFactory.Trackers.FILE.name();
  executor.getEnvironment().getMasterConfiguration()
    .set(StoreFileTrackerFactory.TRACKER_IMPL, fileTracker);
  final TableName tableName = TableName.valueOf(name.getMethodName());
  TableDescriptor descriptor = MasterProcedureTestingUtility.createHTD(tableName, F1);
  final RegionInfo[] regions = ModifyRegionUtils.createRegionInfos(descriptor, null);
  final long procId = ProcedureTestingUtility.submitAndWait(executor,
    new CreateTableProcedure(executor.getEnvironment(), descriptor, regions));
  ProcedureTestingUtility.assertProcNotFailed(executor.getResult(procId));
  descriptor = getMaster().getTableDescriptors().get(tableName);
  assertEquals(fileTracker, descriptor.getValue(TRACKER_IMPL));
}

@Test
public void testCreateWithoutColumnFamily() throws Exception {
final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ public static void setUp() throws Exception {
}

@AfterClass
public static void tearDown() throws IOException {
public static void tearDown() throws Exception {
UTIL.shutdownMiniCluster();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ public class TestStoreFileTracker extends DefaultStoreFileTracker {

public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
super(conf, isPrimaryReplica, ctx);
if (ctx.getRegionFileSystem() != null) {
if (ctx != null && ctx.getRegionFileSystem() != null) {
this.storeId = ctx.getRegionInfo().getEncodedName() + "-" + ctx.getFamily().getNameAsString();
LOG.info("created storeId: {}", storeId);
trackedFiles.computeIfAbsent(storeId, v -> new ArrayList<>());
Expand Down

0 comments on commit 4d27053

Please sign in to comment.