HBASE-26586 Should not rely on the global config when setting SFT implementation for a table while upgrading #4006

Merged 2 commits on Jan 7, 2022
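The gist of the change: a table descriptor created before the store file tracker (SFT) work carries no `StoreFileTrackerFactory.TRACKER_IMPL` value, and whatever data that table already has on disk was necessarily written by the DEFAULT tracker. Filling in the missing value from the current global configuration, which an operator may already have switched to FILE, would mislabel such tables. A minimal before/after sketch of that behavior, not the patch itself, using the HBase classes that appear in the diff below:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

/** Before/after sketch of the behavior this PR changes; not the patch itself. */
public final class SftUpgradeSketch {

  // Roughly what relying on the global config amounted to: whatever impl the
  // cluster-wide configuration names (possibly FILE) gets stamped onto a table
  // that has no tracker recorded in its descriptor.
  static TableDescriptor fromGlobalConfig(Configuration conf, TableDescriptor current) {
    String impl = conf.get(StoreFileTrackerFactory.TRACKER_IMPL,
      StoreFileTrackerFactory.Trackers.DEFAULT.name());
    return TableDescriptorBuilder.newBuilder(current)
      .setValue(StoreFileTrackerFactory.TRACKER_IMPL, impl).build();
  }

  // What the PR does instead: a missing tracker impl can only mean the table
  // predates SFT, and such tables were written with the DEFAULT tracker.
  static TableDescriptor alwaysDefault(TableDescriptor current) {
    return TableDescriptorBuilder.newBuilder(current)
      .setValue(StoreFileTrackerFactory.TRACKER_IMPL,
        StoreFileTrackerFactory.Trackers.DEFAULT.name()).build();
  }
}
```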
RollingUpgradeChore.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.regionserver.storefiletracker.MigrateStoreFileTrackerProcedure;
+import org.apache.hadoop.hbase.regionserver.storefiletracker.InitializeStoreFileTrackerProcedure;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
@@ -60,7 +60,7 @@ public class RollingUpgradeChore extends ScheduledChore {
private final static Logger LOG = LoggerFactory.getLogger(RollingUpgradeChore.class);
ProcedureExecutor<MasterProcedureEnv> procedureExecutor;
private TableDescriptors tableDescriptors;
-private List<MigrateStoreFileTrackerProcedure> processingProcs = new ArrayList<>();
+private List<InitializeStoreFileTrackerProcedure> processingProcs = new ArrayList<>();

public RollingUpgradeChore(MasterServices masterServices) {
this(masterServices.getConfiguration(), masterServices.getMasterProcedureExecutor(),
@@ -89,9 +89,9 @@ protected void chore() {
}

private boolean isCompletelyMigrateSFT(int concurrentCount){
-Iterator<MigrateStoreFileTrackerProcedure> iter = processingProcs.iterator();
+Iterator<InitializeStoreFileTrackerProcedure> iter = processingProcs.iterator();
while(iter.hasNext()){
-MigrateStoreFileTrackerProcedure proc = iter.next();
+InitializeStoreFileTrackerProcedure proc = iter.next();
if(procedureExecutor.isFinished(proc.getProcId())){
iter.remove();
}
@@ -120,8 +120,8 @@ private boolean isCompletelyMigrateSFT(int concurrentCount){

for (Map.Entry<String, TableDescriptor> entry : migrateSFTTables.entrySet()) {
TableDescriptor tableDescriptor = entry.getValue();
-MigrateStoreFileTrackerProcedure proc =
-  new MigrateStoreFileTrackerProcedure(procedureExecutor.getEnvironment(), tableDescriptor);
+InitializeStoreFileTrackerProcedure proc = new InitializeStoreFileTrackerProcedure(
+  procedureExecutor.getEnvironment(), tableDescriptor);
procedureExecutor.submitProcedure(proc);
processingProcs.add(proc);
}
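The chore hunks above are fragmented, so the throttling they implement is easy to miss: keep the submitted procedures in a list, prune the ones the procedure executor reports as finished, and only submit more while the list stays under the concurrency limit. A generic sketch of that pattern under stated assumptions; `Tracker` and `ThrottledSubmitter` are hypothetical stand-ins, not HBase APIs:

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

// Hypothetical stand-in for the procedure executor used by the chore.
interface Tracker<P> {
  boolean isFinished(P proc);
  void submit(P proc);
}

class ThrottledSubmitter<P> {
  private final List<P> inFlight = new ArrayList<>();
  private final Tracker<P> tracker;
  private final int maxConcurrent; // plays the role of concurrentCount in the chore

  ThrottledSubmitter(Tracker<P> tracker, int maxConcurrent) {
    this.tracker = tracker;
    this.maxConcurrent = maxConcurrent;
  }

  // Loosely mirrors isCompletelyMigrateSFT(): drop finished procedures from
  // the list, then report whether there is room to submit more this round.
  boolean hasCapacity() {
    Iterator<P> iter = inFlight.iterator();
    while (iter.hasNext()) {
      if (tracker.isFinished(iter.next())) {
        iter.remove();
      }
    }
    return inFlight.size() < maxConcurrent;
  }

  // Mirrors the submit loop: hand the procedure to the executor and remember it.
  void submit(P proc) {
    tracker.submit(proc);
    inFlight.add(proc);
  }
}
```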
MigrateStoreFileTrackerProcedure.java → InitializeStoreFileTrackerProcedure.java
@@ -19,28 +19,32 @@

import java.util.Optional;
import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.ModifyTableDescriptorProcedure;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;

/**
- * Procedure for migrating StoreFileTracker information to table descriptor.
+ * Procedure for setting StoreFileTracker information to table descriptor.
*/
@InterfaceAudience.Private
-public class MigrateStoreFileTrackerProcedure extends ModifyTableDescriptorProcedure {
+public class InitializeStoreFileTrackerProcedure extends ModifyTableDescriptorProcedure {

-public MigrateStoreFileTrackerProcedure(){}
+public InitializeStoreFileTrackerProcedure(){}

-public MigrateStoreFileTrackerProcedure(MasterProcedureEnv env, TableDescriptor unmodified) {
+public InitializeStoreFileTrackerProcedure(MasterProcedureEnv env, TableDescriptor unmodified) {
super(env, unmodified);
}

@Override
protected Optional<TableDescriptor> modify(MasterProcedureEnv env, TableDescriptor current) {
if (StringUtils.isEmpty(current.getValue(StoreFileTrackerFactory.TRACKER_IMPL))) {
+// no tracker impl means it is a table created in previous version, the tracker impl can only
+// be default.
TableDescriptor td =
-  StoreFileTrackerFactory.updateWithTrackerConfigs(env.getMasterConfiguration(), current);
+  TableDescriptorBuilder.newBuilder(current).setValue(StoreFileTrackerFactory.TRACKER_IMPL,
+    StoreFileTrackerFactory.Trackers.DEFAULT.name()).build();
return Optional.of(td);
}
return Optional.empty();
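Pieced together from the hunks above, the `modify` implementation in the renamed `InitializeStoreFileTrackerProcedure` ends up reading roughly as follows (reconstructed from the diff, not a verbatim copy of the file; `StringUtils` is the `org.apache.hadoop.hbase.procedure2.util.StringUtils` imported above):

```java
@Override
protected Optional<TableDescriptor> modify(MasterProcedureEnv env, TableDescriptor current) {
  if (StringUtils.isEmpty(current.getValue(StoreFileTrackerFactory.TRACKER_IMPL))) {
    // No tracker impl recorded: the table was created by a previous version,
    // so its tracker can only be DEFAULT, regardless of the global config.
    TableDescriptor td =
      TableDescriptorBuilder.newBuilder(current).setValue(StoreFileTrackerFactory.TRACKER_IMPL,
        StoreFileTrackerFactory.Trackers.DEFAULT.name()).build();
    return Optional.of(td);
  }
  return Optional.empty();
}
```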
TestMigrateStoreFileTracker.java → TestInitializeStoreFileTracker.java
@@ -18,6 +18,8 @@
*/
package org.apache.hadoop.hbase.master.migrate;

+import static org.junit.Assert.assertEquals;

import java.io.IOException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
@@ -30,6 +32,7 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
@@ -39,11 +42,11 @@
import org.junit.Test;
import org.junit.experimental.categories.Category;

-@Category(MediumTests.class)
-public class TestMigrateStoreFileTracker {
+@Category({ MediumTests.class, MasterTests.class })
+public class TestInitializeStoreFileTracker {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestMigrateStoreFileTracker.class);
+    HBaseClassTestRule.forClass(TestInitializeStoreFileTracker.class);
private final static String[] tables = new String[] { "t1", "t2", "t3", "t4", "t5", "t6" };
private final static String famStr = "f1";
private final static byte[] fam = Bytes.toBytes(famStr);
@@ -55,9 +58,12 @@ public class TestMigrateStoreFileTracker {
@Before
public void setUp() throws Exception {
conf = HBaseConfiguration.create();
-//Speed up the launch of RollingUpgradeChore
+// Speed up the launch of RollingUpgradeChore
conf.setInt(RollingUpgradeChore.ROLLING_UPGRADE_CHORE_PERIOD_SECONDS_KEY, 1);
conf.setLong(RollingUpgradeChore.ROLLING_UPGRADE_CHORE_DELAY_SECONDS_KEY, 1);
+// Set the default implementation to file instead of default, to confirm we will not set SFT to
+// file
+conf.set(StoreFileTrackerFactory.TRACKER_IMPL, StoreFileTrackerFactory.Trackers.FILE.name());
HTU = new HBaseTestingUtil(conf);
HTU.startMiniCluster();
}
@@ -88,7 +94,7 @@ public void testMigrateStoreFileTracker() throws IOException, InterruptedExcepti
HTU.getMiniHBaseCluster().stopMaster(0).join();
HTU.getMiniHBaseCluster().startMaster();
HTU.getMiniHBaseCluster().waitForActiveAndReadyMaster(30000);
-//wait until all tables have been migrated
+// wait until all tables have been migrated
TableDescriptors tds = HTU.getMiniHBaseCluster().getMaster().getTableDescriptors();
HTU.waitFor(30000, () -> {
try {
Expand All @@ -103,5 +109,10 @@ public void testMigrateStoreFileTracker() throws IOException, InterruptedExcepti
return false;
}
});
+for (String table : tables) {
+  TableDescriptor td = tds.get(TableName.valueOf(table));
+  assertEquals(StoreFileTrackerFactory.Trackers.DEFAULT.name(),
+    td.getValue(StoreFileTrackerFactory.TRACKER_IMPL));
+}
}
}
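The loop added at the end of the test is the key assertion: even though the test's global configuration sets `StoreFileTrackerFactory.TRACKER_IMPL` to FILE, every table created before the restart must come out of the chore with DEFAULT. For checking the same property on a running cluster rather than in a mini-cluster test, something along these lines should work with the standard client API (a hedged example; the table name "t1" is borrowed from the test, and the raw key string is assumed to be what `TRACKER_IMPL` resolves to):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

// Reads back the per-table SFT setting after the upgrade chore has run.
public final class CheckSftSetting {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      TableDescriptor td = admin.getDescriptor(TableName.valueOf("t1"));
      // Expected: DEFAULT for tables created before the upgrade, even when the
      // cluster-wide configuration points at FILE.
      System.out.println(td.getValue("hbase.store.file-tracker.impl"));
    }
  }
}
```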