
Commit

The expired snapshot can be read by CopyTable or ExportSnapshot (branch-2)
guluo2016 committed Jul 15, 2024
1 parent 9dee538 commit 1a90b34
Showing 5 changed files with 194 additions and 40 deletions.
ExportSnapshot.java
@@ -79,6 +79,7 @@
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
@@ -137,9 +138,9 @@ static final class Options {
static final Option NO_CHECKSUM_VERIFY = new Option(null, "no-checksum-verify", false,
"Do not verify checksum, use name+length only.");
static final Option NO_TARGET_VERIFY = new Option(null, "no-target-verify", false,
- "Do not verify the integrity of the exported snapshot.");
- static final Option NO_SOURCE_VERIFY =
- new Option(null, "no-source-verify", false, "Do not verify the source of the snapshot.");
+ "Do not verify the exported snapshot's expiration status and integrity.");
+ static final Option NO_SOURCE_VERIFY = new Option(null, "no-source-verify", false,
+ "Do not verify the source snapshot's expiration status and integrity.");
static final Option OVERWRITE =
new Option(null, "overwrite", false, "Rewrite the snapshot manifest if already exists.");
static final Option CHUSER =
@@ -933,13 +934,17 @@ private void runCopyJob(final Path inputRoot, final Path outputRoot, final Strin
}
}

- private void verifySnapshot(final Configuration baseConf, final FileSystem fs, final Path rootDir,
- final Path snapshotDir) throws IOException {
+ private void verifySnapshot(final SnapshotDescription snapshotDesc, final Configuration baseConf,
+ final FileSystem fs, final Path rootDir, final Path snapshotDir) throws IOException {
// Update the conf with the current root dir, since it may be a different cluster
Configuration conf = new Configuration(baseConf);
CommonFSUtils.setRootDir(conf, rootDir);
CommonFSUtils.setFsDefault(conf, CommonFSUtils.getRootDir(conf));
- SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
+ boolean isExpired = SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
+ snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime());
+ if (isExpired) {
+ throw new SnapshotTTLExpiredException(ProtobufUtil.createSnapshotDesc(snapshotDesc));
+ }
SnapshotReferenceUtil.verifySnapshot(conf, fs, snapshotDir, snapshotDesc);
}
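
For reference, the expiration test delegated to SnapshotDescriptionUtils.isExpiredSnapshot boils down to a wall-clock comparison. A minimal sketch of the predicate, assuming the snapshot TTL is stored in seconds and the creation time in epoch milliseconds (an illustration, not the actual SnapshotDescriptionUtils source):

// Illustrative approximation of SnapshotDescriptionUtils.isExpiredSnapshot(ttl, creationTime, now).
// Assumes ttl is in seconds and creationTime/now are epoch milliseconds; a non-positive TTL
// or an unknown creation time is treated as "never expires".
static boolean isExpired(long ttlSeconds, long creationTimeMs, long nowMs) {
  if (ttlSeconds <= 0 || creationTimeMs <= 0) {
    return false;
  }
  return creationTimeMs + ttlSeconds * 1000 < nowMs;
}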

@@ -1041,14 +1046,14 @@ public int doWork() throws IOException {
if (snapshotName == null) {
System.err.println("Snapshot name not provided.");
LOG.error("Use -h or --help for usage instructions.");
- return 0;
+ return EXIT_FAILURE;
}

if (outputRoot == null) {
System.err
.println("Destination file-system (--" + Options.COPY_TO.getLongOpt() + ") not provided.");
LOG.error("Use -h or --help for usage instructions.");
- return 0;
+ return EXIT_FAILURE;
}

if (targetName == null) {
@@ -1076,11 +1081,14 @@ public int doWork() throws IOException {
LOG.debug("outputFs={}, outputRoot={}, skipTmp={}, initialOutputSnapshotDir={}", outputFs,
outputRoot.toString(), skipTmp, initialOutputSnapshotDir);

+ // throw CorruptedSnapshotException if we can't read the snapshot info.
+ SnapshotDescription sourceSnapshotDesc =
+ SnapshotDescriptionUtils.readSnapshotInfo(inputFs, snapshotDir);
+
// Verify snapshot source before copying files
if (verifySource) {
- LOG.info("Verify snapshot source, inputFs={}, inputRoot={}, snapshotDir={}.",
- inputFs.getUri(), inputRoot, snapshotDir);
- verifySnapshot(srcConf, inputFs, inputRoot, snapshotDir);
+ LOG.info("Verify the source snapshot's expiration status and integrity.");
+ verifySnapshot(sourceSnapshotDesc, srcConf, inputFs, inputRoot, snapshotDir);
}

// Find the necessary directories which need to change owner and group
@@ -1101,12 +1109,12 @@ public int doWork() throws IOException {
if (overwrite) {
if (!outputFs.delete(outputSnapshotDir, true)) {
System.err.println("Unable to remove existing snapshot directory: " + outputSnapshotDir);
- return 1;
+ return EXIT_FAILURE;
}
} else {
System.err.println("The snapshot '" + targetName + "' already exists in the destination: "
+ outputSnapshotDir);
- return 1;
+ return EXIT_FAILURE;
}
}

@@ -1117,7 +1125,7 @@ public int doWork() throws IOException {
if (!outputFs.delete(snapshotTmpDir, true)) {
System.err
.println("Unable to remove existing snapshot tmp directory: " + snapshotTmpDir);
- return 1;
+ return EXIT_FAILURE;
}
} else {
System.err
@@ -1126,7 +1134,7 @@ public int doWork() throws IOException {
.println("Please check " + snapshotTmpDir + ". If the snapshot has completed, ");
System.err
.println("consider removing " + snapshotTmpDir + " by using the -overwrite option");
- return 1;
+ return EXIT_FAILURE;
}
}
}
@@ -1205,19 +1213,21 @@ public int doWork() throws IOException {

// Step 4 - Verify snapshot integrity
if (verifyTarget) {
- LOG.info("Verify snapshot integrity");
- verifySnapshot(destConf, outputFs, outputRoot, outputSnapshotDir);
+ LOG.info("Verify the exported snapshot's expiration status and integrity.");
+ SnapshotDescription targetSnapshotDesc =
+ SnapshotDescriptionUtils.readSnapshotInfo(outputFs, outputSnapshotDir);
+ verifySnapshot(targetSnapshotDesc, destConf, outputFs, outputRoot, outputSnapshotDir);
}

LOG.info("Export Completed: " + targetName);
- return 0;
+ return EXIT_SUCCESS;
} catch (Exception e) {
LOG.error("Snapshot export failed", e);
if (!skipTmp) {
outputFs.delete(snapshotTmpDir, true);
}
outputFs.delete(outputSnapshotDir, true);
- return 1;
+ return EXIT_FAILURE;
}
}
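
With the sharpened option descriptions above and the symbolic EXIT_SUCCESS/EXIT_FAILURE return codes, both snapshot verifications run by default and each can be disabled independently. A minimal sketch of a programmatic invocation (snapshot name and destination are placeholders; assumes ExportSnapshot is driven through Hadoop's ToolRunner, which AbstractHBaseTool supports):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
import org.apache.hadoop.util.ToolRunner;

public class ExportSnapshotExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Placeholder snapshot name and destination. With both verifications left enabled,
    // exporting an expired snapshot now fails (non-zero exit code) instead of being copied.
    int exitCode = ToolRunner.run(conf, new ExportSnapshot(), new String[] {
      "--snapshot", "my_snapshot",
      "--copy-to", "hdfs://backup-cluster:8020/hbase" });
    // Add "--no-source-verify" and/or "--no-target-verify" to skip the respective checks.
    System.exit(exitCode);
  }
}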

TestCopyTable.java
@@ -19,23 +19,31 @@

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.SnapshotDescription;
import org.apache.hadoop.hbase.client.SnapshotType;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotTTLExpiredException;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -210,6 +218,40 @@ public void testLoadingSnapshotToTable() throws Exception {
testCopyTableBySnapshot("testLoadingSnapshotToTable", false, false);
}

@Test
public void testLoadingTtlExpiredSnapshotToTable() throws Exception {
String tablePrefix = "testLoadingExpiredSnapshotToTable";
TableName table1 = TableName.valueOf(tablePrefix + 1);
TableName table2 = TableName.valueOf(tablePrefix + 2);
Table t1 = createTable(table1, FAMILY_A, false);
createTable(table2, FAMILY_A, false);
loadData(t1, FAMILY_A, Bytes.toBytes("qualifier"));
String snapshot = tablePrefix + "_snapshot";
Map<String, Object> properties = new HashMap<>();
properties.put("TTL", 10);
SnapshotDescription snapshotDescription = new SnapshotDescription(snapshot, table1,
SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
TEST_UTIL.getAdmin().snapshot(snapshotDescription);
boolean isExist =
TEST_UTIL.getAdmin().listSnapshots().stream().anyMatch(ele -> snapshot.equals(ele.getName()));
assertTrue(isExist);
int retry = 6;
while (
!SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0
) {
retry--;
Thread.sleep(10 * 1000);
}
boolean isExpiredSnapshot =
SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime());
assertTrue(isExpiredSnapshot);
String[] args = new String[] { "--snapshot", "--new.name=" + table2, "--bulkload", snapshot };
assertThrows(SnapshotTTLExpiredException.class,
() -> runCopy(TEST_UTIL.getConfiguration(), args));
}

@Test
public void tsetLoadingSnapshotToMobTable() throws Exception {
testCopyTableBySnapshot("testLoadingSnapshotToMobTable", false, true);
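
Both the new testLoadingTtlExpiredSnapshotToTable above and testExportExpiredSnapshot in TestExportSnapshot below wait out the 10-second TTL with a bounded sleep/poll loop (up to six 10-second sleeps). A sketch of a deterministic alternative, reusing the test's own snapshotDescription and args variables and assuming a process-global manual clock (org.apache.hadoop.hbase.util.ManualEnvironmentEdge) is acceptable here; the injected edge is visible to everything in the JVM, which is a real trade-off in a mini-cluster test:

// Illustrative alternative to the sleep/poll loop, not part of this commit.
// The injected edge must stay in place until after runCopy so the tool's own
// expiration check sees the advanced clock; reset it afterwards.
ManualEnvironmentEdge clock = new ManualEnvironmentEdge();
clock.setValue(EnvironmentEdgeManager.currentTime());
EnvironmentEdgeManager.injectEdge(clock);
try {
  clock.incValue(11 * 1000); // TTL is 10 seconds; jump just past it
  assertTrue(SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
    snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()));
  assertThrows(SnapshotTTLExpiredException.class,
    () -> runCopy(TEST_UTIL.getConfiguration(), args));
} finally {
  EnvironmentEdgeManager.reset();
}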
TestExportSnapshot.java
@@ -44,12 +44,14 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.SnapshotType;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -282,6 +284,39 @@ public void testExportWithResetTtl() throws Exception {
}
}

@Test
public void testExportExpiredSnapshot() throws Exception {
String name = "testExportExpiredSnapshot";
TableName tableName = TableName.valueOf(name);
String snapshotName = "snapshot-" + name;
createTable(tableName);
SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
Map<String, Object> properties = new HashMap<>();
properties.put("TTL", 10);
org.apache.hadoop.hbase.client.SnapshotDescription snapshotDescription =
new org.apache.hadoop.hbase.client.SnapshotDescription(snapshotName, tableName,
SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
admin.snapshot(snapshotDescription);
boolean isExist =
admin.listSnapshots().stream().anyMatch(ele -> snapshotName.equals(ele.getName()));
assertTrue(isExist);
int retry = 6;
while (
!SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0
) {
retry--;
Thread.sleep(10 * 1000);
}
boolean isExpiredSnapshot =
SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime());
assertTrue(isExpiredSnapshot);
int res = runExportSnapshot(TEST_UTIL.getConfiguration(), snapshotName, snapshotName,
TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false, false, true, true);
assertTrue(res == AbstractHBaseTool.EXIT_FAILURE);
}

private void testExportFileSystemState(final TableName tableName, final byte[] snapshotName,
final byte[] targetName, int filesExpected) throws Exception {
testExportFileSystemState(tableName, snapshotName, targetName, filesExpected,
@@ -313,29 +348,10 @@ protected static void testExportFileSystemState(final Configuration conf,
FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
FileSystem srcFs = srcDir.getFileSystem(conf);
Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
- LOG.info("tgtFsUri={}, tgtDir={}, rawTgtDir={}, srcFsUri={}, srcDir={}", tgtFs.getUri(), tgtDir,
- rawTgtDir, srcFs.getUri(), srcDir);
- List<String> opts = new ArrayList<>();
- opts.add("--snapshot");
- opts.add(Bytes.toString(snapshotName));
- opts.add("--copy-to");
- opts.add(tgtDir.toString());
- if (targetName != snapshotName) {
- opts.add("--target");
- opts.add(Bytes.toString(targetName));
- }
- if (overwrite) {
- opts.add("--overwrite");
- }
- if (resetTtl) {
- opts.add("--reset-ttl");
- }
- if (!checksumVerify) {
- opts.add("--no-checksum-verify");
- }

// Export Snapshot
- int res = run(conf, new ExportSnapshot(), opts.toArray(new String[opts.size()]));
+ int res = runExportSnapshot(conf, Bytes.toString(snapshotName), Bytes.toString(targetName), srcDir, rawTgtDir, overwrite,
+ resetTtl, checksumVerify, true, true);
assertEquals("success " + success + ", res=" + res, success ? 0 : 1, res);
if (!success) {
final Path targetDir = new Path(HConstants.SNAPSHOT_DIR_NAME, Bytes.toString(targetName));
@@ -468,4 +484,42 @@ private static void removeExportDir(final Path path) throws IOException {
FileSystem fs = FileSystem.get(path.toUri(), new Configuration());
fs.delete(path, true);
}

private static int runExportSnapshot(final Configuration conf, final String sourceSnapshotName,
final String targetSnapshotName, final Path srcDir, Path rawTgtDir, final boolean overwrite,
final boolean resetTtl, final boolean checksumVerify, final boolean noSourceVerify,
final boolean noTargetVerify) throws Exception {
FileSystem tgtFs = rawTgtDir.getFileSystem(conf);
FileSystem srcFs = srcDir.getFileSystem(conf);
Path tgtDir = rawTgtDir.makeQualified(tgtFs.getUri(), tgtFs.getWorkingDirectory());
LOG.info("tgtFsUri={}, tgtDir={}, rawTgtDir={}, srcFsUri={}, srcDir={}", tgtFs.getUri(), tgtDir,
rawTgtDir, srcFs.getUri(), srcDir);
List<String> opts = new ArrayList<>();
opts.add("--snapshot");
opts.add(sourceSnapshotName);
opts.add("--copy-to");
opts.add(tgtDir.toString());
if (!targetSnapshotName.equals(sourceSnapshotName)) {
opts.add("--target");
opts.add(targetSnapshotName);
}
if (overwrite) {
opts.add("--overwrite");
}
if (resetTtl) {
opts.add("--reset-ttl");
}
if (!checksumVerify) {
opts.add("--no-checksum-verify");
}
if (!noSourceVerify) {
opts.add("--no-source-verify");
}
if (!noTargetVerify) {
opts.add("--no-target-verify");
}

// Export Snapshot
return run(conf, new ExportSnapshot(), opts.toArray(new String[opts.size()]));
}
}
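
A natural companion to testExportExpiredSnapshot, not part of this diff, would pin down the escape hatch: with --no-source-verify and --no-target-verify both passed, an expired snapshot should still be exportable. A sketch of such a test (it would sit inside the class above, mirrors the existing setup, and assumes nothing else in the export path fails):

@Test
public void testExportExpiredSnapshotWithoutVerify() throws Exception {
  String name = "testExportExpiredSnapshotWithoutVerify";
  TableName tableName = TableName.valueOf(name);
  String snapshotName = "snapshot-" + name;
  createTable(tableName);
  SnapshotTestingUtils.loadData(TEST_UTIL, tableName, 50, FAMILY);
  Map<String, Object> properties = new HashMap<>();
  properties.put("TTL", 10);
  org.apache.hadoop.hbase.client.SnapshotDescription snapshotDescription =
    new org.apache.hadoop.hbase.client.SnapshotDescription(snapshotName, tableName,
      SnapshotType.FLUSH, null, EnvironmentEdgeManager.currentTime(), -1, properties);
  admin.snapshot(snapshotDescription);
  // Wait (bounded) for the snapshot to pass its TTL, as testExportExpiredSnapshot does.
  int retry = 6;
  while (!SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDescription.getTtl(),
    snapshotDescription.getCreationTime(), EnvironmentEdgeManager.currentTime()) && retry > 0) {
    retry--;
    Thread.sleep(10 * 1000);
  }
  // Passing false for the last two arguments adds --no-source-verify and --no-target-verify,
  // so the expiration check in ExportSnapshot.verifySnapshot() is skipped on both sides.
  int res = runExportSnapshot(TEST_UTIL.getConfiguration(), snapshotName, snapshotName,
    TEST_UTIL.getDefaultRootDirPath(), getHdfsDestinationDir(), false, false, true, false, false);
  assertEquals(AbstractHBaseTool.EXIT_SUCCESS, res);
}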
RestoreSnapshotHelper.java
@@ -63,6 +63,7 @@
import org.apache.hadoop.hbase.security.access.TablePermission;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ModifyRegionUtils;
import org.apache.hadoop.hbase.util.Pair;
@@ -877,6 +878,12 @@ public static RestoreMetaChanges copySnapshotForScanner(Configuration conf, File

Path snapshotDir = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);
SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
// check if the snapshot is expired.
boolean isExpired = SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(),
snapshotDesc.getCreationTime(), EnvironmentEdgeManager.currentTime());
if (isExpired) {
throw new SnapshotTTLExpiredException(ProtobufUtil.createSnapshotDesc(snapshotDesc));
}
SnapshotManifest manifest = SnapshotManifest.open(conf, fs, snapshotDir, snapshotDesc);

MonitoredTask status = TaskMonitor.get()
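
With this guard in copySnapshotForScanner, any reader that restores a snapshot for scanning (the path CopyTable's --snapshot mode ultimately goes through) now fails fast on an expired snapshot instead of reading stale data. A minimal sketch of a caller surfacing the new exception, with placeholder paths and assuming copySnapshotForScanner keeps the (conf, fs, rootDir, restoreDir, snapshotName) signature seen here:

// Illustrative caller, not part of this commit; class name, args and paths are placeholders.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper;
import org.apache.hadoop.hbase.snapshot.SnapshotTTLExpiredException;
import org.apache.hadoop.hbase.util.CommonFSUtils;

public class ScanSnapshotExample {
  public static void main(String[] args) throws Exception {
    String snapshotName = args[0];            // e.g. "my_snapshot"
    Path restoreDir = new Path(args[1]);      // scratch dir for the restored snapshot references
    Configuration conf = HBaseConfiguration.create();
    Path rootDir = CommonFSUtils.getRootDir(conf);
    FileSystem fs = rootDir.getFileSystem(conf);
    try {
      RestoreSnapshotHelper.copySnapshotForScanner(conf, fs, rootDir, restoreDir, snapshotName);
    } catch (SnapshotTTLExpiredException e) {
      // The snapshot outlived its TTL; take a fresh snapshot or pick a newer one instead.
      System.err.println("Snapshot " + snapshotName + " has expired: " + e.getMessage());
      throw e;
    }
  }
}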