Skip to content

Commit

Permalink
HBASE-27201 Clean up error-prone findings in hbase-backup
Browse files Browse the repository at this point in the history
  • Loading branch information
apurtell committed Jul 22, 2022
1 parent 31fc97e commit fa5e1a7
Show file tree
Hide file tree
Showing 16 changed files with 130 additions and 126 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;

/**
* An object to encapsulate the information for each backup session
Expand Down Expand Up @@ -451,13 +450,13 @@ public byte[] toByteArray() throws IOException {
return toProtosBackupInfo().toByteArray();
}

private void setBackupTableInfoMap(Builder builder) {
private void setBackupTableInfoMap(BackupProtos.BackupInfo.Builder builder) {
for (Entry<TableName, BackupTableInfo> entry : backupTableInfoMap.entrySet()) {
builder.addBackupTableInfo(entry.getValue().toProto());
}
}

private void setTableSetTimestampMap(Builder builder) {
private void setTableSetTimestampMap(BackupProtos.BackupInfo.Builder builder) {
if (this.getTableSetTimestampMap() != null) {
for (Entry<TableName, Map<String, Long>> entry : this.getTableSetTimestampMap().entrySet()) {
builder.putTableSetTimestamp(entry.getKey().getNameAsString(),
Expand Down Expand Up @@ -531,10 +530,9 @@ public String getShortDescription() {
sb.append("Type=" + getType()).append(",");
sb.append("Tables=" + getTableListAsString()).append(",");
sb.append("State=" + getState()).append(",");
Date date = null;
Calendar cal = Calendar.getInstance();
cal.setTimeInMillis(getStartTs());
date = cal.getTime();
Date date = cal.getTime();
sb.append("Start time=" + date).append(",");
if (state == BackupState.FAILED) {
sb.append("Failed message=" + getFailedMsg()).append(",");
Expand All @@ -560,7 +558,7 @@ public String getStatusAndProgressAsString() {
}

public String getTableListAsString() {
StringBuffer sb = new StringBuffer();
StringBuilder sb = new StringBuilder();
sb.append("{");
sb.append(StringUtils.join(backupTableInfoMap.keySet(), ","));
sb.append("}");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ protected void init() {
Log4jUtils.disableZkAndClientLoggers();
}

private int parseAndRun(String[] args) throws IOException {
private int parseAndRun() throws IOException {
// Check if backup is enabled
if (!BackupManager.isBackupEnabled(getConf())) {
System.err.println(BackupRestoreConstants.ENABLE_BACKUP);
Expand Down Expand Up @@ -146,7 +146,7 @@ private int parseAndRun(String[] args) throws IOException {
if (cmd.hasOption(OPTION_SET)) {
String setName = cmd.getOptionValue(OPTION_SET);
try {
tables = getTablesForSet(conn, setName, conf);
tables = getTablesForSet(conn, setName);
} catch (IOException e) {
System.out.println("ERROR: " + e.getMessage() + " for setName=" + setName);
printToolUsage();
Expand Down Expand Up @@ -182,8 +182,7 @@ private int parseAndRun(String[] args) throws IOException {
return 0;
}

private String getTablesForSet(Connection conn, String name, Configuration conf)
throws IOException {
private String getTablesForSet(Connection conn, String name) throws IOException {
try (final BackupSystemTable table = new BackupSystemTable(conn)) {
List<TableName> tables = table.describeBackupSet(name);

Expand Down Expand Up @@ -214,7 +213,7 @@ protected void processOptions(CommandLine cmd) {

@Override
protected int doWork() throws Exception {
return parseAndRun(cmd.getArgs());
return parseAndRun();
}

public static void main(String[] args) throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
Expand Down Expand Up @@ -310,7 +311,7 @@ public void execute() throws IOException {
String setName = null;
if (cmdline.hasOption(OPTION_SET)) {
setName = cmdline.getOptionValue(OPTION_SET);
tables = getTablesForSet(setName, getConf());
tables = getTablesForSet(setName);

if (tables == null) {
System.out
Expand Down Expand Up @@ -371,7 +372,7 @@ private boolean verifyPath(String path) {
}
}

private String getTablesForSet(String name, Configuration conf) throws IOException {
private String getTablesForSet(String name) throws IOException {
try (final BackupSystemTable table = new BackupSystemTable(conn)) {
List<TableName> tables = table.describeBackupSet(name);

Expand Down Expand Up @@ -1001,14 +1002,14 @@ public void execute() throws IOException {
processSetDescribe(args);
break;
case SET_LIST:
processSetList(args);
processSetList();
break;
default:
break;
}
}

private void processSetList(String[] args) throws IOException {
private void processSetList() throws IOException {
super.execute();

// List all backup set names
Expand Down Expand Up @@ -1089,10 +1090,11 @@ private void processSetAdd(String[] args) throws IOException {
super.execute();

String setName = args[2];
String[] tables = args[3].split(",");
TableName[] tableNames = new TableName[tables.length];
for (int i = 0; i < tables.length; i++) {
tableNames[i] = TableName.valueOf(tables[i]);
List<String> tables = Splitter.on(',').splitToList(args[3]);
TableName[] tableNames = new TableName[tables.size()];
int i = 0;
for (String table : tables) {
tableNames[i++] = TableName.valueOf(table);
}
try (final BackupAdminImpl admin = new BackupAdminImpl(conn)) {
admin.addToBackupSet(setName, tableNames);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -366,7 +366,6 @@ private void setIncrTimeRanges(Map<TableName, Map<String, Long>> incrTimeRanges)
}

// backup image directory
private String tableBackupDir = null;
private BackupImage backupImage;

/**
Expand All @@ -385,7 +384,6 @@ public BackupManifest(BackupInfo backup) {
* @param backup The ongoing backup session info
*/
public BackupManifest(BackupInfo backup, TableName table) {
this.tableBackupDir = backup.getTableBackupDir(table);
List<TableName> tables = new ArrayList<TableName>();
tables.add(table);
BackupImage.Builder builder = BackupImage.newBuilder();
Expand Down Expand Up @@ -468,7 +466,7 @@ public List<TableName> getTableList() {

/**
* TODO: fix it. Persist the manifest file.
* @throws IOException IOException when storing the manifest file.
* @throws BackupException if an error occurred while storing the manifest file.
*/
public void store(Configuration conf) throws BackupException {
byte[] data = backupImage.toProto().toByteArray();
Expand Down Expand Up @@ -526,7 +524,7 @@ public ArrayList<BackupImage> getRestoreDependentList(boolean reverse) {
restoreImages.put(Long.valueOf(image.startTs), image);
}
return new ArrayList<>(
reverse ? (restoreImages.descendingMap().values()) : (restoreImages.values()));
reverse ? restoreImages.descendingMap().values() : restoreImages.values());
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@

import java.io.Closeable;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
Expand Down Expand Up @@ -69,6 +71,8 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.base.Splitter;

import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

Expand Down Expand Up @@ -237,6 +241,7 @@ private void waitForSystemTable(Admin admin, TableName tableName) throws IOExcep
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw (IOException) new InterruptedIOException().initCause(e);
}
if (EnvironmentEdgeManager.currentTime() - startTime > TIMEOUT) {
throw new IOException(
Expand Down Expand Up @@ -574,7 +579,7 @@ public String readBackupStartCode(String backupRoot) throws IOException {
if (val.length == 0) {
return null;
}
return new String(val);
return new String(val, StandardCharsets.UTF_8);
}
}

Expand Down Expand Up @@ -1639,7 +1644,7 @@ public String[] getListOfBackupIdsFromDeleteOperation() throws IOException {
if (val.length == 0) {
return null;
}
return new String(val).split(",");
return new String(val, StandardCharsets.UTF_8).split(",");
}
}

Expand All @@ -1654,7 +1659,7 @@ public boolean isMergeInProgress() throws IOException {
Get get = new Get(MERGE_OP_ROW);
try (Table table = connection.getTable(tableName)) {
Result res = table.get(get);
return (!res.isEmpty());
return !res.isEmpty();
}
}

Expand Down Expand Up @@ -1720,7 +1725,7 @@ public String[] getListOfBackupIdsFromMergeOperation() throws IOException {
if (val.length == 0) {
return null;
}
return new String(val).split(",");
return new String(val, StandardCharsets.UTF_8).split(",");
}
}

Expand All @@ -1737,20 +1742,31 @@ static Scan createScanForOrigBulkLoadedFiles(TableName table) {
}

/**
 * Extracts the table component from a bulk-load row key.
 * <p>
 * Row format is {@code bulk : namespace : table : region : file} (delimited by
 * {@code BLK_LD_DELIM}); this returns the second component, matching the previous
 * {@code split(...)[1]} behavior.
 * @param rowStr the raw row key as a string
 * @return the second delimited component of the row
 */
static String getTableNameFromOrigBulkLoadRow(String rowStr) {
  // format is bulk : namespace : table : region : file
  Iterator<String> i = Splitter.onPattern(BLK_LD_DELIM).split(rowStr).iterator();
  i.next(); // Skip the leading "bulk" component
  return i.next();
}

/**
 * Extracts the region component from a bulk-load row key.
 * <p>
 * Row format is {@code bulk : namespace : table : region : file} (delimited by
 * {@code BLK_LD_DELIM}). When the table is in the default namespace the namespace
 * component is absent, so the row has only four parts and the region moves from
 * position 4 (index 3) to position 3 (index 2).
 * @param rowStr the raw row key as a string
 * @return the region component of the row
 */
static String getRegionNameFromOrigBulkLoadRow(String rowStr) {
  List<String> parts = Splitter.onPattern(BLK_LD_DELIM).splitToList(rowStr);
  // Four parts means the namespace component is missing (default namespace).
  String region = parts.size() == 4 ? parts.get(2) : parts.get(3);
  LOG.debug("bulk row string " + rowStr + " region " + region);
  return region;
}

/*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -204,8 +204,7 @@ private void copyBulkLoadedFiles(List<String> activeFiles, List<String> archiveF
String tgtDest = backupInfo.getBackupRootDir() + Path.SEPARATOR + backupInfo.getBackupId();
int attempt = 1;
while (activeFiles.size() > 0) {
LOG.info(
"Copy " + activeFiles.size() + " active bulk loaded files. Attempt =" + (attempt++));
LOG.info("Copy " + activeFiles.size() + " active bulk loaded files. Attempt =" + attempt++);
String[] toCopy = new String[activeFiles.size()];
activeFiles.toArray(toCopy);
// Active file can be archived during copy operation,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ protected void addManifest(BackupInfo backupInfo, BackupManager backupManager, B
* @return meta data dir
*/
protected String obtainBackupMetaDataStr(BackupInfo backupInfo) {
StringBuffer sb = new StringBuffer();
StringBuilder sb = new StringBuilder();
sb.append("type=" + backupInfo.getType() + ",tablelist=");
for (TableName table : backupInfo.getTables()) {
sb.append(table + ";");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@
import org.apache.hadoop.tools.DistCpConstants;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -127,7 +126,7 @@ public BackupInfo getBackupInfo() {
* @param backupInfo backup info
* @param newProgress progress
* @param bytesCopied bytes copied
* @throws NoNodeException exception
* @throws IOException exception
*/
static void updateProgress(BackupInfo backupInfo, BackupManager backupManager, int newProgress,
long bytesCopied) throws IOException {
Expand Down Expand Up @@ -361,7 +360,7 @@ private SequenceFile.Writer getWriter(Path pathToListFile) throws IOException {
* @param conf The hadoop configuration
* @param copyType The backup copy type
* @param options Options for customized ExportSnapshot or DistCp
* @throws Exception exception
* @throws IOException exception
*/
@Override
public int copy(BackupInfo context, BackupManager backupManager, Configuration conf,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,12 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Stack;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -257,7 +258,7 @@ protected void copyFile(FileSystem fs, Path p, Path newPath) throws IOException
*/
protected Path convertToDest(Path p, Path backupDirPath) {
String backupId = backupDirPath.getName();
Stack<String> stack = new Stack<String>();
Deque<String> stack = new ArrayDeque<String>();
String name = null;
while (true) {
name = p.getName();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.backup.regionserver;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.Callable;
Expand Down Expand Up @@ -56,7 +57,7 @@ public LogRollBackupSubprocedure(RegionServerServices rss, ProcedureMember membe
this.rss = rss;
this.taskManager = taskManager;
if (data != null) {
backupRoot = new String(data);
backupRoot = new String(data, StandardCharsets.UTF_8);
}
}

Expand Down
Loading

0 comments on commit fa5e1a7

Please sign in to comment.