diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
index 25055fd5e8e6..a44c8843c993 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupAdmin.java
@@ -76,14 +76,6 @@ public interface BackupAdmin extends Closeable {
    */
   void mergeBackups(String[] backupIds) throws IOException;
 
-  /**
-   * Show backup history command
-   * @param n last n backup sessions
-   * @return list of backup info objects
-   * @throws IOException exception
-   */
-  List<BackupInfo> getHistory(int n) throws IOException;
-
   /**
    * Show backup history command with filters
    * @param n last n backup sessions
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
index f0dc10b83619..39f903185a68 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupInfo.java
@@ -27,6 +27,7 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import java.util.function.Predicate;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
@@ -46,13 +47,22 @@ public class BackupInfo implements Comparable<BackupInfo> {
   private static final Logger LOG = LoggerFactory.getLogger(BackupInfo.class);
   private static final int MAX_FAILED_MESSAGE_LENGTH = 1024;
 
-  public interface Filter {
-    /**
-     * Filter interface
-     * @param info backup info
-     * @return true if info passes filter, false otherwise
-     */
-    boolean apply(BackupInfo info);
+  public interface Filter extends Predicate<BackupInfo> {
+    /** Returns true if the BackupInfo passes the filter, false otherwise */
+    @Override
+    boolean test(BackupInfo backupInfo);
+  }
+
+  public static Filter withRoot(String backupRoot) {
+    return info -> info.getBackupRootDir().equals(backupRoot);
+  }
+
+  public static Filter withType(BackupType type) {
+    return info -> info.getType() == type;
+  }
+
+  public static Filter withState(BackupState state) {
+    return info -> info.getState() == state;
   }
 
   /**
@@ -61,8 +71,7 @@ public interface Filter {
   public enum BackupState {
     RUNNING,
     COMPLETE,
-    FAILED,
-    ANY
+    FAILED
   }
 
   /**
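Note: because Filter now extends Predicate<BackupInfo>, the withRoot/withType/withState factories introduced above compose with the standard java.util.function combinators (and/or/negate). A minimal sketch of that composition, purely illustrative (the helper class, method name and list argument are not part of the patch):

    import java.util.List;
    import java.util.function.Predicate;
    import java.util.stream.Collectors;
    import org.apache.hadoop.hbase.backup.BackupInfo;
    import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
    import org.apache.hadoop.hbase.backup.BackupType;

    public final class FilterCompositionSketch {
      private FilterCompositionSketch() {
      }

      /** Keeps only completed FULL backups stored under the given root directory. */
      public static List<BackupInfo> completedFullBackupsUnder(String backupRoot,
        List<BackupInfo> allBackups) {
        // Each factory returns a BackupInfo.Filter, which is a Predicate<BackupInfo>,
        // so the instances chain with Predicate.and().
        Predicate<BackupInfo> wanted = BackupInfo.withState(BackupState.COMPLETE)
          .and(BackupInfo.withType(BackupType.FULL)).and(BackupInfo.withRoot(backupRoot));
        return allBackups.stream().filter(wanted).collect(Collectors.toList());
      }
    }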
LOG.warn("No ongoing sessions found."); return null; @@ -111,7 +115,7 @@ public int deleteBackups(String[] backupIds) throws IOException { } // Step 2: Make sure there is no failed session - List list = sysTable.getBackupInfos(BackupState.RUNNING); + List list = sysTable.getBackupInfos(withState(BackupState.RUNNING)); if (list.size() != 0) { // ailed sessions found LOG.warn("Failed backup session found. Run backup repair tool first."); @@ -301,7 +305,7 @@ private List getAffectedBackupSessions(BackupInfo backupInfo, TableN LOG.debug("GetAffectedBackupInfos for: " + backupInfo.getBackupId() + " table=" + tn); long ts = backupInfo.getStartTs(); List list = new ArrayList<>(); - List history = table.getBackupHistory(backupInfo.getBackupRootDir()); + List history = table.getBackupHistory(withRoot(backupInfo.getBackupRootDir())); // Scan from most recent to backupInfo // break when backupInfo reached for (BackupInfo info : history) { @@ -367,49 +371,10 @@ private boolean isLastBackupSession(BackupSystemTable table, TableName tn, long return false; } - @Override - public List getHistory(int n) throws IOException { - try (final BackupSystemTable table = new BackupSystemTable(conn)) { - List history = table.getBackupHistory(); - - if (history.size() <= n) { - return history; - } - - List list = new ArrayList<>(); - for (int i = 0; i < n; i++) { - list.add(history.get(i)); - } - return list; - } - } - @Override public List getHistory(int n, BackupInfo.Filter... filters) throws IOException { - if (filters.length == 0) { - return getHistory(n); - } - try (final BackupSystemTable table = new BackupSystemTable(conn)) { - List history = table.getBackupHistory(); - List result = new ArrayList<>(); - for (BackupInfo bi : history) { - if (result.size() == n) { - break; - } - - boolean passed = true; - for (int i = 0; i < filters.length; i++) { - if (!filters[i].apply(bi)) { - passed = false; - break; - } - } - if (passed) { - result.add(bi); - } - } - return result; + return table.getBackupInfos(n, filters); } } @@ -672,7 +637,7 @@ private void checkIfValidForMerge(String[] backupIds, BackupSystemTable table) // Filter 1 : backupRoot // Filter 2 : time range filter // Filter 3 : table filter - BackupInfo.Filter destinationFilter = info -> info.getBackupRootDir().equals(backupDest); + BackupInfo.Filter destinationFilter = withRoot(backupDest); BackupInfo.Filter timeRangeFilter = info -> { long time = info.getStartTs(); @@ -684,10 +649,10 @@ private void checkIfValidForMerge(String[] backupIds, BackupSystemTable table) return !Collections.disjoint(allTables, tables); }; - BackupInfo.Filter typeFilter = info -> info.getType() == BackupType.INCREMENTAL; - BackupInfo.Filter stateFilter = info -> info.getState() == BackupState.COMPLETE; + BackupInfo.Filter typeFilter = withType(BackupType.INCREMENTAL); + BackupInfo.Filter stateFilter = withState(BackupState.COMPLETE); - List allInfos = table.getBackupHistory(-1, destinationFilter, timeRangeFilter, + List allInfos = table.getBackupHistory(destinationFilter, timeRangeFilter, tableFilter, typeFilter, stateFilter); if (allInfos.size() != allBackups.size()) { // Yes we have at least one hole in backup image sequence diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java index 66694f4384f4..2926b7a8ee63 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java +++ 
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
index 66694f4384f4..2926b7a8ee63 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.backup.impl;
 
+import static org.apache.hadoop.hbase.backup.BackupInfo.withState;
 import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BACKUP_LIST_DESC;
 import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 import static org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
@@ -153,7 +154,7 @@ public void execute() throws IOException {
       if (requiresNoActiveSession()) {
         // Check active session
         try (BackupSystemTable table = new BackupSystemTable(conn)) {
-          List<BackupInfo> sessions = table.getBackupInfos(BackupState.RUNNING);
+          List<BackupInfo> sessions = table.getBackupInfos(withState(BackupState.RUNNING));
 
           if (sessions.size() > 0) {
             System.err.println("Found backup session in a RUNNING state: ");
@@ -528,7 +529,7 @@ public void execute() throws IOException {
       if (backupId != null) {
         info = sysTable.readBackupInfo(backupId);
       } else {
-        List<BackupInfo> infos = sysTable.getBackupInfos(BackupState.RUNNING);
+        List<BackupInfo> infos = sysTable.getBackupInfos(withState(BackupState.RUNNING));
         if (infos != null && infos.size() > 0) {
           info = infos.get(0);
           backupId = info.getBackupId();
@@ -594,18 +595,15 @@ private void executeDeleteOlderThan(CommandLine cmdline) throws IOException {
         throw new IOException(value + " is not an integer number");
       }
       final long fdays = days;
-      BackupInfo.Filter dateFilter = new BackupInfo.Filter() {
-        @Override
-        public boolean apply(BackupInfo info) {
-          long currentTime = EnvironmentEdgeManager.currentTime();
-          long maxTsToDelete = currentTime - fdays * 24 * 3600 * 1000;
-          return info.getCompleteTs() <= maxTsToDelete;
-        }
+      BackupInfo.Filter dateFilter = info -> {
+        long currentTime = EnvironmentEdgeManager.currentTime();
+        long maxTsToDelete = currentTime - fdays * 24 * 3600 * 1000;
+        return info.getCompleteTs() <= maxTsToDelete;
       };
       List<BackupInfo> history = null;
       try (final BackupSystemTable sysTable = new BackupSystemTable(conn);
         BackupAdminImpl admin = new BackupAdminImpl(conn)) {
-        history = sysTable.getBackupHistory(-1, dateFilter);
+        history = sysTable.getBackupHistory(dateFilter);
         String[] backupIds = convertToBackupIds(history);
         int deleted = admin.deleteBackups(backupIds);
         System.out.println("Deleted " + deleted + " backups. Total older than " + days + " days: "
@@ -679,7 +677,7 @@ public void execute() throws IOException {
         final BackupSystemTable sysTable = new BackupSystemTable(conn)) {
       // Failed backup
       BackupInfo backupInfo;
-      List<BackupInfo> list = sysTable.getBackupInfos(BackupState.RUNNING);
+      List<BackupInfo> list = sysTable.getBackupInfos(withState(BackupState.RUNNING));
       if (list.size() == 0) {
         // No failed sessions found
         System.out.println("REPAIR status: no failed sessions found."
@@ -860,27 +858,21 @@ public void execute() throws IOException {
       int n = parseHistoryLength();
       final TableName tableName = getTableName();
       final String setName = getTableSetName();
-      BackupInfo.Filter tableNameFilter = new BackupInfo.Filter() {
-        @Override
-        public boolean apply(BackupInfo info) {
-          if (tableName == null) {
-            return true;
-          }
-
-          List<TableName> names = info.getTableNames();
-          return names.contains(tableName);
+      BackupInfo.Filter tableNameFilter = info -> {
+        if (tableName == null) {
+          return true;
         }
-      };
-      BackupInfo.Filter tableSetFilter = new BackupInfo.Filter() {
-        @Override
-        public boolean apply(BackupInfo info) {
-          if (setName == null) {
-            return true;
-          }
-          String backupId = info.getBackupId();
-          return backupId.startsWith(setName);
+
+        List<TableName> names = info.getTableNames();
+        return names.contains(tableName);
+      };
+      BackupInfo.Filter tableSetFilter = info -> {
+        if (setName == null) {
+          return true;
         }
+
+        String backupId = info.getBackupId();
+        return backupId.startsWith(setName);
       };
       Path backupRootPath = getBackupRootPath();
       List<BackupInfo> history;
@@ -888,7 +880,8 @@ public boolean apply(BackupInfo info) {
         // Load from backup system table
         super.execute();
         try (final BackupSystemTable sysTable = new BackupSystemTable(conn)) {
-          history = sysTable.getBackupHistory(n, tableNameFilter, tableSetFilter);
+          history = sysTable.getBackupHistory(tableNameFilter, tableSetFilter);
+          history = history.subList(0, Math.min(n, history.size()));
         }
       } else {
         // load from backup FS
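Note: since Filter is now a functional interface, ad-hoc filters such as the delete-older-than date check above can be written as plain lambdas and factored out for reuse. A standalone sketch of the same pattern (the helper class and method are illustrative only, not added by the patch):

    import org.apache.hadoop.hbase.backup.BackupInfo;
    import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

    public final class AgeFilterSketch {
      /** Matches sessions whose completion timestamp is at least the given number of days old. */
      public static BackupInfo.Filter olderThanDays(long days) {
        return info -> {
          long currentTime = EnvironmentEdgeManager.currentTime();
          long maxTsToDelete = currentTime - days * 24 * 3600 * 1000;
          return info.getCompleteTs() <= maxTsToDelete;
        };
      }
    }

Such a filter can then be passed straight to getBackupHistory, in the same way the command passes its inline dateFilter.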
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
index 810af8f032ce..9cab455441bb 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.backup.impl;
 
+import static org.apache.hadoop.hbase.backup.BackupInfo.withState;
+
 import java.io.Closeable;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -249,7 +251,7 @@ public BackupInfo createBackupInfo(String backupId, BackupType type, List<TableN
-    ArrayList<BackupInfo> sessions = systemTable.getBackupInfos(BackupState.RUNNING);
+    List<BackupInfo> sessions = systemTable.getBackupInfos(withState(BackupState.RUNNING));
     if (sessions.size() == 0) {
       return null;
     }
@@ -371,16 +373,10 @@ public void deleteBulkLoadedRows(List<byte[]> rows) throws IOException {
   }
 
   /**
-   * Get all completed backup information (in desc order by time)
-   * @return history info of BackupCompleteData
-   * @throws IOException exception
+   * Get all backup information, ordered by descending start time. I.e. from newest to oldest.
    */
-  public List<BackupInfo> getBackupHistory() throws IOException {
-    return systemTable.getBackupHistory();
-  }
-
-  public ArrayList<BackupInfo> getBackupHistory(boolean completed) throws IOException {
-    return systemTable.getBackupHistory(completed);
+  public List<BackupInfo> getBackupHistory(BackupInfo.Filter... filters) throws IOException {
+    return systemTable.getBackupHistory(filters);
   }
 
   /**
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
index f2ddcf5e7573..3b46335a7299 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.hbase.backup.impl;
 
+import static org.apache.hadoop.hbase.backup.BackupInfo.withRoot;
+import static org.apache.hadoop.hbase.backup.BackupInfo.withState;
+import static org.apache.hadoop.hbase.backup.BackupInfo.withType;
+
 import edu.umd.cs.findbugs.annotations.Nullable;
 import java.io.Closeable;
 import java.io.IOException;
@@ -26,6 +30,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -36,7 +41,9 @@
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.function.Predicate;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -54,7 +61,6 @@
 import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.BackupType;
-import org.apache.hadoop.hbase.backup.util.BackupUtils;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.BufferedMutator;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -592,123 +598,26 @@ public void writeRegionServerLastLogRollResult(String server, Long ts, String ba
   }
 
   /**
-   * Get all completed backup information (in desc order by time)
-   * @param onlyCompleted true, if only successfully completed sessions
-   * @return history info of BackupCompleteData
-   * @throws IOException exception
+   * Get all backup information passing the given filters, ordered by descending start time. I.e.
+   * from newest to oldest.
    */
-  public ArrayList<BackupInfo> getBackupHistory(boolean onlyCompleted) throws IOException {
+  public List<BackupInfo> getBackupHistory(BackupInfo.Filter... toInclude) throws IOException {
     LOG.trace("get backup history from backup system table");
-    BackupState state = onlyCompleted ? BackupState.COMPLETE : BackupState.ANY;
-    ArrayList<BackupInfo> list = getBackupInfos(state);
-    return BackupUtils.sortHistoryListDesc(list);
-  }
-
-  /**
-   * Get all backups history
-   * @return list of backup info
-   * @throws IOException if getting the backup history fails
-   */
-  public List<BackupInfo> getBackupHistory() throws IOException {
-    return getBackupHistory(false);
-  }
-
-  /**
-   * Get first n backup history records
-   * @param n number of records, if n== -1 - max number is ignored
-   * @return list of records
-   * @throws IOException if getting the backup history fails
-   */
-  public List<BackupInfo> getHistory(int n) throws IOException {
-    List<BackupInfo> history = getBackupHistory();
-    if (n == -1 || history.size() <= n) {
-      return history;
-    }
-    return Collections.unmodifiableList(history.subList(0, n));
-  }
-
-  /**
-   * Get backup history records filtered by list of filters.
-   * @param n max number of records, if n == -1 , then max number is ignored
-   * @param filters list of filters
-   * @return backup records
-   * @throws IOException if getting the backup history fails
-   */
-  public List<BackupInfo> getBackupHistory(int n, BackupInfo.Filter... filters) throws IOException {
-    if (filters.length == 0) {
-      return getHistory(n);
-    }
-
-    List<BackupInfo> history = getBackupHistory();
-    List<BackupInfo> result = new ArrayList<>();
-    for (BackupInfo bi : history) {
-      if (n >= 0 && result.size() == n) {
-        break;
-      }
-
-      boolean passed = true;
-      for (int i = 0; i < filters.length; i++) {
-        if (!filters[i].apply(bi)) {
-          passed = false;
-          break;
-        }
-      }
-      if (passed) {
-        result.add(bi);
-      }
-    }
-    return result;
+    List<BackupInfo> list = getBackupInfos(toInclude);
+    list.sort(Comparator.comparing(BackupInfo::getStartTs).reversed());
+    return list;
   }
 
   /**
-   * Retrieve all table names that are part of any known backup
+   * Retrieve all table names that are part of any known completed backup
    */
   public Set<TableName> getTablesIncludedInBackups() throws IOException {
-    Set<TableName> names = new HashSet<>();
-    List<BackupInfo> infos = getBackupHistory(true);
-    for (BackupInfo info : infos) {
-      // Incremental backups have the same tables as the preceding full backups
-      if (info.getType() == BackupType.FULL) {
-        names.addAll(info.getTableNames());
-      }
-    }
-    return names;
-  }
-
-  /**
-   * Get history for backup destination
-   * @param backupRoot backup destination path
-   * @return List of backup info
-   * @throws IOException if getting the backup history fails
-   */
-  public List<BackupInfo> getBackupHistory(String backupRoot) throws IOException {
-    ArrayList<BackupInfo> history = getBackupHistory(false);
-    for (Iterator<BackupInfo> iterator = history.iterator(); iterator.hasNext();) {
-      BackupInfo info = iterator.next();
-      if (!backupRoot.equals(info.getBackupRootDir())) {
-        iterator.remove();
-      }
-    }
-    return history;
-  }
-
-  /**
-   * Get history for a table
-   * @param name table name
-   * @return history for a table
-   * @throws IOException if getting the backup history fails
-   */
-  public List<BackupInfo> getBackupHistoryForTable(TableName name) throws IOException {
-    List<BackupInfo> history = getBackupHistory();
-    List<BackupInfo> tableHistory = new ArrayList<>();
-    for (BackupInfo info : history) {
-      List<TableName> tables = info.getTableNames();
-      if (tables.contains(name)) {
-        tableHistory.add(info);
-      }
-    }
-    return tableHistory;
+    // Incremental backups have the same tables as the preceding full backups
+    List<BackupInfo> infos =
+      getBackupInfos(withState(BackupState.COMPLETE), withType(BackupType.FULL));
+    return infos.stream().flatMap(info -> info.getTableNames().stream())
+      .collect(Collectors.toSet());
   }
 
   /**
@@ -722,7 +631,7 @@ public List<BackupInfo> getBackupHistoryForTable(TableName name) throws IOExcept
    */
   public Map<TableName, List<BackupInfo>> getBackupHistoryForTableSet(Set<TableName> set,
     String backupRoot) throws IOException {
-    List<BackupInfo> history = getBackupHistory(backupRoot);
+    List<BackupInfo> history = getBackupHistory(withRoot(backupRoot));
     Map<TableName, List<BackupInfo>> tableHistoryMap = new HashMap<>();
     for (BackupInfo info : history) {
       List<TableName> tables = info.getTableNames();
@@ -738,16 +647,27 @@ public Map<TableName, List<BackupInfo>> getBackupHistoryForTableSet(Set<TableNam
-  public ArrayList<BackupInfo> getBackupInfos(BackupState state) throws IOException {
+  public List<BackupInfo> getBackupInfos(BackupInfo.Filter... toInclude) throws IOException {
+    return getBackupInfos(Integer.MAX_VALUE, toInclude);
+  }
+
+  /**
+   * Get the first n backup infos passing the given filters (ordered by ascending backup id)
+   */
+  public List<BackupInfo> getBackupInfos(int n, BackupInfo.Filter... toInclude) throws IOException {
     LOG.trace("get backup infos from backup system table");
 
+    if (n <= 0) {
+      return Collections.emptyList();
+    }
+
+    Predicate<BackupInfo> combinedPredicate = Stream.of(toInclude)
+      .map(filter -> (Predicate<BackupInfo>) filter).reduce(Predicate::and).orElse(x -> true);
+
     Scan scan = createScanForBackupHistory();
-    ArrayList<BackupInfo> list = new ArrayList<>();
+    List<BackupInfo> list = new ArrayList<>();
     try (Table table = connection.getTable(tableName);
       ResultScanner scanner = table.getScanner(scan)) {
@@ -755,10 +675,12 @@ public ArrayList<BackupInfo> getBackupInfos(BackupState state) throws IOExceptio
       while ((res = scanner.next()) != null) {
         res.advance();
         BackupInfo context = cellToBackupInfo(res.current());
-        if (state != BackupState.ANY && context.getState() != state) {
-          continue;
+        if (combinedPredicate.test(context)) {
+          list.add(context);
+          if (list.size() == n) {
+            break;
+          }
         }
-        list.add(context);
       }
       return list;
     }
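Note: the reduction used in getBackupInfos above is the standard way to AND an arbitrary number of predicates. The map to Predicate<BackupInfo> is needed because Filter.and(...) returns a plain Predicate, so reducing a Stream of the Filter subtype with Predicate::and would not type-check, and orElse supplies the accept-everything default when no filter is passed. The same shape in isolation, on plain strings (the values are illustrative):

    import java.util.Arrays;
    import java.util.List;
    import java.util.function.Predicate;

    public final class PredicateReduceSketch {
      public static void main(String[] args) {
        List<Predicate<String>> filters =
          Arrays.asList(s -> s.startsWith("backup_"), s -> s.endsWith("_full"));

        // AND all predicates together; with an empty list this falls back to x -> true.
        Predicate<String> combined =
          filters.stream().reduce(Predicate::and).orElse(x -> true);

        System.out.println(combined.test("backup_20240101_full")); // true
        System.out.println(combined.test("backup_20240101_incr")); // false
      }
    }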
" diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java index c327172d1c92..033975c42484 100644 --- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java +++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java @@ -24,12 +24,12 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.TreeMap; import java.util.TreeSet; +import java.util.function.Predicate; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; @@ -494,24 +494,6 @@ public static String getTableBackupDir(String backupRootDir, String backupId, + Path.SEPARATOR; } - /** - * Sort history list by start time in descending order. - * @param historyList history list - * @return sorted list of BackupCompleteData - */ - public static ArrayList sortHistoryListDesc(ArrayList historyList) { - ArrayList list = new ArrayList<>(); - TreeMap map = new TreeMap<>(); - for (BackupInfo h : historyList) { - map.put(Long.toString(h.getStartTs()), h); - } - Iterator i = map.descendingKeySet().iterator(); - while (i.hasNext()) { - list.add(map.get(i.next())); - } - return list; - } - /** * Calls fs.listStatus() and treats FileNotFoundException as non-fatal This accommodates * differences between hadoop versions, where hadoop 1 does not throw a FileNotFoundException, and @@ -612,23 +594,11 @@ private long getTimestamp(String backupId) { public static List getHistory(Configuration conf, int n, Path backupRootPath, BackupInfo.Filter... filters) throws IOException { List infos = getHistory(conf, backupRootPath); - List ret = new ArrayList<>(); - for (BackupInfo info : infos) { - if (ret.size() == n) { - break; - } - boolean passed = true; - for (int i = 0; i < filters.length; i++) { - if (!filters[i].apply(info)) { - passed = false; - break; - } - } - if (passed) { - ret.add(info); - } - } - return ret; + + Predicate combinedPredicate = Stream.of(filters) + .map(filter -> (Predicate) filter).reduce(Predicate::and).orElse(x -> true); + + return infos.stream().filter(combinedPredicate).limit(n).toList(); } public static BackupInfo loadBackupInfo(Path backupRootPath, String backupId, FileSystem fs) diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java index fa624250929d..165003fdb5cb 100644 --- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java +++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java @@ -74,8 +74,7 @@ public void testBackupHistory() throws Exception { List history = getBackupAdmin().getHistory(10); assertTrue(findBackup(history, backupId)); - BackupInfo.Filter nullFilter = info -> true; - history = BackupUtils.getHistory(conf1, 10, new Path(BACKUP_ROOT_DIR), nullFilter); + history = BackupUtils.getHistory(conf1, 10, new Path(BACKUP_ROOT_DIR)); assertTrue(findBackup(history, backupId)); ByteArrayOutputStream baos = new ByteArrayOutputStream();