Skip to content

Commit

Permalink
HDDS-11908. Snapshot diff DAG traversal should not skip node based on prefix presence (#7567)

Browse files Browse the repository at this point in the history
  • Loading branch information
swamirishi authored Dec 13, 2024
1 parent 16ba289 commit 66ccc25
Show file tree
Hide file tree
Showing 3 changed files with 35 additions and 47 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hdds.StringUtils;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
Expand Down Expand Up @@ -944,10 +943,6 @@ synchronized void internalGetSSTDiffList(
Preconditions.checkArgument(sameFiles.isEmpty(), "Set must be empty");
Preconditions.checkArgument(differentFiles.isEmpty(), "Set must be empty");

// Use source snapshot's table prefix. At this point Source and target's
// table prefix should be same.
Map<String, String> columnFamilyToPrefixMap = src.getTablePrefixes();

for (String fileName : srcSnapFiles) {
if (destSnapFiles.contains(fileName)) {
LOG.debug("Source '{}' and destination '{}' share the same SST '{}'",
Expand Down Expand Up @@ -1011,15 +1006,6 @@ synchronized void internalGetSSTDiffList(
}

for (CompactionNode nextNode : successors) {
if (shouldSkipNode(nextNode, columnFamilyToPrefixMap)) {
LOG.debug("Skipping next node: '{}' with startKey: '{}' and " +
"endKey: '{}' because it doesn't have keys related to " +
"columnFamilyToPrefixMap: '{}'.",
nextNode.getFileName(), nextNode.getStartKey(),
nextNode.getEndKey(), columnFamilyToPrefixMap);
continue;
}

if (sameFiles.contains(nextNode.getFileName()) ||
differentFiles.contains(nextNode.getFileName())) {
LOG.debug("Skipping known processed SST: {}",
Expand Down Expand Up @@ -1541,35 +1527,4 @@ private CompactionFileInfo toFileInfo(String sstFile,
return fileInfoBuilder.build();
}

/**
 * (Pre-change version, removed from RocksDBCheckpointDiffer by this commit.)
 * Decides whether a compaction DAG node can be skipped during snapshot-diff
 * traversal because its SST key range cannot contain keys belonging to the
 * requested column-family prefixes.
 *
 * @param node the compaction DAG node (one SST file) under consideration
 * @param columnFamilyToPrefixMap map of column-family name to key prefix
 *        used to filter relevant SST files
 * @return {@code true} only when the node is provably irrelevant
 *         (wrong column family, or its [startKey, endKey] range cannot
 *         contain the prefix); {@code false} whenever metadata is missing
 *         or the filter map is empty, so the node is still traversed.
 */
@VisibleForTesting
boolean shouldSkipNode(CompactionNode node,
Map<String, String> columnFamilyToPrefixMap) {
// This is for backward compatibility. Before the compaction log table
// migration, startKey, endKey and columnFamily information is not persisted
// in compaction log files.
// Also for the scenario when there is an exception in reading SST files
// for the file node.
if (node.getStartKey() == null || node.getEndKey() == null ||
node.getColumnFamily() == null) {
LOG.debug("Compaction node with fileName: {} doesn't have startKey, " +
"endKey and columnFamily details.", node.getFileName());
// Missing range/CF metadata: err on the side of NOT skipping.
return false;
}

// No filter supplied: nothing to exclude against, traverse everything.
if (MapUtils.isEmpty(columnFamilyToPrefixMap)) {
LOG.debug("Provided columnFamilyToPrefixMap is null or empty.");
return false;
}

// Node belongs to a column family outside the filter: safe to skip.
if (!columnFamilyToPrefixMap.containsKey(node.getColumnFamily())) {
LOG.debug("SstFile node: {} is for columnFamily: {} while filter map " +
"contains columnFamilies: {}.", node.getFileName(),
node.getColumnFamily(), columnFamilyToPrefixMap.keySet());
return true;
}

// Skip only when the prefix provably falls outside [startKey, endKey].
String keyPrefix = columnFamilyToPrefixMap.get(node.getColumnFamily());
return !RocksDiffUtils.isKeyWithPrefixPresent(keyPrefix, node.getStartKey(),
node.getEndKey());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
*/
package org.apache.ozone.rocksdiff;

import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hdds.utils.db.managed.ManagedOptions;
import org.apache.hadoop.hdds.utils.db.managed.ManagedReadOptions;
Expand Down Expand Up @@ -113,4 +115,35 @@ public static boolean doesSstFileContainKeyRange(String filepath,
}


/**
 * (Post-change version, added to RocksDiffUtils by this commit as a static
 * utility.) Decides whether a compaction DAG node can be skipped during
 * snapshot-diff traversal because its SST key range cannot contain keys
 * belonging to the requested column-family prefixes.
 *
 * @param node the compaction DAG node (one SST file) under consideration
 * @param columnFamilyToPrefixMap map of column-family name to key prefix
 *        used to filter relevant SST files
 * @return {@code true} only when the node is provably irrelevant
 *         (wrong column family, or its [startKey, endKey] range cannot
 *         contain the prefix); {@code false} whenever metadata is missing
 *         or the filter map is empty, so the node is still traversed.
 */
@VisibleForTesting
static boolean shouldSkipNode(CompactionNode node,
Map<String, String> columnFamilyToPrefixMap) {
// This is for backward compatibility. Before the compaction log table
// migration, startKey, endKey and columnFamily information is not persisted
// in compaction log files.
// Also for the scenario when there is an exception in reading SST files
// for the file node.
if (node.getStartKey() == null || node.getEndKey() == null ||
node.getColumnFamily() == null) {
LOG.debug("Compaction node with fileName: {} doesn't have startKey, " +
"endKey and columnFamily details.", node.getFileName());
// Missing range/CF metadata: err on the side of NOT skipping.
return false;
}

// No filter supplied: nothing to exclude against, traverse everything.
if (MapUtils.isEmpty(columnFamilyToPrefixMap)) {
LOG.debug("Provided columnFamilyToPrefixMap is null or empty.");
return false;
}

// Node belongs to a column family outside the filter: safe to skip.
if (!columnFamilyToPrefixMap.containsKey(node.getColumnFamily())) {
LOG.debug("SstFile node: {} is for columnFamily: {} while filter map " +
"contains columnFamilies: {}.", node.getFileName(),
node.getColumnFamily(), columnFamilyToPrefixMap.keySet());
return true;
}

// Skip only when the prefix provably falls outside [startKey, endKey].
// isKeyWithPrefixPresent is a sibling helper in RocksDiffUtils.
String keyPrefix = columnFamilyToPrefixMap.get(node.getColumnFamily());
return !isKeyWithPrefixPresent(keyPrefix, node.getStartKey(),
node.getEndKey());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1895,7 +1895,7 @@ public void testShouldSkipNode(Map<String, String> columnFamilyToPrefixMap,
.getCompactionNodeMap().values().stream()
.sorted(Comparator.comparing(CompactionNode::getFileName))
.map(node ->
rocksDBCheckpointDiffer.shouldSkipNode(node,
RocksDiffUtils.shouldSkipNode(node,
columnFamilyToPrefixMap))
.collect(Collectors.toList());

Expand Down Expand Up @@ -1932,7 +1932,7 @@ public void testShouldSkipNodeEdgeCase(

rocksDBCheckpointDiffer.loadAllCompactionLogs();

assertEquals(expectedResponse, rocksDBCheckpointDiffer.shouldSkipNode(node,
assertEquals(expectedResponse, RocksDiffUtils.shouldSkipNode(node,
columnFamilyToPrefixMap));
}

Expand Down

0 comments on commit 66ccc25

Please sign in to comment.