Skip to content

Commit

Permalink
HBASE-27794: Tooling for parsing/reading the prefetch files list file (
Browse files Browse the repository at this point in the history
…apache#5468)

Signed-off-by: Wellington Chevreuil
  • Loading branch information
Kota-SH authored and wchevreuil committed Nov 2, 2023
1 parent e77c1c3 commit 497ffe2
Show file tree
Hide file tree
Showing 18 changed files with 142 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -2629,4 +2629,9 @@ List<LogEntry> getLogEntries(Set<ServerName> serverNames, String logType, Server
* Flush master local region
*/
void flushMasterStore() throws IOException;

/**
 * Get the list of HFile names that are fully cached in the block cache of the given region
 * server.
 * @param serverName the region server to query
 * @return the names of the fully cached files on that server
 * @throws IOException if a remote or network exception occurs
 */
List<String> getCachedFilesList(ServerName serverName) throws IOException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -1125,4 +1125,9 @@ public List<LogEntry> getLogEntries(Set<ServerName> serverNames, String logType,
public void flushMasterStore() throws IOException {
get(admin.flushMasterStore());
}

@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
  // Block on the async admin's future and return its result.
  return get(admin.getCachedFilesList(serverName));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1843,4 +1843,9 @@ CompletableFuture<List<LogEntry>> getLogEntries(Set<ServerName> serverNames, Str
* Flush master local region
*/
CompletableFuture<Void> flushMasterStore();

/**
 * Get the list of HFile names that are fully cached in the block cache of the given region
 * server.
 * @param serverName the region server to query
 * @return a {@link CompletableFuture} that completes with the list of cached file names
 */
CompletableFuture<List<String>> getCachedFilesList(ServerName serverName);
}
Original file line number Diff line number Diff line change
Expand Up @@ -995,4 +995,9 @@ public CompletableFuture<List<LogEntry>> getLogEntries(Set<ServerName> serverNam
public CompletableFuture<Void> flushMasterStore() {
return wrap(rawAdmin.flushMasterStore());
}

@Override
public CompletableFuture<List<String>> getCachedFilesList(ServerName serverName) {
  // Delegate to the raw admin; wrap() applies this facade's standard future handling.
  return wrap(rawAdmin.getCachedFilesList(serverName));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactionSwitchResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -4519,4 +4521,15 @@ Void> call(controller, stub, request.build(),
(s, c, req, done) -> s.flushMasterStore(c, req, done), resp -> null))
.call();
}

@Override
public CompletableFuture<List<String>> getCachedFilesList(ServerName serverName) {
  // The request message carries no parameters; the target server is selected via
  // serverName() on the caller builder below.
  GetCachedFilesListRequest.Builder request = GetCachedFilesListRequest.newBuilder();
  return this.<List<String>> newAdminCaller()
    .action((controller, stub) -> this.<GetCachedFilesListRequest, GetCachedFilesListResponse,
      List<String>> adminCall(controller, stub, request.build(),
      (s, c, req, done) -> s.getCachedFilesList(c, req, done),
      // Extract the repeated cached_files field from the protobuf response.
      resp -> resp.getCachedFilesList()))
    .serverName(serverName).call();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponses;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -1780,6 +1782,21 @@ public static List<org.apache.hadoop.hbase.client.RegionInfo> getOnlineRegions(
return getRegionInfos(response);
}

/**
 * Fetch the list of fully cached file names from a region server via the admin protocol.
 * @param controller the RPC controller to use for the call
 * @param admin blocking admin stub for the target region server
 * @return a mutable copy of the file names reported by the server
 * @throws IOException if the RPC fails (unwrapped from the ServiceException)
 */
public static List<String> getCachedFilesList(final RpcController controller,
  final AdminService.BlockingInterface admin) throws IOException {
  GetCachedFilesListRequest request = GetCachedFilesListRequest.newBuilder().build();
  GetCachedFilesListResponse response = null;
  try {
    response = admin.getCachedFilesList(controller, request);
  } catch (ServiceException se) {
    // Convert the protobuf ServiceException back into the underlying IOException.
    throw getRemoteException(se);
  }
  return new ArrayList<>(response.getCachedFilesList());
}

/**
* Get the list of region info from a GetOnlineRegionResponse
* @param proto the GetOnlineRegionResponse
Expand Down
10 changes: 10 additions & 0 deletions hbase-protocol-shaded/src/main/protobuf/server/region/Admin.proto
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,13 @@ message ExecuteProceduresRequest {
message ExecuteProceduresResponse {
}

// Request for AdminService.GetCachedFilesList; carries no parameters.
message GetCachedFilesListRequest {
}

// Response listing the names of the files that are fully cached on the server.
message GetCachedFilesListResponse {
  repeated string cached_files = 1;
}

/**
* Slow/Large log (LogRequest) use-case specific RPC request. This request payload will be
* converted in bytes and sent to generic RPC API: GetLogEntries
Expand Down Expand Up @@ -405,4 +412,7 @@ service AdminService {
rpc GetLogEntries(LogRequest)
returns(LogEntry);

rpc GetCachedFilesList(GetCachedFilesListRequest)
returns(GetCachedFilesListResponse);

}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@
package org.apache.hadoop.hbase.io.hfile;

import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;

/**
Expand Down Expand Up @@ -161,4 +164,11 @@ default Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repe
default boolean isMetaBlock(BlockType blockType) {
return blockType != null && blockType.getCategory() != BlockType.BlockCategory.DATA;
}

/**
 * Returns the files that are fully cached, keyed by file name. The pair value carries
 * per-file metadata (NOTE(review): the semantics of the String/Long pair are defined by the
 * implementing cache — confirm against BucketCache). Caches that do not track fully cached
 * files keep this default and return {@link Optional#empty()}.
 */
default Optional<Map<String, Pair<String, Long>>> getFullyCachedFiles() {
  return Optional.empty();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,11 @@
package org.apache.hadoop.hbase.io.hfile;

import java.util.Iterator;
import java.util.Map;
import java.util.Optional;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;

/**
Expand Down Expand Up @@ -382,6 +385,14 @@ public BlockCache[] getBlockCaches() {
return new BlockCache[] { this.l1Cache, this.l2Cache };
}

/**
 * Returns the map of fully cached files. Only the L2 cache is consulted — file-level
 * cache tracking is maintained by the bucket cache, not the on-heap L1 cache.
 */
@Override
public Optional<Map<String, Pair<String, Long>>> getFullyCachedFiles() {
  return this.l2Cache.getFullyCachedFiles();
}

@Override
public void setMaxSize(long size) {
this.l1Cache.setMaxSize(size);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,8 +42,9 @@ public HFilePreadReader(ReaderContext context, HFileInfo fileInfo, CacheConfig c
final MutableBoolean fileAlreadyCached = new MutableBoolean(false);
Optional<BucketCache> bucketCacheOptional =
BucketCache.getBucketCacheFromCacheConfig(cacheConf);
bucketCacheOptional.ifPresent(bc -> fileAlreadyCached
.setValue(bc.getFullyCachedFiles().get(path.getName()) == null ? false : true));
bucketCacheOptional.flatMap(BucketCache::getFullyCachedFiles).ifPresent(fcf -> {
fileAlreadyCached.setValue(fcf.get(path.getName()) == null ? false : true);
});
// Prefetch file blocks upon open if requested
if (
cacheConf.shouldPrefetchOnOpen() && cacheIfCompactionsOff()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2002,8 +2002,9 @@ public AtomicBoolean getBackingMapValidated() {
return backingMapValidated;
}

public Map<String, Pair<String, Long>> getFullyCachedFiles() {
return fullyCachedFiles;
@Override
public Optional<Map<String, Pair<String, Long>>> getFullyCachedFiles() {
  // Always present for BucketCache: exposes the live tracking map, not a defensive copy.
  return Optional.of(fullyCachedFiles);
}

public static Optional<BucketCache> getBucketCacheFromCacheConfig(CacheConfig cacheConf) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -3575,6 +3577,12 @@ public ExecuteProceduresResponse executeProcedures(RpcController controller,
throw new ServiceException(new DoNotRetryIOException("Unsupported method on master"));
}

@Override
public GetCachedFilesListResponse getCachedFilesList(RpcController controller,
  GetCachedFilesListRequest request) throws ServiceException {
  // Region-server-only operation; the master rejects it like the other admin RPCs above.
  throw new ServiceException(new DoNotRetryIOException("Unsupported method on master"));
}

@Override
public GetLiveRegionServersResponse getLiveRegionServers(RpcController controller,
GetLiveRegionServersRequest request) throws ServiceException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -3933,4 +3935,15 @@ public void onConfigurationChange(Configuration conf) {
super.onConfigurationChange(conf);
setReloadableGuardrails(conf);
}

@Override
public GetCachedFilesListResponse getCachedFilesList(RpcController controller,
  GetCachedFilesListRequest request) throws ServiceException {
  // Gather the names of every fully cached file known to the block cache. The list stays
  // empty when no block cache is configured or nothing has been fully cached yet.
  List<String> cachedFileNames = new ArrayList<>();
  server.getBlockCache().flatMap(BlockCache::getFullyCachedFiles)
    .ifPresent(cached -> cachedFileNames.addAll(cached.keySet()));
  return GetCachedFilesListResponse.newBuilder().addAllCachedFiles(cachedFileNames).build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,18 +23,22 @@
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
Expand Down Expand Up @@ -112,7 +116,17 @@ public void testPrefetchPersistence() throws Exception {

// Default interval for cache persistence is 1000ms. So after 1000ms, both the persistence files
// should exist.
assertTrue(new File(testDir + "/bucket.persistence").exists());

HRegionServer regionServingRS = cluster.getRegionServer(1).getRegions(tableName).size() == 1
? cluster.getRegionServer(1)
: cluster.getRegionServer(0);

Admin admin = TEST_UTIL.getAdmin();
List<String> cachedFilesList = admin.getCachedFilesList(regionServingRS.getServerName());
assertEquals(1, cachedFilesList.size());
for (HStoreFile h : regionServingRS.getRegions().get(0).getStores().get(0).getStorefiles()) {
assertTrue(cachedFilesList.contains(h.getPath().getName()));
}

// Stop the RS
cluster.stopRegionServer(0);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ public void testPrefetchDoesntOverwork() throws Exception {
BlockCacheKey key = snapshot.keySet().stream().findFirst().get();
LOG.debug("removing block {}", key);
bc.getBackingMap().remove(key);
bc.getFullyCachedFiles().remove(storeFile.getName());
bc.getFullyCachedFiles().ifPresent(fcf -> fcf.remove(storeFile.getName()));
assertTrue(snapshot.size() > bc.getBackingMap().size());
LOG.debug("Third read should prefetch again, as we removed one block for the file.");
readStoreFile(storeFile);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -452,6 +454,12 @@ public GetOnlineRegionResponse getOnlineRegion(RpcController controller,
return null;
}

@Override
public GetCachedFilesListResponse getCachedFilesList(RpcController controller,
  GetCachedFilesListRequest request) throws ServiceException {
  // Test double: like getOnlineRegion above, this stub is not exercised and returns null.
  return null;
}

@Override
public List<HRegion> getRegions() {
return null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -969,6 +969,11 @@ public void flushMasterStore() throws IOException {
admin.flushMasterStore();
}

@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
  // Straight pass-through to the wrapped Admin instance.
  return admin.getCachedFilesList(serverName);
}

@Override
public boolean replicationPeerModificationSwitch(boolean on, boolean drainProcedures)
throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1345,6 +1345,11 @@ public void flushMasterStore() throws IOException {
throw new NotImplementedException("flushMasterStore not supported in ThriftAdmin");
}

@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
  // The Thrift gateway does not expose this server-level operation.
  throw new NotImplementedException("getCachedFilesList not supported in ThriftAdmin");
}

@Override
public boolean replicationPeerModificationSwitch(boolean on, boolean drainProcedures)
throws IOException {
Expand Down

0 comments on commit 497ffe2

Please sign in to comment.