Skip to content

Commit

Permalink
HBASE-27794: Tooling for parsing/reading the prefetch files list file
Browse files Browse the repository at this point in the history
  • Loading branch information
Shanmukha Kota committed Oct 25, 2023
1 parent 391dfda commit 153d50c
Show file tree
Hide file tree
Showing 14 changed files with 150 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -2615,4 +2615,9 @@ List<LogEntry> getLogEntries(Set<ServerName> serverNames, String logType, Server
* Flush master local region
*/
void flushMasterStore() throws IOException;

/**
 * Get the list of cached files
 * @param serverName the region server to query for its cached files
 * @return the list of cached file names reported by the given region server
 * @throws IOException if a remote or network exception occurs
 */
List<String> getCachedFilesList(ServerName serverName) throws IOException;
}
Original file line number Diff line number Diff line change
Expand Up @@ -1115,4 +1115,9 @@ public List<LogEntry> getLogEntries(Set<ServerName> serverNames, String logType,
public void flushMasterStore() throws IOException {
get(admin.flushMasterStore());
}

@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
  // Delegate to the async admin and block until the future completes.
  CompletableFuture<List<String>> future = admin.getCachedFilesList(serverName);
  return get(future);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1837,4 +1837,9 @@ CompletableFuture<List<LogEntry>> getLogEntries(Set<ServerName> serverNames, Str
* Flush master local region
*/
CompletableFuture<Void> flushMasterStore();

/**
 * Get the list of cached files
 * @param serverName the region server to query for its cached files
 * @return a {@link CompletableFuture} completed with the list of cached file names
 */
CompletableFuture<List<String>> getCachedFilesList(ServerName serverName);
}
Original file line number Diff line number Diff line change
Expand Up @@ -990,4 +990,9 @@ public CompletableFuture<List<LogEntry>> getLogEntries(Set<ServerName> serverNam
public CompletableFuture<Void> flushMasterStore() {
return wrap(rawAdmin.flushMasterStore());
}

@Override
public CompletableFuture<List<String>> getCachedFilesList(ServerName serverName) {
  // Forward to the raw admin; wrap() re-dispatches completion onto our executor.
  CompletableFuture<List<String>> rawFuture = rawAdmin.getCachedFilesList(serverName);
  return wrap(rawFuture);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactionSwitchResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -4453,4 +4455,15 @@ Void> call(controller, stub, request.build(),
(s, c, req, done) -> s.flushMasterStore(c, req, done), resp -> null))
.call();
}

@Override
public CompletableFuture<List<String>> getCachedFilesList(ServerName serverName) {
  // The request carries no fields; build it once up front.
  GetCachedFilesListRequest request = GetCachedFilesListRequest.newBuilder().build();
  return this.<List<String>> newAdminCaller()
    .action((controller, stub) -> this.<GetCachedFilesListRequest, GetCachedFilesListResponse,
      List<String>> adminCall(controller, stub, request,
      (s, c, req, done) -> s.getCachedFilesList(c, req, done),
      GetCachedFilesListResponse::getCachedFilesList))
    .serverName(serverName).call();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearSlowLogResponses;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -1780,6 +1782,21 @@ public static List<org.apache.hadoop.hbase.client.RegionInfo> getOnlineRegions(
return getRegionInfos(response);
}

/**
 * Get the list of cached files from the given region server via the admin RPC.
 * @param controller the RPC controller to use for the call
 * @param admin the blocking admin service stub
 * @return a mutable copy of the cached file names returned by the server
 * @throws IOException if the RPC fails (unwrapped from the ServiceException)
 */
public static List<String> getCachedFilesList(final RpcController controller,
  final AdminService.BlockingInterface admin) throws IOException {
  final GetCachedFilesListRequest request = GetCachedFilesListRequest.newBuilder().build();
  try {
    GetCachedFilesListResponse response = admin.getCachedFilesList(controller, request);
    return new ArrayList<>(response.getCachedFilesList());
  } catch (ServiceException se) {
    throw getRemoteException(se);
  }
}

/**
* Get the list of region info from a GetOnlineRegionResponse
* @param proto the GetOnlineRegionResponse
Expand Down
10 changes: 10 additions & 0 deletions hbase-protocol-shaded/src/main/protobuf/server/region/Admin.proto
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,13 @@ message ExecuteProceduresRequest {
message ExecuteProceduresResponse {
}

// Request for the list of files cached by a region server's block cache.
// No fields: the target server is implied by where the RPC is sent.
message GetCachedFilesListRequest {
}

// Response carrying the names of the cached files.
message GetCachedFilesListResponse {
repeated string cached_files = 1;
}

/**
* Slow/Large log (LogRequest) use-case specific RPC request. This request payload will be
* converted in bytes and sent to generic RPC API: GetLogEntries
Expand Down Expand Up @@ -405,4 +412,7 @@ service AdminService {
rpc GetLogEntries(LogRequest)
returns(LogEntry);

// Returns the list of files cached by this region server.
rpc GetCachedFilesList(GetCachedFilesListRequest)
returns(GetCachedFilesListResponse);

}
Original file line number Diff line number Diff line change
Expand Up @@ -1936,4 +1936,19 @@ public static Optional<BucketCache> getBuckedCacheFromCacheConfig(CacheConfig ca
return Optional.empty();
}

/**
 * Read the persisted bucket cache state file and return the names of the files recorded
 * in its prefetched-files map.
 * @return a mutable list of the prefetched file names from the persistence file
 * @throws IOException if the file cannot be read or the protobuf magic prefix is truncated
 */
public List<String> getCachedFilesList() throws IOException {
  File persistedFile = new File(persistencePath);
  try (FileInputStream fis = new FileInputStream(persistedFile)) {
    // The persistence file is prefixed with the PB magic bytes; step over them
    // before handing the stream to the protobuf parser.
    long magicLength = ProtobufMagic.lengthOfPBMagic();
    if (fis.skip(magicLength) != magicLength) {
      throw new IOException(
        "Incorrect number of bytes read while checking for protobuf magic number");
    }
    BucketCacheProtos.BucketCacheEntry proto =
      BucketCacheProtos.BucketCacheEntry.parseDelimitedFrom(fis);
    return new ArrayList<>(proto.getPrefetchedFilesMap().keySet());
  }
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -3563,6 +3565,11 @@ public ExecuteProceduresResponse executeProcedures(RpcController controller,
throw new ServiceException(new DoNotRetryIOException("Unsupported method on master"));
}

/**
 * Not supported on the master: the cached-files list is a region server concept.
 * @throws ServiceException always, wrapping a {@link DoNotRetryIOException}
 */
@Override
public GetCachedFilesListResponse getCachedFilesList(RpcController controller,
  GetCachedFilesListRequest request) throws ServiceException {
  throw new ServiceException(new DoNotRetryIOException("Unsupported method on master"));
}

@Override
public GetLiveRegionServersResponse getLiveRegionServers(RpcController controller,
GetLiveRegionServersRequest request) throws ServiceException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,8 @@
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.PriorityFunction;
import org.apache.hadoop.hbase.ipc.QosPriority;
Expand Down Expand Up @@ -169,6 +171,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -3933,4 +3937,29 @@ public void onConfigurationChange(Configuration conf) {
super.onConfigurationChange(conf);
setReloadableGuardrails(conf);
}

/**
 * Returns the list of files fully cached by this region server, as recorded in the
 * bucket cache persistence file (only when cache persistence is enabled).
 * @return a response containing the cached file names (possibly empty)
 * @throws ServiceException if no region/store is available to read a cache config from
 */
@Override
public GetCachedFilesListResponse getCachedFilesList(RpcController controller,
  GetCachedFilesListRequest request) throws ServiceException {
  GetCachedFilesListResponse.Builder responseBuilder = GetCachedFilesListResponse.newBuilder();
  List<String> fullyCachedFiles = new ArrayList<>();

  // Guard explicitly instead of catching IndexOutOfBoundsException/NullPointerException:
  // using exceptions for control flow hides genuine bugs and is slower.
  if (server.getRegions().isEmpty() || server.getRegions().get(0).getStores().isEmpty()) {
    LOG.error("Error fetching cache config, Region or Store doesn't exist!");
    throw new ServiceException(
      new DoNotRetryIOException("Error fetching cache config, Region or Store doesn't exist!"));
  }

  // Any store's cache config is representative; the block cache is shared server-wide.
  CacheConfig cacheConfig = server.getRegions().get(0).getStores().get(0).getCacheConfig();
  BucketCache.getBuckedCacheFromCacheConfig(cacheConfig).ifPresent(bucketCache -> {
    if (bucketCache.isCachePersistenceEnabled()) {
      LOG.info("Fetching fully cached files");
      try {
        fullyCachedFiles.addAll(bucketCache.getCachedFilesList());
      } catch (IOException e) {
        // Best-effort: log with the full stack trace and return what we have.
        LOG.warn("Exception occurred reading the cache persistence file", e);
      }
    }
  });
  return responseBuilder.addAllCachedFiles(fullyCachedFiles).build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,18 +23,23 @@
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
Expand All @@ -47,6 +52,8 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;

@Category({ IOTests.class, LargeTests.class })
public class TestPrefetchRSClose {

Expand Down Expand Up @@ -112,7 +119,20 @@ public void testPrefetchPersistence() throws Exception {

// Default interval for cache persistence is 1000ms. So after 1000ms, both the persistence files
// should exist.
assertTrue(new File(testDir + "/bucket.persistence").exists());

HRegionServer regionServingRS = cluster.getRegionServer(1).getRegions(tableName).size() == 1
? cluster.getRegionServer(1)
: cluster.getRegionServer(0);

Admin admin = TEST_UTIL.getAdmin();
File persistenceFile = new File(testDir + "/bucket.persistence");
assertTrue(persistenceFile.exists());
FileInputStream fis = new FileInputStream(persistenceFile);
fis.skip(ProtobufMagic.lengthOfPBMagic());
BucketCacheProtos.BucketCacheEntry proto =
BucketCacheProtos.BucketCacheEntry.parseDelimitedFrom(fis);
assertEquals(new ArrayList<>(proto.getPrefetchedFilesMap().keySet()),
admin.getCachedFilesList(regionServingRS.getServerName()));

// Stop the RS
cluster.stopRegionServer(0);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,8 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetCachedFilesListResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
Expand Down Expand Up @@ -452,6 +454,12 @@ public GetOnlineRegionResponse getOnlineRegion(RpcController controller,
return null;
}

@Override
public GetCachedFilesListResponse getCachedFilesList(RpcController controller,
  GetCachedFilesListRequest request) throws ServiceException {
  // Mock region server: this RPC is not exercised by tests using this stub.
  return null;
}

@Override
public List<HRegion> getRegions() {
return null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -959,6 +959,11 @@ public void flushMasterStore() throws IOException {
admin.flushMasterStore();
}

@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
  // Straight pass-through to the wrapped Admin instance.
  List<String> cachedFiles = admin.getCachedFilesList(serverName);
  return cachedFiles;
}

@Override
public boolean replicationPeerModificationSwitch(boolean on, boolean drainProcedures)
throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1335,6 +1335,11 @@ public void flushMasterStore() throws IOException {
throw new NotImplementedException("flushMasterStore not supported in ThriftAdmin");
}

@Override
public List<String> getCachedFilesList(ServerName serverName) throws IOException {
  // The Thrift gateway does not expose region-server admin RPCs.
  throw new NotImplementedException("getCachedFilesList not supported in ThriftAdmin");
}

@Override
public boolean replicationPeerModificationSwitch(boolean on, boolean drainProcedures)
throws IOException {
Expand Down

0 comments on commit 153d50c

Please sign in to comment.