diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 899a681131f4..8f1bb3be7a5b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -1172,6 +1172,32 @@ public HFileBlock getCachedBlock(BlockCacheKey cacheKey, boolean cacheBlock, boo
           }
           return cachedBlock;
         }
+      } catch (Exception e) {
+        if (cachedBlock != null) {
+          returnAndEvictBlock(cache, cacheKey, cachedBlock);
+        }
+        LOG.warn("Failed retrieving block from cache with key {}. "
+          + "\n Evicting this block from cache and will read it from file system. "
+          + "\n Exception details: ", cacheKey, e);
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("Further tracing details for failed block cache retrieval:"
+            + "\n Complete File path - {}," + "\n Expected Block Type - {}, Actual Block Type - {},"
+            + "\n Cache compressed - {}" + "\n Header size (after deserialized from cache) - {}"
+            + "\n Size with header - {}" + "\n Uncompressed size without header - {} "
+            + "\n Total byte buffer size - {}" + "\n Encoding code - {}", this.path,
+            expectedBlockType, (cachedBlock != null ? cachedBlock.getBlockType() : "N/A"),
+            (expectedBlockType != null
+              ? cacheConf.shouldCacheCompressed(expectedBlockType.getCategory())
+              : "N/A"),
+            (cachedBlock != null ? cachedBlock.headerSize() : "N/A"),
+            (cachedBlock != null ? cachedBlock.getOnDiskSizeWithHeader() : "N/A"),
+            (cachedBlock != null ? cachedBlock.getUncompressedSizeWithoutHeader() : "N/A"),
+            (cachedBlock != null ? cachedBlock.getBufferReadOnly().limit() : "N/A"),
+            (cachedBlock != null
+              ? cachedBlock.getBufferReadOnly().getShort(cachedBlock.headerSize())
+              : "N/A"));
+        }
+        return null;
       } finally {
         // Count bytes read as cached block is being returned
         if (isScanMetricsEnabled && cachedBlock != null) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
index c87897de8187..6c84312cf599 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderImpl.java
@@ -18,7 +18,12 @@
 package org.apache.hadoop.hbase.io.hfile;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -116,6 +121,23 @@ public void testRecordBlockSize() throws IOException {
     }
   }
 
+  @Test
+  public void testReadWorksWhenCacheCorrupt() throws Exception {
+    BlockCache mockedCache = mock(BlockCache.class);
+    when(mockedCache.getBlock(any(), anyBoolean(), anyBoolean(), anyBoolean(), any()))
+      .thenThrow(new RuntimeException("Injected error"));
+    Path p = makeNewFile();
+    FileSystem fs = TEST_UTIL.getTestFileSystem();
+    Configuration conf = TEST_UTIL.getConfiguration();
+    HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf, mockedCache), true, conf);
+    long offset = 0;
+    while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
+      HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null, false);
+      assertNotNull(block);
+      offset += block.getOnDiskSizeWithHeader();
+    }
+  }
+
   @Test
   public void testSeekBefore() throws Exception {
     Path p = makeNewFile();
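
Note on the change: the patch turns any exception thrown by the block cache into a cache miss. The suspect block is evicted via `returnAndEvictBlock`, diagnostics are logged, and `getCachedBlock` returns `null`, so the caller falls back to reading the block from the filesystem. The same fail-open pattern, reduced to a self-contained sketch (all names below are illustrative, not HBase API):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

/** Illustrative sketch of the fail-open caching pattern this patch applies. */
final class FailOpenCache<K, V> {
  private final Map<K, V> cache = new ConcurrentHashMap<>();

  /** Returns the value for key, treating any cache failure as a cache miss. */
  V get(K key, Function<K, V> readFromSource) {
    try {
      V cached = cache.get(key);
      if (cached != null) {
        return cached;
      }
    } catch (RuntimeException e) {
      // A corrupt or failing cache must not fail the read: drop the suspect
      // entry so later lookups do not hit the same error, then fall through.
      cache.remove(key);
    }
    // Cache miss (or cache failure): read from the source of truth and repopulate.
    V value = readFromSource.apply(key);
    cache.put(key, value);
    return value;
  }
}
```

The new test exercises exactly this contract: it injects a mocked `BlockCache` whose `getBlock` always throws, then asserts that every `readBlock` call up to the load-on-open offset still returns a non-null block read from disk.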