From 995dbcb5fa4d7645ec079f5c9a07e9cfa1bc274b Mon Sep 17 00:00:00 2001
From: chenxu14
Date: Fri, 15 Nov 2019 14:25:42 +0800
Subject: [PATCH] HBASE-23296 Add CompositeBucketCache to support tiered BC

---
 .../hbase/io/hfile/MemcachedBlockCache.java   |   5 +
 .../hadoop/hbase/io/hfile/BlockCache.java     |   6 +
 .../hbase/io/hfile/BlockCacheFactory.java     |  77 ++--
 .../hadoop/hbase/io/hfile/CacheConfig.java    |   4 +
 .../hbase/io/hfile/CombinedBlockCache.java    | 346 +---------------
 .../hbase/io/hfile/CompositeBlockCache.java   | 379 ++++++++++++++++++
 .../hbase/io/hfile/CompositeBucketCache.java  |  40 ++
 .../hbase/io/hfile/HFileReaderImpl.java       |   6 +-
 .../hbase/io/hfile/bucket/BucketCache.java    |  39 +-
 .../hadoop/hbase/io/util/MemorySizeUtil.java  |  22 +-
 .../MetricsRegionServerWrapperImpl.java       |   4 +-
 .../hbase/io/hfile/TestCacheConfig.java       |   2 +-
 .../io/hfile/TestCombinedBlockCache.java      |  78 +---
 .../io/hfile/TestCompositeBlockCache.java     | 101 +++++
 .../io/hfile/TestCompositeBucketCache.java    |  48 +++
 .../regionserver/TestHeapMemoryManager.java   |   5 +
 16 files changed, 698 insertions(+), 464 deletions(-)
 create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBlockCache.java
 create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBucketCache.java
 create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBlockCache.java
 create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBucketCache.java

diff --git a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
index 6ccd138f70dc..06cacff6472a 100644
--- a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
+++ b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
@@ -156,6 +156,11 @@ public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching,
     return result;
   }
 
+  @Override
+  public boolean containsBlock(BlockCacheKey cacheKey) {
+    return client.get(cacheKey.toString(), tc) != null;
+  }
+
   @Override
   public boolean evictBlock(BlockCacheKey cacheKey) {
     try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
index 6849a9780ed5..93a35552e385 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
@@ -28,6 +28,10 @@
  */
 @InterfaceAudience.Private
 public interface BlockCache extends Iterable<CachedBlock> {
+  public enum CacheLevel {
+    L1, L2
+  }
+
   /**
    * Add block to cache.
    * @param cacheKey The block's cache key.
@@ -132,4 +136,6 @@ Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat,
    * @return The list of sub blockcaches that make up this one; returns null if no sub caches.
   */
  BlockCache [] getBlockCaches();
+
+  boolean containsBlock(BlockCacheKey cacheKey);
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.java
index 2b9732092ce9..3c4924f9dcfa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheFactory.java
@@ -25,6 +25,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.hfile.BlockCache.CacheLevel;
 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -64,6 +65,8 @@ public final class BlockCacheFactory {
   public static final String BUCKET_CACHE_WRITER_QUEUE_KEY =
       "hbase.bucketcache.writer.queuelength";
 
+  public static final String BUCKET_CACHE_COMPOSITE_KEY = "hbase.bucketcache.composite.enabled";
+
   /**
    * A comma-delimited array of values for use as bucket sizes.
    */
@@ -110,29 +113,35 @@ public static BlockCache createBlockCache(Configuration conf) {
         + "we will remove the deprecated config.", DEPRECATED_BLOCKCACHE_BLOCKSIZE_KEY,
         BLOCKCACHE_BLOCKSIZE_KEY);
     }
-    FirstLevelBlockCache l1Cache = createFirstLevelCache(conf);
+    BlockCache l1Cache = createFirstLevelCache(conf);
     if (l1Cache == null) {
       return null;
     }
-    boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);
-    if (useExternal) {
-      BlockCache l2CacheInstance = createExternalBlockcache(conf);
-      return l2CacheInstance == null ?
-        l1Cache :
-        new InclusiveCombinedBlockCache(l1Cache, l2CacheInstance);
+    if (conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT)) {
+      BlockCache l2Cache = createExternalBlockcache(conf);
+      return l2Cache == null ? l1Cache : new InclusiveCombinedBlockCache(
+          (FirstLevelBlockCache)l1Cache, l2Cache);
     } else {
       // otherwise use the bucket cache.
-      BucketCache bucketCache = createBucketCache(conf);
-      if (!conf.getBoolean("hbase.bucketcache.combinedcache.enabled", true)) {
-        // Non combined mode is off from 2.0
-        LOG.warn(
-            "From HBase 2.0 onwards only combined mode of LRU cache and bucket cache is available");
+      BucketCache l2Cache = createBucketCache(conf, CacheLevel.L2);
+      if (conf.getBoolean(BUCKET_CACHE_COMPOSITE_KEY, false)) {
+        return l2Cache == null ? l1Cache : new CompositeBucketCache((BucketCache)l1Cache, l2Cache);
+      } else {
+        if (!conf.getBoolean("hbase.bucketcache.combinedcache.enabled", true)) {
+          // Non combined mode is off from 2.0
+          LOG.warn("From HBase 2.0 onwards only combined mode of LRU cache and bucket"
+              + " cache is available");
+        }
+        return l2Cache == null ? l1Cache : new CombinedBlockCache(
+            (FirstLevelBlockCache)l1Cache, l2Cache);
       }
-      return bucketCache == null ? l1Cache : new CombinedBlockCache(l1Cache, bucketCache);
     }
   }
 
-  private static FirstLevelBlockCache createFirstLevelCache(final Configuration c) {
+  private static BlockCache createFirstLevelCache(final Configuration c) {
+    if (c.getBoolean(BUCKET_CACHE_COMPOSITE_KEY, false)) {
+      return createBucketCache(c, CacheLevel.L1);
+    }
     final long cacheSize = MemorySizeUtil.getOnHeapCacheSize(c);
     if (cacheSize < 0) {
       return null;
@@ -200,28 +209,48 @@ private static BlockCache createExternalBlockcache(Configuration c) {
 
   }
 
-  private static BucketCache createBucketCache(Configuration c) {
-    // Check for L2. ioengine name must be non-null.
-    String bucketCacheIOEngineName = c.get(BUCKET_CACHE_IOENGINE_KEY, null);
+  private static BucketCache createBucketCache(Configuration c, CacheLevel level) {
+    // The configured ioengine name must be non-null; otherwise no bucket cache is created.
+    String bucketCacheIOEngineName;
+    int writerThreads;
+    int writerQueueLen;
+    String persistentPath;
+    switch(level) {
+      case L1:
+        bucketCacheIOEngineName = c.get(CompositeBucketCache.IOENGINE_L1, null);
+        writerThreads = c.getInt(CompositeBucketCache.WRITER_THREADS_L1,
+            DEFAULT_BUCKET_CACHE_WRITER_THREADS);
+        writerQueueLen = c.getInt(CompositeBucketCache.WRITER_QUEUE_LENGTH_L1,
+            DEFAULT_BUCKET_CACHE_WRITER_QUEUE);
+        persistentPath = c.get(CompositeBucketCache.PERSISTENT_PATH_L1);
+        break;
+      case L2:
+      default:
+        bucketCacheIOEngineName = c.get(CompositeBucketCache.IOENGINE_L2,
+            c.get(BUCKET_CACHE_IOENGINE_KEY, null));
+        writerThreads = c.getInt(CompositeBucketCache.WRITER_THREADS_L2,
+            c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY, DEFAULT_BUCKET_CACHE_WRITER_THREADS));
+        writerQueueLen = c.getInt(CompositeBucketCache.WRITER_QUEUE_LENGTH_L2,
+            c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY, DEFAULT_BUCKET_CACHE_WRITER_QUEUE));
+        persistentPath = c.get(CompositeBucketCache.PERSISTENT_PATH_L2,
+            c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY));
+        break;
+    }
     if (bucketCacheIOEngineName == null || bucketCacheIOEngineName.length() <= 0) {
       return null;
     }
 
     int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);
-    final long bucketCacheSize = MemorySizeUtil.getBucketCacheSize(c);
+    final long bucketCacheSize = MemorySizeUtil.getBucketCacheSize(c, level);
     if (bucketCacheSize <= 0) {
       throw new IllegalStateException("bucketCacheSize <= 0; Check " +
           BUCKET_CACHE_SIZE_KEY + " setting and/or server java heap size");
     }
+
     if (c.get("hbase.bucketcache.percentage.in.combinedcache") != null) {
       LOG.warn("Configuration 'hbase.bucketcache.percentage.in.combinedcache' is no longer "
           + "respected. See comments in http://hbase.apache.org/book.html#_changes_of_note");
     }
-    int writerThreads = c.getInt(BUCKET_CACHE_WRITER_THREADS_KEY,
-        DEFAULT_BUCKET_CACHE_WRITER_THREADS);
-    int writerQueueLen = c.getInt(BUCKET_CACHE_WRITER_QUEUE_KEY,
-        DEFAULT_BUCKET_CACHE_WRITER_QUEUE);
-    String persistentPath = c.get(BUCKET_CACHE_PERSISTENT_PATH_KEY);
     String[] configuredBucketSizes = c.getStrings(BUCKET_CACHE_BUCKETS_KEY);
     int [] bucketSizes = null;
     if (configuredBucketSizes != null) {
@@ -248,7 +277,7 @@ private static BucketCache createBucketCache(Configuration c) {
     // Bucket cache logs its stats on creation internal to the constructor.
     bucketCache = new BucketCache(bucketCacheIOEngineName, bucketCacheSize, blockSize,
       bucketSizes, writerThreads, writerQueueLen, persistentPath,
-      ioErrorsTolerationDuration, c);
+      ioErrorsTolerationDuration, level, c);
     } catch (IOException ioex) {
       LOG.error("Can't instantiate bucket cache", ioex);
       throw new RuntimeException(ioex);
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index bb57fbe06b60..7d43aeffac34 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -371,6 +371,10 @@ public boolean isCombinedBlockCache() {
     return blockCache instanceof CombinedBlockCache;
   }
 
+  public boolean isCompositeBucketCache() {
+    return blockCache instanceof CompositeBucketCache;
+  }
+
   public ByteBuffAllocator getByteBuffAllocator() {
     return this.byteBuffAllocator;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
index 3691635902ac..52ffc8c5c1b4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java
@@ -18,16 +18,10 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
-import java.util.Iterator;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
-
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-
 /**
  * CombinedBlockCache is an abstraction layer that combines
  * {@link FirstLevelBlockCache} and {@link BucketCache}. The smaller lruCache is used
 * Metrics are the combined size and hits and misses of both caches.
 */
 @InterfaceAudience.Private
-public class CombinedBlockCache implements ResizableBlockCache, HeapSize {
-  protected final FirstLevelBlockCache l1Cache;
-  protected final BlockCache l2Cache;
-  protected final CombinedCacheStats combinedCacheStats;
+public class CombinedBlockCache extends CompositeBlockCache implements ResizableBlockCache {
 
   public CombinedBlockCache(FirstLevelBlockCache l1Cache, BlockCache l2Cache) {
-    this.l1Cache = l1Cache;
-    this.l2Cache = l2Cache;
-    this.combinedCacheStats = new CombinedCacheStats(l1Cache.getStats(),
-      l2Cache.getStats());
-  }
-
-  @Override
-  public long heapSize() {
-    long l2size = 0;
-    if (l2Cache instanceof HeapSize) {
-      l2size = ((HeapSize) l2Cache).heapSize();
-    }
-    return l1Cache.heapSize() + l2size;
-  }
-
-  @Override
-  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
-    boolean metaBlock = buf.getBlockType().getCategory() != BlockCategory.DATA;
-    if (metaBlock) {
-      l1Cache.cacheBlock(cacheKey, buf, inMemory);
-    } else {
-      l2Cache.cacheBlock(cacheKey, buf, inMemory);
-    }
-  }
-
-  @Override
-  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
-    cacheBlock(cacheKey, buf, false);
-  }
-
-  @Override
-  public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching,
-      boolean repeat, boolean updateCacheMetrics) {
-    // TODO: is there a hole here, or just awkwardness since in the lruCache getBlock
-    // we end up calling l2Cache.getBlock.
-    // We are not in a position to exactly look at LRU cache or BC as BlockType may not be getting
-    // passed always.
-    return l1Cache.containsBlock(cacheKey)?
-      l1Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics):
-      l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
-  }
-
-  @Override
-  public boolean evictBlock(BlockCacheKey cacheKey) {
-    return l1Cache.evictBlock(cacheKey) || l2Cache.evictBlock(cacheKey);
-  }
-
-  @Override
-  public int evictBlocksByHfileName(String hfileName) {
-    return l1Cache.evictBlocksByHfileName(hfileName)
-        + l2Cache.evictBlocksByHfileName(hfileName);
-  }
-
-  @Override
-  public CacheStats getStats() {
-    return this.combinedCacheStats;
-  }
-
-  @Override
-  public void shutdown() {
-    l1Cache.shutdown();
-    l2Cache.shutdown();
-  }
-
-  @Override
-  public long size() {
-    return l1Cache.size() + l2Cache.size();
-  }
-
-  @Override
-  public long getMaxSize() {
-    return l1Cache.getMaxSize() + l2Cache.getMaxSize();
-  }
-
-  @Override
-  public long getCurrentDataSize() {
-    return l1Cache.getCurrentDataSize() + l2Cache.getCurrentDataSize();
-  }
-
-  @Override
-  public long getFreeSize() {
-    return l1Cache.getFreeSize() + l2Cache.getFreeSize();
-  }
-
-  @Override
-  public long getCurrentSize() {
-    return l1Cache.getCurrentSize() + l2Cache.getCurrentSize();
-  }
-
-  @Override
-  public long getBlockCount() {
-    return l1Cache.getBlockCount() + l2Cache.getBlockCount();
-  }
-
-  @Override
-  public long getDataBlockCount() {
-    return l1Cache.getDataBlockCount() + l2Cache.getDataBlockCount();
-  }
-
-  public static class CombinedCacheStats extends CacheStats {
-    private final CacheStats lruCacheStats;
-    private final CacheStats bucketCacheStats;
-
-    CombinedCacheStats(CacheStats lbcStats, CacheStats fcStats) {
-      super("CombinedBlockCache");
-      this.lruCacheStats = lbcStats;
-      this.bucketCacheStats = fcStats;
-    }
-
-    public CacheStats getLruCacheStats() {
-      return this.lruCacheStats;
-    }
-
-    public CacheStats getBucketCacheStats() {
-      return this.bucketCacheStats;
-    }
-
-    @Override
-    public long getDataMissCount() {
-      return lruCacheStats.getDataMissCount() + bucketCacheStats.getDataMissCount();
-    }
-
-    @Override
-    public long getLeafIndexMissCount() {
-      return lruCacheStats.getLeafIndexMissCount() + bucketCacheStats.getLeafIndexMissCount();
-    }
-
-    @Override
-    public long getBloomChunkMissCount() {
-      return lruCacheStats.getBloomChunkMissCount() + bucketCacheStats.getBloomChunkMissCount();
-    }
-
-    @Override
-    public long getMetaMissCount() {
-      return lruCacheStats.getMetaMissCount() + bucketCacheStats.getMetaMissCount();
-    }
-
-    @Override
-    public long getRootIndexMissCount() {
-      return lruCacheStats.getRootIndexMissCount() + bucketCacheStats.getRootIndexMissCount();
-    }
-
-    @Override
-    public long getIntermediateIndexMissCount() {
-      return lruCacheStats.getIntermediateIndexMissCount() +
-          bucketCacheStats.getIntermediateIndexMissCount();
-    }
-
-    @Override
-    public long getFileInfoMissCount() {
-      return lruCacheStats.getFileInfoMissCount() + bucketCacheStats.getFileInfoMissCount();
-    }
-
-    @Override
-    public long getGeneralBloomMetaMissCount() {
-      return lruCacheStats.getGeneralBloomMetaMissCount() +
-          bucketCacheStats.getGeneralBloomMetaMissCount();
-    }
-
-    @Override
-    public long getDeleteFamilyBloomMissCount() {
-      return lruCacheStats.getDeleteFamilyBloomMissCount() +
-          bucketCacheStats.getDeleteFamilyBloomMissCount();
-    }
-
-    @Override
-    public long getTrailerMissCount() {
-      return lruCacheStats.getTrailerMissCount() + bucketCacheStats.getTrailerMissCount();
-    }
-
-    @Override
-    public long getDataHitCount() {
-      return lruCacheStats.getDataHitCount() + bucketCacheStats.getDataHitCount();
-    }
-
-    @Override
-    public long getLeafIndexHitCount() {
-      return lruCacheStats.getLeafIndexHitCount() + bucketCacheStats.getLeafIndexHitCount();
-    }
-
-    @Override
-    public long getBloomChunkHitCount() {
-      return lruCacheStats.getBloomChunkHitCount() + bucketCacheStats.getBloomChunkHitCount();
-    }
-
-    @Override
-    public long getMetaHitCount() {
-      return lruCacheStats.getMetaHitCount() + bucketCacheStats.getMetaHitCount();
-    }
-
-    @Override
-    public long getRootIndexHitCount() {
-      return lruCacheStats.getRootIndexHitCount() + bucketCacheStats.getRootIndexHitCount();
-    }
-
-    @Override
-    public long getIntermediateIndexHitCount() {
-      return lruCacheStats.getIntermediateIndexHitCount() +
-          bucketCacheStats.getIntermediateIndexHitCount();
-    }
-
-    @Override
-    public long getFileInfoHitCount() {
-      return lruCacheStats.getFileInfoHitCount() + bucketCacheStats.getFileInfoHitCount();
-    }
-
-    @Override
-    public long getGeneralBloomMetaHitCount() {
-      return lruCacheStats.getGeneralBloomMetaHitCount() +
-          bucketCacheStats.getGeneralBloomMetaHitCount();
-    }
-
-    @Override
-    public long getDeleteFamilyBloomHitCount() {
-      return lruCacheStats.getDeleteFamilyBloomHitCount() +
-          bucketCacheStats.getDeleteFamilyBloomHitCount();
-    }
-
-    @Override
-    public long getTrailerHitCount() {
-      return lruCacheStats.getTrailerHitCount() + bucketCacheStats.getTrailerHitCount();
-    }
-
-    @Override
-    public long getRequestCount() {
-      return lruCacheStats.getRequestCount()
-          + bucketCacheStats.getRequestCount();
-    }
-
-    @Override
-    public long getRequestCachingCount() {
-      return lruCacheStats.getRequestCachingCount()
-          + bucketCacheStats.getRequestCachingCount();
-    }
-
-    @Override
-    public long getMissCount() {
-      return lruCacheStats.getMissCount() + bucketCacheStats.getMissCount();
-    }
-
-    @Override
-    public long getPrimaryMissCount() {
-      return lruCacheStats.getPrimaryMissCount() + bucketCacheStats.getPrimaryMissCount();
-    }
-
-    @Override
-    public long getMissCachingCount() {
-      return lruCacheStats.getMissCachingCount()
-          + bucketCacheStats.getMissCachingCount();
-    }
-
-    @Override
-    public long getHitCount() {
-      return lruCacheStats.getHitCount() + bucketCacheStats.getHitCount();
-    }
-
-    @Override
-    public long getPrimaryHitCount() {
-      return lruCacheStats.getPrimaryHitCount() + bucketCacheStats.getPrimaryHitCount();
-    }
-    @Override
-    public long getHitCachingCount() {
-      return lruCacheStats.getHitCachingCount()
-          + bucketCacheStats.getHitCachingCount();
-    }
-
-    @Override
-    public long getEvictionCount() {
-      return lruCacheStats.getEvictionCount()
-          + bucketCacheStats.getEvictionCount();
-    }
-
-    @Override
-    public long getEvictedCount() {
-      return lruCacheStats.getEvictedCount()
-          + bucketCacheStats.getEvictedCount();
-    }
-
-    @Override
-    public long getPrimaryEvictedCount() {
-      return lruCacheStats.getPrimaryEvictedCount()
-          + bucketCacheStats.getPrimaryEvictedCount();
-    }
-
-    @Override
-    public void rollMetricsPeriod() {
-      lruCacheStats.rollMetricsPeriod();
-      bucketCacheStats.rollMetricsPeriod();
-    }
-
-    @Override
-    public long getFailedInserts() {
-      return lruCacheStats.getFailedInserts() + bucketCacheStats.getFailedInserts();
-    }
-
-    @Override
-    public long getSumHitCountsPastNPeriods() {
-      return lruCacheStats.getSumHitCountsPastNPeriods()
-          + bucketCacheStats.getSumHitCountsPastNPeriods();
-    }
-
-    @Override
-    public long getSumRequestCountsPastNPeriods() {
-      return lruCacheStats.getSumRequestCountsPastNPeriods()
-          + bucketCacheStats.getSumRequestCountsPastNPeriods();
-    }
-
-    @Override
-    public long getSumHitCachingCountsPastNPeriods() {
-      return lruCacheStats.getSumHitCachingCountsPastNPeriods()
-          + bucketCacheStats.getSumHitCachingCountsPastNPeriods();
-    }
-
-    @Override
-    public long getSumRequestCachingCountsPastNPeriods() {
-      return lruCacheStats.getSumRequestCachingCountsPastNPeriods()
-          + bucketCacheStats.getSumRequestCachingCountsPastNPeriods();
-    }
-  }
-
-  @Override
-  public Iterator<CachedBlock> iterator() {
-    return new BlockCachesIterator(getBlockCaches());
-  }
-
-  @Override
-  public BlockCache[] getBlockCaches() {
-    return new BlockCache [] {this.l1Cache, this.l2Cache};
+    super(l1Cache, l2Cache);
   }
 
   @Override
   public void setMaxSize(long size) {
-    this.l1Cache.setMaxSize(size);
+    ((FirstLevelBlockCache) l1Cache).setMaxSize(size);
   }
 
   @VisibleForTesting
@@ -387,6 +51,6 @@ public int getRpcRefCount(BlockCacheKey cacheKey) {
   }
 
   public FirstLevelBlockCache getFirstLevelCache() {
-    return l1Cache;
+    return (FirstLevelBlockCache) l1Cache;
   }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBlockCache.java
new file mode 100644
index 000000000000..cf0933e6187d
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBlockCache.java
@@ -0,0 +1,379 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.io.hfile;
+
+import java.util.Iterator;
+import org.apache.hadoop.hbase.io.HeapSize;
+import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
+import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+
+@InterfaceAudience.Private
+public class CompositeBlockCache implements BlockCache, HeapSize {
+  protected final BlockCache l1Cache;
+  protected final BlockCache l2Cache;
+  protected final CombinedCacheStats combinedCacheStats;
+
+  public CompositeBlockCache(BlockCache l1Cache, BlockCache l2Cache) {
+    this.l1Cache = l1Cache;
+    this.l2Cache = l2Cache;
+    this.combinedCacheStats = new CombinedCacheStats(l1Cache.getStats(),
+      l2Cache.getStats());
+  }
+
+  @Override
+  public long heapSize() {
+    long l1size = 0, l2size = 0;
+    if (l1Cache instanceof HeapSize) {
+      l1size = ((HeapSize) l1Cache).heapSize();
+    }
+    if (l2Cache instanceof HeapSize) {
+      l2size = ((HeapSize) l2Cache).heapSize();
+    }
+    return l1size + l2size;
+  }
+
+  @Override
+  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
+    boolean metaBlock = buf.getBlockType().getCategory() != BlockCategory.DATA;
+    if (metaBlock) {
+      l1Cache.cacheBlock(cacheKey, buf, inMemory);
+    } else {
+      l2Cache.cacheBlock(cacheKey, buf, inMemory);
+    }
+  }
+
+  @Override
+  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
+    cacheBlock(cacheKey, buf, false);
+  }
+
+  @Override
+  public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching,
+      boolean repeat, boolean updateCacheMetrics) {
+    // TODO: is there a hole here, or just awkwardness since in the lruCache getBlock
+    // we end up calling l2Cache.getBlock.
+    // We are not in a position to exactly look at LRU cache or BC as BlockType may not be getting
+    // passed always.
+    return l1Cache.containsBlock(cacheKey)?
+      l1Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics):
+      l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
+  }
+
+  @Override
+  public boolean evictBlock(BlockCacheKey cacheKey) {
+    return l1Cache.evictBlock(cacheKey) || l2Cache.evictBlock(cacheKey);
+  }
+
+  @Override
+  public int evictBlocksByHfileName(String hfileName) {
+    return l1Cache.evictBlocksByHfileName(hfileName)
+        + l2Cache.evictBlocksByHfileName(hfileName);
+  }
+
+  @Override
+  public CacheStats getStats() {
+    return this.combinedCacheStats;
+  }
+
+  @Override
+  public void shutdown() {
+    l1Cache.shutdown();
+    l2Cache.shutdown();
+  }
+
+  @Override
+  public long size() {
+    return l1Cache.size() + l2Cache.size();
+  }
+
+  @Override
+  public long getMaxSize() {
+    return l1Cache.getMaxSize() + l2Cache.getMaxSize();
+  }
+
+  @Override
+  public long getCurrentDataSize() {
+    return l1Cache.getCurrentDataSize() + l2Cache.getCurrentDataSize();
+  }
+
+  @Override
+  public long getFreeSize() {
+    return l1Cache.getFreeSize() + l2Cache.getFreeSize();
+  }
+
+  @Override
+  public long getCurrentSize() {
+    return l1Cache.getCurrentSize() + l2Cache.getCurrentSize();
+  }
+
+  @Override
+  public long getBlockCount() {
+    return l1Cache.getBlockCount() + l2Cache.getBlockCount();
+  }
+
+  @Override
+  public long getDataBlockCount() {
+    return l1Cache.getDataBlockCount() + l2Cache.getDataBlockCount();
+  }
+
+  public static class CombinedCacheStats extends CacheStats {
+    private final CacheStats l1CacheStats;
+    private final CacheStats l2CacheStats;
+
+    CombinedCacheStats(CacheStats l1Stats, CacheStats l2Stats) {
+      super("CombinedBlockCache");
+      this.l1CacheStats = l1Stats;
+      this.l2CacheStats = l2Stats;
+    }
+
+    public CacheStats getL1CacheStats() {
+      return this.l1CacheStats;
+    }
+
+    public CacheStats getL2CacheStats() {
+      return this.l2CacheStats;
+    }
+
+    public long getDataMissCount() {
+      return l1CacheStats.getDataMissCount() + l2CacheStats.getDataMissCount();
+    }
+
+    @Override
+    public long getLeafIndexMissCount() {
+      return l1CacheStats.getLeafIndexMissCount() + l2CacheStats.getLeafIndexMissCount();
+    }
+
+    @Override
+    public long getBloomChunkMissCount() {
+      return l1CacheStats.getBloomChunkMissCount() + l2CacheStats.getBloomChunkMissCount();
+    }
+
+    @Override
+    public long getMetaMissCount() {
+      return l1CacheStats.getMetaMissCount() + l2CacheStats.getMetaMissCount();
+    }
+
+    @Override
+    public long getRootIndexMissCount() {
+      return l1CacheStats.getRootIndexMissCount() + l2CacheStats.getRootIndexMissCount();
+    }
+
+    @Override
+    public long getIntermediateIndexMissCount() {
+      return l1CacheStats.getIntermediateIndexMissCount() +
+          l2CacheStats.getIntermediateIndexMissCount();
+    }
+
+    @Override
+    public long getFileInfoMissCount() {
+      return l1CacheStats.getFileInfoMissCount() + l2CacheStats.getFileInfoMissCount();
+    }
+
+    @Override
+    public long getGeneralBloomMetaMissCount() {
+      return l1CacheStats.getGeneralBloomMetaMissCount() +
+          l2CacheStats.getGeneralBloomMetaMissCount();
+    }
+
+    @Override
+    public long getDeleteFamilyBloomMissCount() {
+      return l1CacheStats.getDeleteFamilyBloomMissCount() +
+          l2CacheStats.getDeleteFamilyBloomMissCount();
+    }
+
+    @Override
+    public long getTrailerMissCount() {
+      return l1CacheStats.getTrailerMissCount() + l2CacheStats.getTrailerMissCount();
+    }
+
+    @Override
+    public long getDataHitCount() {
+      return l1CacheStats.getDataHitCount() + l2CacheStats.getDataHitCount();
+    }
+
+    @Override
+    public long getLeafIndexHitCount() {
+      return l1CacheStats.getLeafIndexHitCount() + l2CacheStats.getLeafIndexHitCount();
+    }
+
+    @Override
+    public long getBloomChunkHitCount() {
+      return l1CacheStats.getBloomChunkHitCount() + l2CacheStats.getBloomChunkHitCount();
+    }
+
+    @Override
+    public long getMetaHitCount() {
+      return l1CacheStats.getMetaHitCount() + l2CacheStats.getMetaHitCount();
+    }
+
+    @Override
+    public long getRootIndexHitCount() {
+      return l1CacheStats.getRootIndexHitCount() + l2CacheStats.getRootIndexHitCount();
+    }
+
+    @Override
+    public long getIntermediateIndexHitCount() {
+      return l1CacheStats.getIntermediateIndexHitCount() +
+          l2CacheStats.getIntermediateIndexHitCount();
+    }
+
+    @Override
+    public long getFileInfoHitCount() {
+      return l1CacheStats.getFileInfoHitCount() + l2CacheStats.getFileInfoHitCount();
+    }
+
+    @Override
+    public long getGeneralBloomMetaHitCount() {
+      return l1CacheStats.getGeneralBloomMetaHitCount() +
+          l2CacheStats.getGeneralBloomMetaHitCount();
+    }
+
+    @Override
+    public long getDeleteFamilyBloomHitCount() {
+      return l1CacheStats.getDeleteFamilyBloomHitCount() +
+          l2CacheStats.getDeleteFamilyBloomHitCount();
+    }
+
+    @Override
+    public long getTrailerHitCount() {
+      return l1CacheStats.getTrailerHitCount() + l2CacheStats.getTrailerHitCount();
+    }
+
+    @Override
+    public long getRequestCount() {
+      return l1CacheStats.getRequestCount()
+          + l2CacheStats.getRequestCount();
+    }
+
+    @Override
+    public long getRequestCachingCount() {
+      return l1CacheStats.getRequestCachingCount()
+          + l2CacheStats.getRequestCachingCount();
+    }
+
+    @Override
+    public long getMissCount() {
+      return l1CacheStats.getMissCount() + l2CacheStats.getMissCount();
+    }
+
+    @Override
+    public long getPrimaryMissCount() {
+      return l1CacheStats.getPrimaryMissCount() + l2CacheStats.getPrimaryMissCount();
+    }
+
+    @Override
+    public long getMissCachingCount() {
+      return l1CacheStats.getMissCachingCount()
+          + l2CacheStats.getMissCachingCount();
+    }
+
+    @Override
+    public long getHitCount() {
+      return l1CacheStats.getHitCount() + l2CacheStats.getHitCount();
+    }
+
+    @Override
+    public long getPrimaryHitCount() {
+      return l1CacheStats.getPrimaryHitCount() + l2CacheStats.getPrimaryHitCount();
+    }
+
+    @Override
+    public long getHitCachingCount() {
+      return l1CacheStats.getHitCachingCount()
+          + l2CacheStats.getHitCachingCount();
+    }
+
+    @Override
+    public long getEvictionCount() {
+      return l1CacheStats.getEvictionCount()
+          + l2CacheStats.getEvictionCount();
+    }
+
+    @Override
+    public long getEvictedCount() {
+      return l1CacheStats.getEvictedCount()
+          + l2CacheStats.getEvictedCount();
+    }
+
+    @Override
+    public long getPrimaryEvictedCount() {
+      return l1CacheStats.getPrimaryEvictedCount()
+          + l2CacheStats.getPrimaryEvictedCount();
+    }
+
+    @Override
+    public void rollMetricsPeriod() {
+      l1CacheStats.rollMetricsPeriod();
+      l2CacheStats.rollMetricsPeriod();
+    }
+
+    @Override
+    public long getFailedInserts() {
+      return l1CacheStats.getFailedInserts() + l2CacheStats.getFailedInserts();
+    }
+
+    @Override
+    public long getSumHitCountsPastNPeriods() {
+      return l1CacheStats.getSumHitCountsPastNPeriods()
+          + l2CacheStats.getSumHitCountsPastNPeriods();
+    }
+
+    @Override
+    public long getSumRequestCountsPastNPeriods() {
+      return l1CacheStats.getSumRequestCountsPastNPeriods()
+          + l2CacheStats.getSumRequestCountsPastNPeriods();
+    }
+
+    @Override
+    public long getSumHitCachingCountsPastNPeriods() {
+      return l1CacheStats.getSumHitCachingCountsPastNPeriods()
+          + l2CacheStats.getSumHitCachingCountsPastNPeriods();
+    }
+
+    @Override
+    public long getSumRequestCachingCountsPastNPeriods() {
+      return l1CacheStats.getSumRequestCachingCountsPastNPeriods()
+          + l2CacheStats.getSumRequestCachingCountsPastNPeriods();
+    }
+  }
+
+  @Override
+  public Iterator<CachedBlock> iterator() {
+    return new BlockCachesIterator(getBlockCaches());
+  }
+
+  @Override
+  public BlockCache[] getBlockCaches() {
+    return new BlockCache[] {this.l1Cache, this.l2Cache};
+  }
+
+  @VisibleForTesting
+  public int getRpcRefCount(BlockCacheKey cacheKey) {
+    return (this.l2Cache instanceof BucketCache)
+        ? ((BucketCache) this.l2Cache).getRpcRefCount(cacheKey)
+        : 0;
+  }
+
+  @Override
+  public boolean containsBlock(BlockCacheKey cacheKey) {
+    return this.l1Cache.containsBlock(cacheKey) || this.l2Cache.containsBlock(cacheKey);
+  }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBucketCache.java
new file mode 100644
index 000000000000..585c5597a03b
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CompositeBucketCache.java
@@ -0,0 +1,40 @@
+/**
+ * Copyright The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.io.hfile;
+
+import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
+public class CompositeBucketCache extends CompositeBlockCache {
+  public static final String IOENGINE_L1 = "hbase.bucketcache.l1.ioengine";
+  public static final String IOENGINE_L2 = "hbase.bucketcache.l2.ioengine";
+  public static final String CACHESIZE_L1 = "hbase.bucketcache.l1.size";
+  public static final String CACHESIZE_L2 = "hbase.bucketcache.l2.size";
+  public static final String WRITER_THREADS_L1 = "hbase.bucketcache.l1.writer.threads";
+  public static final String WRITER_THREADS_L2 = "hbase.bucketcache.l2.writer.threads";
+  public static final String WRITER_QUEUE_LENGTH_L1 = "hbase.bucketcache.l1.writer.queuelength";
+  public static final String WRITER_QUEUE_LENGTH_L2 = "hbase.bucketcache.l2.writer.queuelength";
+  public static final String PERSISTENT_PATH_L1 = "hbase.bucketcache.l1.persistent.path";
+  public static final String PERSISTENT_PATH_L2 = "hbase.bucketcache.l2.persistent.path";
+
+  public CompositeBucketCache(BucketCache l1Cache, BucketCache l2Cache) {
+    super(l1Cache, l2Cache);
+  }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
index 2416074a4cc9..1a1918a59403 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java
@@ -1234,8 +1234,8 @@ public HFileBlock getMetaBlock(String metaBlockName, boolean cacheBlock)
     }
 
     // Cache Miss, please load.
-    HFileBlock compressedBlock =
-        fsBlockReader.readBlockData(metaBlockOffset, blockSize, true, false, true);
+    HFileBlock compressedBlock = fsBlockReader.readBlockData(metaBlockOffset, blockSize,
+        true, false, shouldUseHeap(BlockType.META));
     HFileBlock uncompressedBlock = compressedBlock.unpack(hfileContext, fsBlockReader);
     if (compressedBlock != uncompressedBlock) {
       compressedBlock.release();
@@ -1258,7 +1258,7 @@
    * boolean, boolean)
    */
   private boolean shouldUseHeap(BlockType expectedBlockType) {
-    if (!cacheConf.getBlockCache().isPresent()) {
+    if (!cacheConf.getBlockCache().isPresent() || cacheConf.isCompositeBucketCache()) {
       return false;
     } else if (!cacheConf.isCombinedBlockCache()) {
       // Block to cache in LruBlockCache must be an heap one. So just allocate block memory from
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index a53227e26284..cf14fce1584a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -218,9 +218,7 @@ public class BucketCache implements BlockCache, HeapSize {
   });
 
   /** Statistics thread schedule pool (for heavy debugging, could remove) */
-  private transient final ScheduledExecutorService scheduleThreadPool =
-      Executors.newScheduledThreadPool(1,
-          new ThreadFactoryBuilder().setNameFormat("BucketCacheStatsExecutor").setDaemon(true).build());
+  private transient final ScheduledExecutorService scheduleThreadPool;
 
   // Allocate or free space for the block
   private transient BucketAllocator bucketAllocator;
@@ -253,15 +251,27 @@ public class BucketCache implements BlockCache, HeapSize {
    */
   private String algorithm;
 
+  @VisibleForTesting
   public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
       int writerThreadNum, int writerQLen, String persistencePath) throws IOException {
     this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,
-        persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, HBaseConfiguration.create());
+        persistencePath, DEFAULT_ERROR_TOLERATION_DURATION, CacheLevel.L2,
+        HBaseConfiguration.create());
   }
 
+  @VisibleForTesting
   public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
       int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,
       Configuration conf) throws IOException {
+    this(ioEngineName, capacity, blockSize, bucketSizes, writerThreadNum, writerQLen,
+        persistencePath, ioErrorsTolerationDuration, CacheLevel.L2, conf);
+  }
+
+  public BucketCache(String ioEngineName, long capacity, int blockSize, int[] bucketSizes,
+      int writerThreadNum, int writerQLen, String persistencePath, int ioErrorsTolerationDuration,
+      CacheLevel level, Configuration conf) throws IOException {
+    scheduleThreadPool = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder()
+        .setNameFormat("BucketCacheStatsExecutor-" + level).setDaemon(true).build());
     boolean useStrongRef = conf.getBoolean(STRONG_REF_KEY, STRONG_REF_DEFAULT);
     if (useStrongRef) {
       this.offsetLock = new IdReadWriteLockStrongRef<>();
@@ -286,9 +296,10 @@ public BucketCache(String ioEngineName, long capacity, int blockSize, int[] buck
 
     sanityCheckConfigs();
 
-    LOG.info("Instantiating BucketCache with acceptableFactor: " + acceptableFactor + ", minFactor: " + minFactor +
-      ", extraFreeFactor: " + extraFreeFactor + ", singleFactor: " + singleFactor + ", multiFactor: " + multiFactor +
-      ", memoryFactor: " + memoryFactor + ", useStrongRef: " + useStrongRef);
+    LOG.info("Instantiating BucketCache with acceptableFactor: {}, minFactor: {},"
+        + " extraFreeFactor: {}, singleFactor: {}, multiFactor: {}, memoryFactor: {},"
+        + " useStrongRef: {}, cacheLevel: {}", acceptableFactor, minFactor, extraFreeFactor,
+        singleFactor, multiFactor, memoryFactor, useStrongRef, level);
 
     this.cacheCapacity = capacity;
     this.persistencePath = persistencePath;
@@ -326,11 +337,10 @@ public BucketCache(String ioEngineName, long capacity, int blockSize, int[] buck
     // every five minutes.
     this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
         statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
 
-    LOG.info("Started bucket cache; ioengine=" + ioEngineName +
-        ", capacity=" + StringUtils.byteDesc(capacity) +
-        ", blockSize=" + StringUtils.byteDesc(blockSize) + ", writerThreadNum=" +
-        writerThreadNum + ", writerQLen=" + writerQLen + ", persistencePath=" +
-        persistencePath + ", bucketAllocator=" + this.bucketAllocator.getClass().getName());
+    LOG.info("Started bucket cache; cacheLevel={}, ioengine={}, capacity={}, blockSize={},"
+        + " writerThreadNum={}, writerQLen={}, persistencePath={}, bucketAllocator={}",
+        level, ioEngineName, StringUtils.byteDesc(capacity), StringUtils.byteDesc(blockSize),
+        writerThreadNum, writerQLen, persistencePath, this.bucketAllocator.getClass().getName());
   }
 
   private void sanityCheckConfigs() {
@@ -1526,6 +1536,11 @@ public BlockCache[] getBlockCaches() {
     return null;
   }
 
+  @Override
+  public boolean containsBlock(BlockCacheKey cacheKey) {
+    return ramCache.containsKey(cacheKey) || backingMap.containsKey(cacheKey);
+  }
+
   @VisibleForTesting
   public int getRpcRefCount(BlockCacheKey cacheKey) {
     BucketEntry bucketEntry = backingMap.get(cacheKey);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
index 471eb469b7e5..0000e1aced54 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/util/MemorySizeUtil.java
@@ -17,17 +17,21 @@
  */
 package org.apache.hadoop.hbase.io.util;
 
+import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY;
+
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryType;
 import java.lang.management.MemoryUsage;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.hfile.BlockCache.CacheLevel;
+import org.apache.hadoop.hbase.io.hfile.CompositeBucketCache;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
-import org.apache.hadoop.hbase.util.Pair;
 
 /**
  * Util class to calculate memory size for memstore, block cache(L1, L2) of RS.
@@ -237,9 +241,19 @@ public static long getOnHeapCacheSize(final Configuration conf) {
    * @param conf used to read config for bucket cache size. (< 1 is treated as % and > is treated as MiB)
    * @return the number of bytes to use for bucket cache, negative if disabled.
    */
-  public static long getBucketCacheSize(final Configuration conf) {
+  public static long getBucketCacheSize(final Configuration conf, final CacheLevel level) {
     // Size configured in MBs
-    float bucketCacheSize = conf.getFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 0F);
+    float bucketCacheSize;
+    switch(level) {
+      case L1:
+        bucketCacheSize = conf.getFloat(CompositeBucketCache.CACHESIZE_L1, 0F);
+        break;
+      case L2:
+      default:
+        bucketCacheSize = conf.getFloat(CompositeBucketCache.CACHESIZE_L2,
+            conf.getFloat(BUCKET_CACHE_SIZE_KEY, 0F));
+        break;
+    }
     if (bucketCacheSize < 1) {
       throw new IllegalArgumentException("Bucket Cache should be minimum 1 MB in size."
+ "Configure 'hbase.bucketcache.size' with > 1 value"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java index ad261021c0d7..dca819a82533 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java @@ -161,9 +161,9 @@ private void initBlockCache() { if (this.cacheStats != null) { if (this.cacheStats instanceof CombinedBlockCache.CombinedCacheStats) { l1Stats = ((CombinedBlockCache.CombinedCacheStats) this.cacheStats) - .getLruCacheStats(); + .getL1CacheStats(); l2Stats = ((CombinedBlockCache.CombinedCacheStats) this.cacheStats) - .getBucketCacheStats(); + .getL2CacheStats(); } else { l1Stats = this.cacheStats; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java index 5d66e9ac7df9..8c500199e70e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java @@ -326,7 +326,7 @@ public void testBucketCacheConfigL1L2Setup() { assertTrue(blockCache instanceof CombinedBlockCache); // TODO: Assert sizes allocated are right and proportions. CombinedBlockCache cbc = (CombinedBlockCache) blockCache; - FirstLevelBlockCache lbc = cbc.l1Cache; + FirstLevelBlockCache lbc = (FirstLevelBlockCache)cbc.l1Cache; assertEquals(lruExpectedSize, lbc.getMaxSize()); BlockCache bc = cbc.l2Cache; // getMaxSize comes back in bytes but we specified size in MB diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java index a086a3bda2e2..f94873e655fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCombinedBlockCache.java @@ -19,12 +19,9 @@ import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY; import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_SIZE_KEY; -import static org.junit.Assert.assertEquals; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.CombinedCacheStats; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Assert; import org.junit.ClassRule; @@ -32,84 +29,11 @@ import org.junit.experimental.categories.Category; @Category({SmallTests.class}) -public class TestCombinedBlockCache { - +public class TestCombinedBlockCache extends TestCompositeBlockCache { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestCombinedBlockCache.class); - private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); - - @Test - public void testCombinedCacheStats() { - CacheStats lruCacheStats = new CacheStats("lruCacheStats", 2); - CacheStats bucketCacheStats = new CacheStats("bucketCacheStats", 2); - CombinedCacheStats stats = - new CombinedCacheStats(lruCacheStats, bucketCacheStats); - - double delta = 0.01; - - // period 1: - // lru cache: 1 hit 
-    // bucket cache: 2 hit non-caching,1 miss non-caching/primary,1 fail insert
-    lruCacheStats.hit(true, true, BlockType.DATA);
-    lruCacheStats.miss(true, false, BlockType.DATA);
-    bucketCacheStats.hit(false,true, BlockType.DATA);
-    bucketCacheStats.hit(false,true, BlockType.DATA);
-    bucketCacheStats.miss(false, true, BlockType.DATA);
-
-    assertEquals(5, stats.getRequestCount());
-    assertEquals(2, stats.getRequestCachingCount());
-    assertEquals(2, stats.getMissCount());
-    assertEquals(1, stats.getPrimaryMissCount());
-    assertEquals(1, stats.getMissCachingCount());
-    assertEquals(3, stats.getHitCount());
-    assertEquals(3, stats.getPrimaryHitCount());
-    assertEquals(1, stats.getHitCachingCount());
-    assertEquals(0.6, stats.getHitRatio(), delta);
-    assertEquals(0.5, stats.getHitCachingRatio(), delta);
-    assertEquals(0.4, stats.getMissRatio(), delta);
-    assertEquals(0.5, stats.getMissCachingRatio(), delta);
-
-
-    // lru cache: 2 evicted, 1 evict
-    // bucket cache: 1 evict
-    lruCacheStats.evicted(1000, true);
-    lruCacheStats.evicted(1000, false);
-    lruCacheStats.evict();
-    bucketCacheStats.evict();
-    assertEquals(2, stats.getEvictionCount());
-    assertEquals(2, stats.getEvictedCount());
-    assertEquals(1, stats.getPrimaryEvictedCount());
-    assertEquals(1.0, stats.evictedPerEviction(), delta);
-
-    // lru cache: 1 fail insert
-    lruCacheStats.failInsert();
-    assertEquals(1, stats.getFailedInserts());
-
-    // rollMetricsPeriod
-    stats.rollMetricsPeriod();
-    assertEquals(3, stats.getSumHitCountsPastNPeriods());
-    assertEquals(5, stats.getSumRequestCountsPastNPeriods());
-    assertEquals(1, stats.getSumHitCachingCountsPastNPeriods());
-    assertEquals(2, stats.getSumRequestCachingCountsPastNPeriods());
-    assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
-    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
-
-    // period 2:
-    // lru cache: 3 hit caching
-    lruCacheStats.hit(true, true, BlockType.DATA);
-    lruCacheStats.hit(true, true, BlockType.DATA);
-    lruCacheStats.hit(true, true, BlockType.DATA);
-    stats.rollMetricsPeriod();
-    assertEquals(6, stats.getSumHitCountsPastNPeriods());
-    assertEquals(8, stats.getSumRequestCountsPastNPeriods());
-    assertEquals(4, stats.getSumHitCachingCountsPastNPeriods());
-    assertEquals(5, stats.getSumRequestCachingCountsPastNPeriods());
-    assertEquals(0.75, stats.getHitRatioPastNPeriods(), delta);
-    assertEquals(0.8, stats.getHitCachingRatioPastNPeriods(), delta);
-  }
-
   @Test
   public void testMultiThreadGetAndEvictBlock() throws Exception {
     Configuration conf = UTIL.getConfiguration();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBlockCache.java
new file mode 100644
index 000000000000..1debace4d379
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBlockCache.java
@@ -0,0 +1,101 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.hfile;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.io.hfile.CompositeBlockCache.CombinedCacheStats;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({SmallTests.class})
+public abstract class TestCompositeBlockCache {
+  protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  @Test
+  public void testCombinedCacheStats() {
+    CacheStats lruCacheStats = new CacheStats("lruCacheStats", 2);
+    CacheStats bucketCacheStats = new CacheStats("bucketCacheStats", 2);
+    CombinedCacheStats stats =
+        new CombinedCacheStats(lruCacheStats, bucketCacheStats);
+
+    double delta = 0.01;
+
+    // period 1:
+    // lru cache: 1 hit caching, 1 miss caching
+    // bucket cache: 2 hit non-caching,1 miss non-caching/primary,1 fail insert
+    lruCacheStats.hit(true, true, BlockType.DATA);
+    lruCacheStats.miss(true, false, BlockType.DATA);
+    bucketCacheStats.hit(false,true, BlockType.DATA);
+    bucketCacheStats.hit(false,true, BlockType.DATA);
+    bucketCacheStats.miss(false, true, BlockType.DATA);
+
+    assertEquals(5, stats.getRequestCount());
+    assertEquals(2, stats.getRequestCachingCount());
+    assertEquals(2, stats.getMissCount());
+    assertEquals(1, stats.getPrimaryMissCount());
+    assertEquals(1, stats.getMissCachingCount());
+    assertEquals(3, stats.getHitCount());
+    assertEquals(3, stats.getPrimaryHitCount());
+    assertEquals(1, stats.getHitCachingCount());
+    assertEquals(0.6, stats.getHitRatio(), delta);
+    assertEquals(0.5, stats.getHitCachingRatio(), delta);
+    assertEquals(0.4, stats.getMissRatio(), delta);
+    assertEquals(0.5, stats.getMissCachingRatio(), delta);
+
+
+    // lru cache: 2 evicted, 1 evict
+    // bucket cache: 1 evict
+    lruCacheStats.evicted(1000, true);
+    lruCacheStats.evicted(1000, false);
+    lruCacheStats.evict();
+    bucketCacheStats.evict();
+    assertEquals(2, stats.getEvictionCount());
+    assertEquals(2, stats.getEvictedCount());
+    assertEquals(1, stats.getPrimaryEvictedCount());
+    assertEquals(1.0, stats.evictedPerEviction(), delta);
+
+    // lru cache: 1 fail insert
+    lruCacheStats.failInsert();
+    assertEquals(1, stats.getFailedInserts());
+
+    // rollMetricsPeriod
+    stats.rollMetricsPeriod();
+    assertEquals(3, stats.getSumHitCountsPastNPeriods());
+    assertEquals(5, stats.getSumRequestCountsPastNPeriods());
+    assertEquals(1, stats.getSumHitCachingCountsPastNPeriods());
+    assertEquals(2, stats.getSumRequestCachingCountsPastNPeriods());
+    assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
+    assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
+
+    // period 2:
+    // lru cache: 3 hit caching
+    lruCacheStats.hit(true, true, BlockType.DATA);
+    lruCacheStats.hit(true, true, BlockType.DATA);
+    lruCacheStats.hit(true, true, BlockType.DATA);
+    stats.rollMetricsPeriod();
+    assertEquals(6, stats.getSumHitCountsPastNPeriods());
+    assertEquals(8, stats.getSumRequestCountsPastNPeriods());
+    assertEquals(4, stats.getSumHitCachingCountsPastNPeriods());
+    assertEquals(5, stats.getSumRequestCachingCountsPastNPeriods());
+    assertEquals(0.75, stats.getHitRatioPastNPeriods(), delta);
+    assertEquals(0.8, stats.getHitCachingRatioPastNPeriods(), delta);
+  }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBucketCache.java
new file mode 100644
index 000000000000..b5992dbf3a70
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCompositeBucketCache.java
@@ -0,0 +1,48 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.io.hfile;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.Assert;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({SmallTests.class})
+public class TestCompositeBucketCache extends TestCompositeBlockCache {
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(TestCompositeBucketCache.class);
+
+  @Test
+  public void testMultiThreadGetAndEvictBlock() throws Exception {
+    Configuration conf = UTIL.getConfiguration();
+    conf.setBoolean(BlockCacheFactory.BUCKET_CACHE_COMPOSITE_KEY, true);
+    conf.set(CompositeBucketCache.IOENGINE_L1, "offheap");
+    conf.set(CompositeBucketCache.IOENGINE_L2, "offheap");
+    conf.setInt(CompositeBucketCache.CACHESIZE_L1, 32);
+    conf.setInt(CompositeBucketCache.CACHESIZE_L2, 32);
+    BlockCache blockCache = BlockCacheFactory.createBlockCache(conf);
+    Assert.assertTrue(blockCache instanceof CompositeBucketCache);
+    TestLruBlockCache.testMultiThreadGetAndEvictBlockInternal(blockCache);
+  }
+
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
index 8c0ccbda6c3e..0a9233e28f31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHeapMemoryManager.java
@@ -738,6 +738,11 @@ public BlockCache[] getBlockCaches() {
     public void setTestBlockSize(long testBlockSize) {
       this.testBlockSize = testBlockSize;
     }
+
+    @Override
+    public boolean containsBlock(BlockCacheKey cacheKey) {
+      return false;
+    }
   }
 
   private static class MemstoreFlusherStub implements FlushRequester {
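
Usage sketch (illustration only, not part of the patch): the snippet below shows how the tiered cache introduced above would be wired up, using only the configuration keys and classes this patch defines (BUCKET_CACHE_COMPOSITE_KEY, CompositeBucketCache.IOENGINE_L1/L2, CACHESIZE_L1/L2) plus the existing BlockCacheFactory entry point, mirroring TestCompositeBucketCache. The ioengine values, file path, and sizes are placeholder assumptions, not tuning advice.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
import org.apache.hadoop.hbase.io.hfile.CompositeBucketCache;

public class CompositeBucketCacheSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Turn on the tiered BucketCache ("hbase.bucketcache.composite.enabled").
    conf.setBoolean(BlockCacheFactory.BUCKET_CACHE_COMPOSITE_KEY, true);
    // L1 tier on offheap memory, L2 tier on an SSD-backed file (the path is a
    // placeholder). Sizes are in MB, read by MemorySizeUtil.getBucketCacheSize.
    conf.set(CompositeBucketCache.IOENGINE_L1, "offheap");
    conf.setFloat(CompositeBucketCache.CACHESIZE_L1, 1024);
    conf.set(CompositeBucketCache.IOENGINE_L2, "file:/mnt/ssd/bucketcache.data");
    conf.setFloat(CompositeBucketCache.CACHESIZE_L2, 8192);
    // With the composite flag set, createFirstLevelCache() returns an L1
    // BucketCache and createBlockCache() wraps both tiers in a
    // CompositeBucketCache rather than a CombinedBlockCache.
    BlockCache cache = BlockCacheFactory.createBlockCache(conf);
    System.out.println(cache.getClass().getSimpleName()); // CompositeBucketCache
  }
}

Note the dispatch that makes the tiering work, per CompositeBlockCache above: cacheBlock() still routes meta blocks (index/bloom) to L1 and data blocks to L2, while getBlock() consults l1Cache.containsBlock(cacheKey) to pick the tier on reads — which is why this patch adds containsBlock to the BlockCache interface and to every implementation.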