HBASE-24659 Calculate FIXED_OVERHEAD automatically (#2018)
Co-authored-by: niuyulin <niuyulin@xiaomi.com>

Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: stack <stack@apache.org>
nyl3532016 authored Aug 6, 2020
1 parent 0ae125a commit f710d2d
Showing 7 changed files with 43 additions and 46 deletions.
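
Every file in this commit follows the same pattern: a hand-counted ClassSize.align(...) expression, which has to be updated whenever fields are added or removed, is replaced by a single ClassSize.estimateBase(...) call that derives the fixed overhead from the class's declared fields by reflection. Below is a minimal before/after sketch of that pattern; the CacheEntryExample class and its fields are hypothetical, while ClassSize and Bytes are the HBase utility classes used in the diff that follows.

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

// Hypothetical class, used only to illustrate the pattern applied in this commit.
public class CacheEntryExample {
  private Object payload;  // one reference
  private long offset;     // one long
  private boolean primary; // one boolean

  // Before: each field is counted by hand, so the constant can silently drift
  // out of date when fields are added or removed.
  public static final long MANUAL_OVERHEAD = ClassSize.align(
      ClassSize.OBJECT + ClassSize.REFERENCE + Bytes.SIZEOF_LONG + Bytes.SIZEOF_BOOLEAN);

  // After: the overhead is estimated from the declared fields via reflection,
  // so it tracks the class automatically.
  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(CacheEntryExample.class, false);
}

The new test at the end of the diff guards the cost of doing this in a static initializer: after a warm-up call that loads the class, a second estimateBase call is expected to finish in under 5 ms.
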
HFileContext.java
@@ -37,13 +37,7 @@
*/
@InterfaceAudience.Private
public class HFileContext implements HeapSize, Cloneable {
public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
// Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
// usesHBaseChecksum, includesMvcc, includesTags and compressTags
4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
//byte[] headers for column family and table name
2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);
public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileContext.class, false);

private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;

BlockCacheKey.java
@@ -19,7 +19,6 @@

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

/**
@@ -42,7 +41,8 @@ public BlockCacheKey(String hfileName, long offset) {
this(hfileName, offset, true, BlockType.DATA);
}

public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica, BlockType blockType) {
public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica,
BlockType blockType) {
this.isPrimaryReplicaBlock = isPrimaryReplica;
this.hfileName = hfileName;
this.offset = offset;
@@ -71,12 +71,7 @@ public String toString() {
return this.hfileName + '_' + this.offset;
}

public static final long FIXED_OVERHEAD = ClassSize.align(
ClassSize.OBJECT +
Bytes.SIZEOF_BOOLEAN +
ClassSize.REFERENCE + // this.hfileName
ClassSize.REFERENCE + // this.blockType
Bytes.SIZEOF_LONG); // this.offset
public static final long FIXED_OVERHEAD = ClassSize.estimateBase(BlockCacheKey.class, false);

/**
* Strings have two bytes per character due to default Java Unicode encoding
HFileBlock.java
@@ -113,14 +113,7 @@
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {
private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
// BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
5 * ClassSize.REFERENCE +
// On-disk size, uncompressed size, and next block's on-disk size
// bytePerChecksum and onDiskDataSize
4 * Bytes.SIZEOF_INT +
// This and previous block offset
2 * Bytes.SIZEOF_LONG);
public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileBlock.class, false);

// Block Header fields.

LruBlockCache.java
@@ -37,7 +37,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
@@ -435,7 +434,7 @@ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
map.put(cacheKey, cb);
long val = elements.incrementAndGet();
if (buf.getBlockType().isData()) {
dataBlockElements.increment();
dataBlockElements.increment();
}
if (LOG.isTraceEnabled()) {
long size = map.size();
@@ -492,7 +491,7 @@ private long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
heapsize *= -1;
}
if (bt != null && bt.isData()) {
dataBlockSize.add(heapsize);
dataBlockSize.add(heapsize);
}
return size.addAndGet(heapsize);
}
@@ -578,8 +577,9 @@ public int evictBlocksByHfileName(String hfileName) {
int numEvicted = 0;
for (BlockCacheKey key : map.keySet()) {
if (key.getHfileName().equals(hfileName)) {
if (evictBlock(key))
if (evictBlock(key)) {
++numEvicted;
}
}
}
if (victimHandler != null) {
@@ -652,7 +652,9 @@ long getOverhead() {
void evict() {

// Ensure only one eviction at a time
if(!evictionLock.tryLock()) return;
if (!evictionLock.tryLock()) {
return;
}

try {
evictionInProgress = true;
@@ -665,7 +667,9 @@ void evict() {
StringUtils.byteDesc(currentSize));
}

if (bytesToFree <= 0) return;
if (bytesToFree <= 0) {
return;
}

// Instantiate priority buckets
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
@@ -940,7 +944,9 @@ public void run() {
}
}
LruBlockCache cache = this.cache.get();
if (cache == null) break;
if (cache == null) {
break;
}
cache.evict();
}
}
@@ -1017,10 +1023,8 @@ public CacheStats getStats() {
return this.stats;
}

public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
(4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) +
(6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
+ ClassSize.OBJECT);
public final static long CACHE_FIXED_OVERHEAD =
ClassSize.estimateBase(LruBlockCache.class, false);

@Override
public long heapSize() {
@@ -1088,9 +1092,13 @@ public String getFilename() {
@Override
public int compareTo(CachedBlock other) {
int diff = this.getFilename().compareTo(other.getFilename());
if (diff != 0) return diff;
if (diff != 0) {
return diff;
}
diff = Long.compare(this.getOffset(), other.getOffset());
if (diff != 0) return diff;
if (diff != 0) {
return diff;
}
if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
}
HRegion.java
@@ -8456,12 +8456,7 @@ private static List<Cell> sort(List<Cell> cells, final CellComparator comparator
return cells;
}

public static final long FIXED_OVERHEAD = ClassSize.align(
ClassSize.OBJECT +
56 * ClassSize.REFERENCE +
3 * Bytes.SIZEOF_INT +
14 * Bytes.SIZEOF_LONG +
3 * Bytes.SIZEOF_BOOLEAN);
public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HRegion.class, false);

// woefully out of date - currently missing:
// 1 x HashMap - coprocessorServiceHandlers
HStore.java
@@ -2566,9 +2566,7 @@ public CacheConfig getCacheConfig() {
return this.cacheConf;
}

public static final long FIXED_OVERHEAD =
ClassSize.align(ClassSize.OBJECT + (29 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
+ (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HStore.class, false);

public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD
+ ClassSize.OBJECT + ClassSize.REENTRANT_LOCK
TestHeapSize.java
@@ -602,5 +602,19 @@ public void testObjectSize() throws IOException {
assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
}
}
}

@Test
public void testAutoCalcFixedOverHead() {
Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
HFileBlock.class, HStore.class, LruBlockCache.class };
for (Class cl : classList) {
// do estimate in advance to ensure class is loaded
ClassSize.estimateBase(cl, false);

long startTime = System.currentTimeMillis();
ClassSize.estimateBase(cl, false);
long endTime = System.currentTimeMillis();
assertTrue(endTime - startTime < 5);
}
}
}
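
If the automatically estimated constant ever needs to be reconciled with a hand count (for example while debugging a failing heapSize() assertion), ClassSize.estimateBase also takes a debug flag. The sketch below is a hedged usage example; the assumption is only that passing true makes ClassSize emit debug output about how the estimate was built, while the returned value is the same either way.

import org.apache.hadoop.hbase.io.hfile.HFileBlock;
import org.apache.hadoop.hbase.util.ClassSize;

public class EstimateBaseDebugExample {
  public static void main(String[] args) {
    // Second argument true: ask ClassSize for debug output about the estimate
    // (assumed to log the field/reference breakdown it used).
    long overhead = ClassSize.estimateBase(HFileBlock.class, true);
    System.out.println("HFileBlock FIXED_OVERHEAD estimate: " + overhead + " bytes");
  }
}
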
