diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
index b5ccda21a44..6074f10df26 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
@@ -34,6 +34,11 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public class HFileContext implements HeapSize, Cloneable {
+  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
+      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference
+      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
+      // usesHBaseChecksum, includesMvcc, includesTags and compressTags
+      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG);
 
   public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
 
@@ -188,19 +193,13 @@ public class HFileContext implements HeapSize, Cloneable {
   }
 
   /**
-   * HeapSize implementation
-   * NOTE : The heapsize should be altered as and when new state variable are added
+   * HeapSize implementation. NOTE : The heapsize should be altered as and when new state variable
+   * are added
    * @return heap size of the HFileContext
    */
   @Override
   public long heapSize() {
-    long size = ClassSize.align(ClassSize.OBJECT +
-        // Algorithm reference, encodingon, checksumtype, Encryption.Context reference
-        5 * ClassSize.REFERENCE +
-        2 * Bytes.SIZEOF_INT +
-        // usesHBaseChecksum, includesMvcc, includesTags and compressTags
-        4 * Bytes.SIZEOF_BOOLEAN +
-        Bytes.SIZEOF_LONG);
+    long size = FIXED_OVERHEAD;
     if (this.hfileName != null) {
       size += ClassSize.STRING + this.hfileName.length();
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 8ab5649735f..c1a071bf27f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -113,6 +113,14 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 @InterfaceAudience.Private
 public class HFileBlock implements Cacheable {
   private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
+  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
+      // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
+      5 * ClassSize.REFERENCE +
+      // On-disk size, uncompressed size, and next block's on-disk size
+      // bytePerChecksum and onDiskDataSize
+      4 * Bytes.SIZEOF_INT +
+      // This and previous block offset
+      2 * Bytes.SIZEOF_LONG);
 
   // Block Header fields.
 
@@ -740,24 +748,12 @@ public class HFileBlock implements Cacheable {
 
   @Override
   public long heapSize() {
-    long size = ClassSize.align(
-        ClassSize.OBJECT +
-        // Block type, multi byte buffer, MemoryType and meta references
-        4 * ClassSize.REFERENCE +
-        // On-disk size, uncompressed size, and next block's on-disk size
-        // bytePerChecksum and onDiskDataSize
-        4 * Bytes.SIZEOF_INT +
-        // This and previous block offset
-        2 * Bytes.SIZEOF_LONG +
-        // Heap size of the meta object. meta will be always not null.
-        fileContext.heapSize()
-    );
-
+    long size = FIXED_OVERHEAD;
+    size += fileContext.heapSize();
     if (buf != null) {
       // Deep overhead of the byte buffer. Needs to be aligned separately.
       size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);
     }
-
     return ClassSize.align(size);
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 993503d567b..71ffb87c266 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -43,6 +43,8 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+import org.apache.hadoop.hbase.io.hfile.HFileBlock;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
 import org.apache.hadoop.hbase.regionserver.CSLMImmutableSegment;
@@ -515,6 +517,20 @@ public class TestHeapSize {
     // any of these classes are modified without updating overhead sizes.
   }
 
+  @Test
+  public void testHFileBlockSize() throws IOException {
+    long expected;
+    long actual;
+
+    actual = HFileContext.FIXED_OVERHEAD;
+    expected = ClassSize.estimateBase(HFileContext.class, false);
+    assertEquals(expected, actual);
+
+    actual = HFileBlock.FIXED_OVERHEAD;
+    expected = ClassSize.estimateBase(HFileBlock.class, false);
+    assertEquals(expected, actual);
+  }
+
   @Test
   public void testMutations(){
     Class cl;
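
Note for reviewers: the patch replaces the per-call ClassSize.align(...) arithmetic in heapSize() with a precomputed FIXED_OVERHEAD constant, and the new TestHeapSize case guards that constant against drift by comparing it with the reflection-based ClassSize.estimateBase(). The sketch below illustrates the same pattern on a hypothetical ExampleHolder class; the class, its fields, and the main method are illustrative only and not part of this patch. It assumes the ClassSize and Bytes utilities from hbase-common are on the classpath.

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

// Hypothetical class showing the FIXED_OVERHEAD pattern used by HFileContext and
// HFileBlock: compute the shallow instance size once from the field layout, then
// cross-check it against ClassSize.estimateBase(), which derives the same figure
// via reflection and fails loudly if a new field is added without updating it.
public class ExampleHolder {
  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
      // one object reference field (payload)
      ClassSize.REFERENCE +
      // two int fields (onDiskSize, offset)
      2 * Bytes.SIZEOF_INT +
      // one boolean field (cached)
      Bytes.SIZEOF_BOOLEAN);

  private Object payload;
  private int onDiskSize;
  private int offset;
  private boolean cached;

  public static void main(String[] args) {
    long estimated = ClassSize.estimateBase(ExampleHolder.class, false);
    if (estimated != FIXED_OVERHEAD) {
      throw new AssertionError("FIXED_OVERHEAD " + FIXED_OVERHEAD
          + " is stale; reflection-based estimate is " + estimated);
    }
    System.out.println("FIXED_OVERHEAD = " + FIXED_OVERHEAD);
  }
}

Precomputing the constant avoids redoing the arithmetic on every heapSize() call, while the estimateBase() comparison (here in main, in the patch inside testHFileBlockSize) catches the constant going stale as soon as the field layout changes.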