HBASE-22435 Add a UT to address the HFileBlock#heapSize() in TestHeapSize
parent b00360731a
commit 9e5fc2b379
HFileContext.java

@@ -34,6 +34,11 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public class HFileContext implements HeapSize, Cloneable {
+  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
+      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference
+      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
+      // usesHBaseChecksum, includesMvcc, includesTags and compressTags
+      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG);
 
   public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
 
@@ -188,19 +193,13 @@ public class HFileContext implements HeapSize, Cloneable {
   }
 
   /**
-   * HeapSize implementation
-   * NOTE : The heapsize should be altered as and when new state variable are added
+   * HeapSize implementation. NOTE : The heapsize should be altered as and when new state variable
+   * are added
    * @return heap size of the HFileContext
    */
   @Override
   public long heapSize() {
-    long size = ClassSize.align(ClassSize.OBJECT +
-        // Algorithm reference, encodingon, checksumtype, Encryption.Context reference
-        5 * ClassSize.REFERENCE +
-        2 * Bytes.SIZEOF_INT +
-        // usesHBaseChecksum, includesMvcc, includesTags and compressTags
-        4 * Bytes.SIZEOF_BOOLEAN +
-        Bytes.SIZEOF_LONG);
+    long size = FIXED_OVERHEAD;
     if (this.hfileName != null) {
       size += ClassSize.STRING + this.hfileName.length();
     }
HFileBlock.java

@@ -113,6 +113,14 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 @InterfaceAudience.Private
 public class HFileBlock implements Cacheable {
   private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
+  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
+      // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
+      5 * ClassSize.REFERENCE +
+      // On-disk size, uncompressed size, and next block's on-disk size
+      // bytePerChecksum and onDiskDataSize
+      4 * Bytes.SIZEOF_INT +
+      // This and previous block offset
+      2 * Bytes.SIZEOF_LONG);
 
   // Block Header fields.
 
@@ -740,24 +748,12 @@ public class HFileBlock implements Cacheable {
 
   @Override
   public long heapSize() {
-    long size = ClassSize.align(
-        ClassSize.OBJECT +
-        // Block type, multi byte buffer, MemoryType and meta references
-        4 * ClassSize.REFERENCE +
-        // On-disk size, uncompressed size, and next block's on-disk size
-        // bytePerChecksum and onDiskDataSize
-        4 * Bytes.SIZEOF_INT +
-        // This and previous block offset
-        2 * Bytes.SIZEOF_LONG +
-        // Heap size of the meta object. meta will be always not null.
-        fileContext.heapSize()
-    );
-
+    long size = FIXED_OVERHEAD;
+    size += fileContext.heapSize();
     if (buf != null) {
       // Deep overhead of the byte buffer. Needs to be aligned separately.
       size += ClassSize.align(buf.capacity() + MULTI_BYTE_BUFFER_HEAP_SIZE);
     }
 
     return ClassSize.align(size);
   }
 
TestHeapSize.java

@@ -43,6 +43,8 @@ import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+import org.apache.hadoop.hbase.io.hfile.HFileBlock;
+import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.io.hfile.LruCachedBlock;
 import org.apache.hadoop.hbase.regionserver.CSLMImmutableSegment;
@@ -515,6 +517,20 @@ public class TestHeapSize {
     // any of these classes are modified without updating overhead sizes.
   }
 
+  @Test
+  public void testHFileBlockSize() throws IOException {
+    long expected;
+    long actual;
+
+    actual = HFileContext.FIXED_OVERHEAD;
+    expected = ClassSize.estimateBase(HFileContext.class, false);
+    assertEquals(expected, actual);
+
+    actual = HFileBlock.FIXED_OVERHEAD;
+    expected = ClassSize.estimateBase(HFileBlock.class, false);
+    assertEquals(expected, actual);
+  }
+
   @Test
   public void testMutations(){
     Class<?> cl;
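Note: the check the new testHFileBlockSize() relies on is the same one TestHeapSize already applies to other classes: a hand-maintained FIXED_OVERHEAD constant is compared against ClassSize.estimateBase(), which reflects over a class's declared fields and returns its aligned shallow size, so the test fails whenever fields are added without updating the constant. Below is a minimal standalone sketch of that pattern; the Sample class, FixedOverheadCheck harness, and its field layout are illustrative only and are not part of this commit.

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

public class FixedOverheadCheck {

  // Illustrative class: one object reference, one int and one long field.
  static class Sample {
    Object ref;
    int count;
    long offset;
  }

  // Hand-maintained shallow size, written the same way HFileContext and
  // HFileBlock declare their FIXED_OVERHEAD constants in this patch.
  static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
      ClassSize.REFERENCE + Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG);

  public static void main(String[] args) {
    // estimateBase() inspects the declared fields and reports the aligned
    // shallow size, flagging FIXED_OVERHEAD as stale when the fields change.
    long estimated = ClassSize.estimateBase(Sample.class, false);
    if (estimated != FIXED_OVERHEAD) {
      throw new AssertionError("stale FIXED_OVERHEAD: " + FIXED_OVERHEAD + " != " + estimated);
    }
    System.out.println("FIXED_OVERHEAD matches estimateBase(): " + estimated);
  }
}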