HBASE-1554 TestHeapSize failing on Hudson; second fix

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@789491 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-06-29 23:29:03 +00:00
parent 69f71c7d80
commit 9880f155e2
4 changed files with 125 additions and 19 deletions

org/apache/hadoop/hbase/io/hfile/CachedBlock.java

@@ -33,13 +33,12 @@ import org.apache.hadoop.hbase.util.ClassSize;
  * either instantiating as in-memory or handling the transition from single
  * to multiple access.
  */
-class CachedBlock implements HeapSize, Comparable<CachedBlock> {
+public class CachedBlock implements HeapSize, Comparable<CachedBlock> {
   public final static long PER_BLOCK_OVERHEAD = ClassSize.align(
-    ClassSize.OBJECT + (2 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) +
+    ClassSize.OBJECT + (3 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) +
     ClassSize.STRING + ClassSize.BYTE_BUFFER);
   static enum BlockPriority {
     /**
      * Accessed a single time (used for scan-resistance)
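
Note: the constant grows by one reference, which matches CachedBlock carrying three reference-typed fields (the block-name String, the ByteBuffer, and, presumably the one the old count missed, the BlockPriority field). A hand recomputation of the new value under 32-bit assumptions (REFERENCE = 4 as in the ClassSize hunk below, OBJECT = 2 * REFERENCE, ARRAY = 3 * REFERENCE, align() rounding up to a multiple of 8); the checker class itself is hypothetical and not part of this commit:

    // Hypothetical sanity check, not part of the patch: recompute the new
    // PER_BLOCK_OVERHEAD by hand for a 32-bit JVM.
    public class PerBlockOverheadCheck {
      // Same rounding rule ClassSize.align appears to use: round up to 8 bytes.
      static long align(long num) {
        return (long) (Math.ceil(num / 8.0) * 8);
      }
      public static void main(String[] args) {
        long REFERENCE = 4, OBJECT = 8, ARRAY = 12;                   // 32-bit
        long SIZEOF_INT = 4, SIZEOF_LONG = 8, SIZEOF_BOOLEAN = 1;
        long STRING = align(OBJECT + align(ARRAY) + 3 * SIZEOF_INT);  // 40
        long BYTE_BUFFER = align(OBJECT + align(REFERENCE) + align(ARRAY)
            + (5 * SIZEOF_INT) + (3 * SIZEOF_BOOLEAN) + SIZEOF_LONG); // 64
        long perBlockOverhead = align(OBJECT + (3 * REFERENCE)
            + (2 * SIZEOF_LONG) + STRING + BYTE_BUFFER);
        System.out.println(perBlockOverhead);  // 144 under these assumptions
      }
    }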

org/apache/hadoop/hbase/io/hfile/LruBlockCache.java

@@ -626,10 +626,16 @@ public class LruBlockCache implements BlockCache, HeapSize {
   }
   public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
-    (7 * Bytes.SIZEOF_LONG) + (5 * ClassSize.OBJECT) + Bytes.SIZEOF_BOOLEAN);
+    (3 * Bytes.SIZEOF_LONG) + (8 * ClassSize.REFERENCE) +
+    (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
+    + ClassSize.OBJECT);
   public final static long CACHE_FUDGE_FACTOR = 1024 * 10; // 10k
+  public final static long MAP_FIXED_OVERHEAD = ClassSize.align(
+    (2 * Bytes.SIZEOF_INT) + ClassSize.ARRAY + (6 * ClassSize.REFERENCE) +
+    ClassSize.OBJECT);
+  public final static long MAP_SEGMENT_OVERHEAD = ClassSize.align(
+    ClassSize.REFERENCE + ClassSize.OBJECT + (3 * Bytes.SIZEOF_INT) +
+    Bytes.SIZEOF_FLOAT + ClassSize.ARRAY);
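
Note: the rewritten CACHE_FIXED_OVERHEAD itemizes the cache's fields by type (three longs, eight references, five floats, one boolean) instead of charging five whole objects, and the two new MAP_* constants approximate the backing ConcurrentHashMap plus its per-segment cost. A sketch of how constants like these typically compose into a heap estimate; the helper below is illustrative only, since the diff does not show LruBlockCache's actual heapSize():

    import org.apache.hadoop.hbase.io.hfile.CachedBlock;
    import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

    // Hypothetical helper, not from the patch: compose the fixed and
    // per-item constants into a rough cache-overhead figure.
    public class CacheOverheadSketch {
      // mapSegments = segment count of the backing ConcurrentHashMap.
      static long approxOverhead(int mapSegments, long cachedBlocks) {
        return LruBlockCache.CACHE_FIXED_OVERHEAD
            + LruBlockCache.MAP_FIXED_OVERHEAD
            + (long) mapSegments * LruBlockCache.MAP_SEGMENT_OVERHEAD
            + cachedBlocks * CachedBlock.PER_BLOCK_OVERHEAD;
      }
    }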

org/apache/hadoop/hbase/util/ClassSize.java

@@ -82,24 +82,24 @@ public class ClassSize {
       REFERENCE = 4;
     }
+    OBJECT = 2 * REFERENCE;
+    ARRAY = 3 * REFERENCE;
-    ARRAYLIST = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT +
-        align(Bytes.SIZEOF_INT));
+    ARRAYLIST = align(OBJECT + align(REFERENCE) + align(ARRAY) +
+        (2 * Bytes.SIZEOF_INT));
-    BYTE_BUFFER = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT +
-        3 * Bytes.SIZEOF_BOOLEAN + 4 * Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG);
+    BYTE_BUFFER = align(OBJECT + align(REFERENCE) + align(ARRAY) +
+        (5 * Bytes.SIZEOF_INT) +
+        (3 * Bytes.SIZEOF_BOOLEAN) + Bytes.SIZEOF_LONG);
     INTEGER = align(OBJECT + Bytes.SIZEOF_INT);
     MAP_ENTRY = align(OBJECT + 5 * REFERENCE + Bytes.SIZEOF_BOOLEAN);
-    OBJECT = 2 * REFERENCE;
-    TREEMAP = align(OBJECT + (2 * Bytes.SIZEOF_INT) + align(7 * REFERENCE));
+    TREEMAP = align(OBJECT + 2 * Bytes.SIZEOF_INT + (5+2) * REFERENCE +
+        ClassSize.align(OBJECT + Bytes.SIZEOF_INT));
-    STRING = align(OBJECT + REFERENCE + 3 * Bytes.SIZEOF_INT);
+    STRING = align(OBJECT + align(ARRAY) + 3 * Bytes.SIZEOF_INT);
   }
   /**
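
Note: two things change in ClassSize. First, OBJECT's assignment moves up to just after REFERENCE is chosen: since these constants are assigned inside a static initializer, the old ordering (OBJECT assigned down between MAP_ENTRY and TREEMAP) suggests ARRAYLIST and BYTE_BUFFER were computed while OBJECT still held its default value of zero, exactly the kind of undersizing that would make TestHeapSize fail. Second, array headers get their own ARRAY constant (3 * REFERENCE) instead of being charged as a bare REFERENCE. A minimal, self-contained illustration of the ordering pitfall, assuming the fields are plain (non-final) statics as the moved assignment implies:

    // Illustration only: a static field read before its assignment in the
    // same initializer block yields the default value (0 for int).
    class InitOrder {
      static int OBJECT;
      static int ARRAYLIST;
      static {
        ARRAYLIST = OBJECT + 4;  // OBJECT is still 0 here, so ARRAYLIST == 4
        OBJECT = 8;              // assigned too late to affect ARRAYLIST
      }
      public static void main(String[] args) {
        System.out.println(ARRAYLIST);  // prints 4, not the intended 12
      }
    }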

org/apache/hadoop/hbase/io/TestHeapSize.java

@@ -1,11 +1,17 @@
 package org.apache.hadoop.hbase.io;
 import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.concurrent.ConcurrentHashMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.hfile.CachedBlock;
 import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -22,6 +28,81 @@ public class TestHeapSize extends TestCase {
   // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey
   // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey
+  /**
+   * Test our hard-coded sizing of native java objects
+   */
+  @SuppressWarnings("unchecked")
+  public void testNativeSizes() throws IOException {
+    Class cl = null;
+    long expected = 0L;
+    long actual = 0L;
+    // ArrayList
+    cl = ArrayList.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.ARRAYLIST;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    // ByteBuffer
+    cl = ByteBuffer.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.BYTE_BUFFER;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    // Integer
+    cl = Integer.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.INTEGER;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    // Map.Entry
+    // Interface is public, all others are not. Hard to size via ClassSize
+//    cl = Map.Entry.class;
+//    expected = ClassSize.estimateBase(cl, false);
+//    actual = ClassSize.MAP_ENTRY;
+//    if(expected != actual) {
+//      ClassSize.estimateBase(cl, true);
+//      assertEquals(expected, actual);
+//    }
+    // Object
+    cl = Object.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.OBJECT;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    // TreeMap
+    cl = TreeMap.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.TREEMAP;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    // String
+    cl = String.class;
+    expected = ClassSize.estimateBase(cl, false);
+    actual = ClassSize.STRING;
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+  }
   /**
    * Testing the classes that implements HeapSize and are a part of 0.20.
    * Some are not tested here for example BlockIndex which is tested in
@@ -37,7 +118,6 @@ public class TestHeapSize extends TestCase {
     //KeyValue
     cl = KeyValue.class;
     expected = ClassSize.estimateBase(cl, false);
     KeyValue kv = new KeyValue();
     actual = kv.heapSize();
     if(expected != actual) {
@@ -45,18 +125,39 @@ public class TestHeapSize extends TestCase {
       assertEquals(expected, actual);
     }
-    //LruBlockCache
+    //LruBlockCache Overhead
     cl = LruBlockCache.class;
+    actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
     expected = ClassSize.estimateBase(cl, false);
-    LruBlockCache c = new LruBlockCache(102400,1024);
-    //Since minimum size for the for a LruBlockCache is 1
-    //we need to remove one reference from the heapsize
-    actual = c.heapSize();// - ClassSize.REFERENCE_SIZE;
     if(expected != actual) {
       ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
+    // LruBlockCache Map Fixed Overhead
+    cl = ConcurrentHashMap.class;
+    actual = LruBlockCache.MAP_FIXED_OVERHEAD;
+    expected = ClassSize.estimateBase(cl, false);
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+    // CachedBlock Fixed Overhead
+    // We really need "deep" sizing but ClassSize does not do this.
+    // Perhaps we should do all these more in this style....
+    cl = CachedBlock.class;
+    actual = CachedBlock.PER_BLOCK_OVERHEAD;
+    expected = ClassSize.estimateBase(cl, false);
+    expected += ClassSize.estimateBase(String.class, false);
+    expected += ClassSize.estimateBase(ByteBuffer.class, false);
+    if(expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      ClassSize.estimateBase(String.class, true);
+      ClassSize.estimateBase(ByteBuffer.class, true);
+      assertEquals(expected, actual);
+    }
     //Put
     cl = Put.class;
     expected = ClassSize.estimateBase(cl, false);
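
Note: throughout the test the same guard pattern recurs: compute the reflection-based estimate quietly, compare it to the hand-coded constant, and only on a mismatch re-run estimateBase with its second argument set to true (which, as its use here implies, prints the per-field breakdown) before letting assertEquals fail. Shown standalone, with a hypothetical test-class name:

    import java.util.TreeMap;
    import junit.framework.TestCase;
    import org.apache.hadoop.hbase.util.ClassSize;

    public class TestTreeMapSizing extends TestCase {
      public void testTreeMapSizing() {
        long expected = ClassSize.estimateBase(TreeMap.class, false);
        long actual = ClassSize.TREEMAP;
        if (expected != actual) {
          // Recompute with the debug flag so the log shows the per-field
          // breakdown before the assertion fails.
          ClassSize.estimateBase(TreeMap.class, true);
          assertEquals(expected, actual);
        }
      }
    }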