HBASE-1591 HBASE-1554 broke org.apache.hadoop.hbase.io.hfile.TestLruBlockCache.testResizeBlockCache

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@789537 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-06-30 02:42:28 +00:00
parent 9880f155e2
commit ac9b0d924f
4 changed files with 90 additions and 64 deletions

LruBlockCache.java

@@ -212,8 +212,8 @@ public class LruBlockCache implements BlockCache, HeapSize {
     this.stats = new CacheStats();
     this.count = new AtomicLong(0);
     this.elements = new AtomicLong(0);
-    this.overhead = getOverhead(maxSize, blockSize, mapConcurrencyLevel);
-    this.size = new AtomicLong(0);
+    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
+    this.size = new AtomicLong(this.overhead);
     if(evictionThread) {
       this.evictionThread = new EvictionThread(this);
       this.evictionThread.start();
@@ -630,33 +630,16 @@ public class LruBlockCache implements BlockCache, HeapSize {
       (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
       + ClassSize.OBJECT);
-  public final static long CACHE_FUDGE_FACTOR = 1024 * 10; // 10k
-  public final static long MAP_FIXED_OVERHEAD = ClassSize.align(
-      (2 * Bytes.SIZEOF_INT) + ClassSize.ARRAY + (6 * ClassSize.REFERENCE) +
-      ClassSize.OBJECT);
-  public final static long MAP_SEGMENT_OVERHEAD = ClassSize.align(
-      ClassSize.REFERENCE + ClassSize.OBJECT + (3 * Bytes.SIZEOF_INT) +
-      Bytes.SIZEOF_FLOAT + ClassSize.ARRAY);
-  public final static long MAP_ENTRY_OVERHEAD = ClassSize.align(
-      ClassSize.REFERENCE + ClassSize.OBJECT + (3 * ClassSize.REFERENCE) +
-      (2 * Bytes.SIZEOF_INT));

   // HeapSize implementation
   public long heapSize() {
-    return getCurrentSize() + overhead;
-  }
-
-  public long cacheSize() {
     return getCurrentSize();
   }

-  public static long getOverhead(long maxSize, long blockSize, int concurrency){
-    return CACHE_FIXED_OVERHEAD + CACHE_FUDGE_FACTOR +
-        ((int)Math.ceil(maxSize*1.2/blockSize) * MAP_ENTRY_OVERHEAD) +
-        (concurrency * MAP_SEGMENT_OVERHEAD);
+  public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
+    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
+        ((int)Math.ceil(maxSize*1.2/blockSize)
+            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
+        (concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
   }

   // Simple calculators of sizes given factors and maxSize
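The substance of the change above: the size counter used to start at zero while heapSize() added the map overhead back in, so eviction decisions (driven by the bare counter) and heapSize() disagreed by exactly the overhead. Seeding the counter with calculateOverhead() makes heapSize() the plain counter and lets the tests compute expected sizes directly. A minimal sketch of the corrected accounting, with stand-in constants; SketchCache and all figures here are illustrative, not the HBase class:

import java.util.concurrent.atomic.AtomicLong;

class SketchCache {
  private final long overhead;   // fixed cost of the cache and its map
  private final AtomicLong size; // seeded with overhead, grows per block

  SketchCache(long maxSize, long blockSize, int concurrency) {
    this.overhead = calculateOverhead(maxSize, blockSize, concurrency);
    this.size = new AtomicLong(this.overhead); // was new AtomicLong(0)
  }

  long cacheBlock(long blockHeapSize) {
    return size.addAndGet(blockHeapSize);
  }

  // Before the fix this was getCurrentSize() + overhead; now the counter
  // already carries the overhead, so no correction term is needed.
  long heapSize() {
    return size.get();
  }

  // Stand-in figures; the real method sums ClassSize.CONCURRENT_HASHMAP,
  // _ENTRY and _SEGMENT values computed at class-load time.
  static long calculateOverhead(long maxSize, long blockSize, int concurrency) {
    final long FIXED = 400, MAP = 56, ENTRY = 32, SEGMENT = 40;
    return FIXED + MAP
        + (long) Math.ceil(maxSize * 1.2 / blockSize) * ENTRY
        + (long) concurrency * SEGMENT;
  }

  public static void main(String[] args) {
    SketchCache c = new SketchCache(100000, 10000, 16);
    c.cacheBlock(10000);
    System.out.println(c.heapSize()); // overhead + 10000, no double count
  }
}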

ClassSize.java

@@ -64,6 +64,15 @@ public class ClassSize {
   /** Overhead for TreeMap */
   public static int TREEMAP = 0;

+  /** Overhead for ConcurrentHashMap */
+  public static int CONCURRENT_HASHMAP = 0;
+
+  /** Overhead for ConcurrentHashMap.Entry */
+  public static int CONCURRENT_HASHMAP_ENTRY = 0;
+
+  /** Overhead for ConcurrentHashMap.Segment */
+  public static int CONCURRENT_HASHMAP_SEGMENT = 0;
+
   private static final String THIRTY_TWO = "32";
@@ -86,7 +95,7 @@ public class ClassSize {
     ARRAY = 3 * REFERENCE;

-    ARRAYLIST = align(OBJECT + align(REFERENCE) + align(ARRAY) +
+    ARRAYLIST = align(OBJECT + align(REFERENCE) + align(ARRAY) +
         (2 * Bytes.SIZEOF_INT));

     BYTE_BUFFER = align(OBJECT + align(REFERENCE) + align(ARRAY) +
@@ -99,7 +108,16 @@ public class ClassSize {
     TREEMAP = align(OBJECT + (2 * Bytes.SIZEOF_INT) + align(7 * REFERENCE));

-    STRING = align(OBJECT + align(ARRAY) + 3 * Bytes.SIZEOF_INT);
+    STRING = align(OBJECT + ARRAY + REFERENCE + 3 * Bytes.SIZEOF_INT);
+
+    CONCURRENT_HASHMAP = align((2 * Bytes.SIZEOF_INT) + ARRAY +
+        (6 * REFERENCE) + OBJECT);
+
+    CONCURRENT_HASHMAP_ENTRY = align(REFERENCE + OBJECT + (3 * REFERENCE) +
+        (2 * Bytes.SIZEOF_INT));
+
+    CONCURRENT_HASHMAP_SEGMENT = align(REFERENCE + OBJECT +
+        (3 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_FLOAT + ARRAY);
   }

   /**
@@ -186,8 +204,8 @@ public class ClassSize {
     if (LOG.isDebugEnabled()) {
       // Write out region name as string and its encoded name.
       LOG.debug("Primitives " + coeff[0] + ", arrays " + coeff[1] +
-          ", references(inlcuding " + nrOfRefsPerObj +
-          ", for object overhead) " + coeff[2] + ", refSize " + REFERENCE +
+          ", references(includes " + nrOfRefsPerObj +
+          " for object overhead) " + coeff[2] + ", refSize " + REFERENCE +
           ", size " + size);
     }
   }
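The new constants follow the pattern of the existing ones: count the object header, references, and primitives in the JDK class's layout, then align to the allocation granularity. A self-contained sketch with illustrative 32-bit sizes (4-byte references, 8-byte headers; the real values are chosen at class-load time from the JVM's data model):

public class ClassSizeSketch {
  static final int REFERENCE = 4;           // 32-bit reference, assumed
  static final int OBJECT = 8;              // object header, assumed
  static final int ARRAY = 3 * REFERENCE;   // as in the diff above
  static final int SIZEOF_INT = 4;
  static final int SIZEOF_FLOAT = 4;

  // Round up to an 8-byte boundary; HotSpot allocates in 8-byte chunks.
  static int align(int num) {
    return ((num + 7) >> 3) << 3;
  }

  public static void main(String[] args) {
    // ConcurrentHashMap: 2 ints, a table array, 6 references, header
    int map = align((2 * SIZEOF_INT) + ARRAY + (6 * REFERENCE) + OBJECT);
    // HashEntry: key/value/next references plus hash ints and header
    int entry = align(REFERENCE + OBJECT + (3 * REFERENCE) + (2 * SIZEOF_INT));
    // Segment: count/modCount/threshold ints, loadFactor, table array
    int segment = align(REFERENCE + OBJECT + (3 * SIZEOF_INT) + SIZEOF_FLOAT + ARRAY);
    System.out.println(map + " " + entry + " " + segment); // 56 32 40
  }
}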

TestHeapSize.java

@@ -136,7 +136,7 @@ public class TestHeapSize extends TestCase {
     // LruBlockCache Map Fixed Overhead
     cl = ConcurrentHashMap.class;
-    actual = LruBlockCache.MAP_FIXED_OVERHEAD;
+    actual = ClassSize.CONCURRENT_HASHMAP;
     expected = ClassSize.estimateBase(cl, false);
     if(expected != actual) {
       ClassSize.estimateBase(cl, true);
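The test pins the hand-maintained constant against a reflection-based estimate. ClassSize.estimateBase(Class, boolean) and CONCURRENT_HASHMAP are taken from the diff above; the scaffolding around them is a sketch and assumes the HBase ClassSize is on the classpath:

import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.hbase.util.ClassSize;

public class HeapSizeCheckSketch {
  public static void main(String[] args) {
    Class<?> cl = ConcurrentHashMap.class;
    long actual = ClassSize.CONCURRENT_HASHMAP;        // hand-computed constant
    long expected = ClassSize.estimateBase(cl, false); // reflection-based walk
    if (expected != actual) {
      ClassSize.estimateBase(cl, true); // rerun with debug output, then fail
      throw new AssertionError("expected=" + expected + " actual=" + actual);
    }
  }
}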

TestLruBlockCache.java

@@ -39,17 +39,15 @@ public class TestLruBlockCache extends TestCase {
   public void testBackgroundEvictionThread() throws Exception {
     long maxSize = 100000;
-    LruBlockCache cache = new LruBlockCache(maxSize,100);
-    Block [] blocks = generateFixedBlocks(10, 10000);
-    long expectedCacheSize = 0;
+    long blockSize = calculateBlockSizeDefault(maxSize, 9); // room for 9, will evict
+    LruBlockCache cache = new LruBlockCache(maxSize,blockSize);
+
+    Block [] blocks = generateFixedBlocks(10, blockSize, "block");

     // Add all the blocks
     for(Block block : blocks) {
       cache.cacheBlock(block.blockName, block.buf);
-      expectedCacheSize += block.heapSize();
     }

     // Let the eviction run
@@ -67,14 +65,15 @@ public class TestLruBlockCache extends TestCase {
   public void testCacheSimple() throws Exception {
-    LruBlockCache cache = new LruBlockCache(1000000,10000);
-    Block [] blocks = generateRandomBlocks(10, 10000);
-    long emptyCacheSize = cache.heapSize();
-    long expectedCacheSize = emptyCacheSize;
+    long maxSize = 1000000;
+    long blockSize = calculateBlockSizeDefault(maxSize, 101);
+    LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
+
+    Block [] blocks = generateRandomBlocks(100, blockSize);
+
+    long expectedCacheSize = cache.heapSize();

     // Confirm empty
     for(Block block : blocks) {
       assertTrue(cache.getBlock(block.blockName) == null);
@@ -115,17 +114,21 @@ public class TestLruBlockCache extends TestCase {
       assertTrue(buf != null);
       assertEquals(buf.capacity(), block.buf.capacity());
     }

+    // Expect no evictions
+    assertEquals(0, cache.getEvictionCount());
   }

   public void testCacheEvictionSimple() throws Exception {
     long maxSize = 100000;
-    LruBlockCache cache = new LruBlockCache(maxSize,100,false);
-    Block [] blocks = generateFixedBlocks(10, 10000);
-    long expectedCacheSize = 0;
+    long blockSize = calculateBlockSizeDefault(maxSize, 10);
+
+    LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
+
+    Block [] blocks = generateFixedBlocks(10, blockSize, "block");
+
+    long expectedCacheSize = cache.heapSize();

     // Add all the blocks
     for(Block block : blocks) {
@@ -134,17 +137,17 @@ public class TestLruBlockCache extends TestCase {
     }

     // A single eviction run should have occurred
-    assertEquals(cache.getEvictionCount(), 1);
+    assertEquals(1, cache.getEvictionCount());

     // Our expected size overruns acceptable limit
     assertTrue(expectedCacheSize >
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

     // But the cache did not grow beyond max
-    assertTrue(cache.cacheSize() < maxSize);
+    assertTrue(cache.heapSize() < maxSize);

     // And is still below the acceptable limit
-    assertTrue(cache.cacheSize() <
+    assertTrue(cache.heapSize() <
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

     // All blocks except block 0 and 1 should be in the cache
@@ -159,13 +162,14 @@ public class TestLruBlockCache extends TestCase {
   public void testCacheEvictionTwoPriorities() throws Exception {
     long maxSize = 100000;
-    LruBlockCache cache = new LruBlockCache(maxSize,100,false);
+    long blockSize = calculateBlockSizeDefault(maxSize, 10);
+
+    LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);

     Block [] singleBlocks = generateFixedBlocks(5, 10000, "single");
     Block [] multiBlocks = generateFixedBlocks(5, 10000, "multi");

-    long expectedCacheSize = 0;
+    long expectedCacheSize = cache.heapSize();

     // Add and get the multi blocks
     for(Block block : multiBlocks) {
@@ -191,10 +195,10 @@ public class TestLruBlockCache extends TestCase {
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

     // But the cache did not grow beyond max
-    assertTrue(cache.cacheSize() <= maxSize);
+    assertTrue(cache.heapSize() <= maxSize);

     // And is now below the acceptable limit
-    assertTrue(cache.cacheSize() <=
+    assertTrue(cache.heapSize() <=
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));

     // We expect fairness across the two priorities.
@@ -216,7 +220,7 @@ public class TestLruBlockCache extends TestCase {
   public void testCacheEvictionThreePriorities() throws Exception {
     long maxSize = 100000;
-    long blockSize = 9800;
+    long blockSize = calculateBlockSize(maxSize, 10);

     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
         (int)Math.ceil(1.2*maxSize/blockSize),
@ -228,12 +232,12 @@ public class TestLruBlockCache extends TestCase {
0.33f, // multi
0.34f);// memory
Block [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
Block [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
Block [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");
long expectedCacheSize = 0;
long expectedCacheSize = cache.heapSize();
// Add 3 blocks from each priority
for(int i=0;i<3;i++) {
@ -257,7 +261,7 @@ public class TestLruBlockCache extends TestCase {
assertEquals(0, cache.getEvictionCount());
// Verify cache size
assertEquals(expectedCacheSize, cache.cacheSize());
assertEquals(expectedCacheSize, cache.heapSize());
// Insert a single block, oldest single should be evicted
cache.cacheBlock(singleBlocks[3].blockName, singleBlocks[3].buf);
@@ -339,7 +343,7 @@ public class TestLruBlockCache extends TestCase {
   public void testScanResistance() throws Exception {
     long maxSize = 100000;
-    long blockSize = 9800;
+    long blockSize = calculateBlockSize(maxSize, 10);

     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
         (int)Math.ceil(1.2*maxSize/blockSize),
@@ -401,7 +405,7 @@ public class TestLruBlockCache extends TestCase {
   public void testResizeBlockCache() throws Exception {
     long maxSize = 300000;
-    long blockSize = 9750;
+    long blockSize = calculateBlockSize(maxSize, 31);

     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
         (int)Math.ceil(1.2*maxSize/blockSize),
@@ -435,7 +439,7 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(0, cache.getEvictionCount());

     // Resize to half capacity plus an extra block (otherwise we evict an extra)
-    cache.setMaxSize((long)(maxSize * 0.5f));
+    cache.setMaxSize((long)(maxSize * 0.5f) + blockSize);

     // Should have run a single eviction
     assertEquals(1, cache.getEvictionCount());
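Why the extra blockSize in the resize above: calculateBlockSize sizes blocks so that roughly 31 of them fill maxSize once map overhead is counted, so halving the limit exactly would push one more block over the threshold than the test intends and evict it too. Padding the new limit by a single block keeps the eviction deterministic, which the assertion that follows relies on. (This reading follows the inline comment; the precise arithmetic depends on the ClassSize constants at runtime.)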
@@ -470,19 +474,40 @@ public class TestLruBlockCache extends TestCase {
     return generateFixedBlocks(numBlocks, (int)size, pfx);
   }

-  private Block [] generateFixedBlocks(int numBlocks, int size) {
-    return generateFixedBlocks(numBlocks, size, "block");
-  }
-
-  private Block [] generateRandomBlocks(int numBlocks, int maxSize) {
+  private Block [] generateRandomBlocks(int numBlocks, long maxSize) {
     Block [] blocks = new Block[numBlocks];
     Random r = new Random();
     for(int i=0;i<numBlocks;i++) {
-      blocks[i] = new Block("block" + i, r.nextInt(maxSize)+1);
+      blocks[i] = new Block("block" + i, r.nextInt((int)maxSize)+1);
     }
     return blocks;
   }

+  private long calculateBlockSize(long maxSize, int numBlocks) {
+    long roughBlockSize = (long)Math.ceil(maxSize/numBlocks);
+    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
+    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
+        ClassSize.CONCURRENT_HASHMAP +
+        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
+        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
+    long negateBlockSize = (long)Math.ceil(totalOverhead/numEntries);
+    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
+    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
+  }
+
+  private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
+    long roughBlockSize = (long)Math.ceil(maxSize/numBlocks);
+    int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
+    long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
+        ClassSize.CONCURRENT_HASHMAP +
+        (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
+        (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
+    long negateBlockSize = (long)Math.ceil(totalOverhead/numEntries);
+    negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
+    return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
+        LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
+  }
+
   private class Block implements HeapSize {
     String blockName;
     ByteBuffer buf;
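For reference, the shape of the arithmetic in the two new helpers, as a worked example with stand-in constants. CACHE_FIXED_OVERHEAD, the map costs, and PER_BLOCK_OVERHEAD are made-up figures here; the real ones come from ClassSize and CachedBlock at runtime, so the printed number is only illustrative:

public class BlockSizeSketch {
  static final long CACHE_FIXED_OVERHEAD = 400;          // assumption
  static final long MAP = 56, ENTRY = 32, SEGMENT = 40;  // assumptions
  static final int CONCURRENCY = 16;   // ConcurrentHashMap's default level
  static final long PER_BLOCK_OVERHEAD = 72;             // assumption

  // Round up to an 8-byte boundary, mirroring ClassSize.align.
  static long align(long num) {
    return ((num + 7) >> 3) << 3;
  }

  static long calculateBlockSize(long maxSize, int numBlocks) {
    long roughBlockSize = maxSize / numBlocks;      // 100000/10 = 10000
    int numEntries = (int) Math.ceil(1.2 * maxSize / roughBlockSize); // 12
    long totalOverhead = CACHE_FIXED_OVERHEAD + MAP // fixed structures
        + numEntries * ENTRY                        // entries for 1.2x the blocks
        + CONCURRENCY * SEGMENT;                    // one segment per stripe
    long negateBlockSize = totalOverhead / numEntries // overhead per entry...
        + PER_BLOCK_OVERHEAD;                         // ...plus per-block cost
    // Shave 1% so numBlocks blocks land just under the eviction trigger;
    // the *Default variant multiplies by DEFAULT_ACCEPTABLE_FACTOR instead.
    return align((long) Math.floor((roughBlockSize - negateBlockSize) * 0.99f));
  }

  public static void main(String[] args) {
    System.out.println(calculateBlockSize(100000, 10));
  }
}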