HBASE-10263 make LruBlockCache single/multi/in-memory ratio user-configurable and provide preemptive mode for in-memory type block (Feng Honghua)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1556703 13f79535-47bb-0310-9956-ffa450edef68
liangxie 2014-01-09 02:58:35 +00:00
parent 3ad57c7195
commit ce1641dd80
2 changed files with 187 additions and 30 deletions


@@ -101,6 +101,16 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";
static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";
/**
* Configuration key to force data blocks of in-memory hfiles to always stay
* cached in memory (unless the in-memory blocks alone overflow the cache).
* Unlike inMemory, which is a per-column-family setting, inMemoryForceMode
* is a cluster-wide configuration.
*/
static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";
/** Default Configuration Parameters*/
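These keys are read from the HBase Configuration by the constructor change further down in this patch. A minimal sketch (not part of the patch) of setting them programmatically; maxSize, blockSize and the specific ratio values are illustrative, and the three percentages must still total 1.0:

// Illustrative only: wire up the new cache-ratio keys and the cluster-wide
// in-memory force mode, then build the cache via the Configuration-based
// constructor shown in this patch.
Configuration conf = HBaseConfiguration.create();
conf.setFloat("hbase.lru.blockcache.single.percentage", 0.20f);
conf.setFloat("hbase.lru.blockcache.multi.percentage", 0.30f);
conf.setFloat("hbase.lru.blockcache.memory.percentage", 0.50f);
conf.setBoolean("hbase.lru.rs.inmemoryforcemode", true);
// maxSize and blockSize are assumed to be defined elsewhere
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, true, conf);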
@@ -117,6 +127,8 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
static final float DEFAULT_MULTI_FACTOR = 0.50f;
static final float DEFAULT_MEMORY_FACTOR = 0.25f;
static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;
/** Statistics thread */
static final int statThreadPeriod = 60 * 5;
@@ -176,6 +188,9 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/** Overhead of the structure itself */
private long overhead;
/** Whether data blocks of in-memory hfiles get higher priority when evicting */
private boolean forceInMemory;
/** Where to send victims (blocks evicted from the cache) */
private BucketCache victimHandler = null;
@@ -200,8 +215,11 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
(int)Math.ceil(1.2*maxSize/blockSize),
DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
DEFAULT_SINGLE_FACTOR, DEFAULT_MULTI_FACTOR,
DEFAULT_MEMORY_FACTOR);
DEFAULT_SINGLE_FACTOR,
DEFAULT_MULTI_FACTOR,
DEFAULT_MEMORY_FACTOR,
false
);
}
public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
@@ -211,9 +229,11 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
DEFAULT_CONCURRENCY_LEVEL,
conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
DEFAULT_SINGLE_FACTOR,
DEFAULT_MULTI_FACTOR,
DEFAULT_MEMORY_FACTOR);
conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE)
);
}
public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
@@ -236,11 +256,12 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
*/
public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
float minFactor, float acceptableFactor,
float singleFactor, float multiFactor, float memoryFactor) {
if(singleFactor + multiFactor + memoryFactor != 1) {
float minFactor, float acceptableFactor, float singleFactor,
float multiFactor, float memoryFactor, boolean forceInMemory) {
if(singleFactor + multiFactor + memoryFactor != 1 ||
singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
throw new IllegalArgumentException("Single, multi, and memory factors " +
" should total 1.0");
" should be non-negative and total 1.0");
}
if(minFactor >= acceptableFactor) {
throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
@@ -250,6 +271,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
}
this.maxSize = maxSize;
this.blockSize = blockSize;
this.forceInMemory = forceInMemory;
map = new ConcurrentHashMap<BlockCacheKey,CachedBlock>(mapInitialSize,
mapLoadFactor, mapConcurrencyLevel);
this.minFactor = minFactor;
@@ -498,6 +520,38 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
}
}
long bytesFreed = 0;
if (forceInMemory || memoryFactor > 0.999f) {
long s = bucketSingle.totalSize();
long m = bucketMulti.totalSize();
if (bytesToFree > (s + m)) {
// even emptying the single and multi buckets would not free enough bytes,
// so both are emptied and the rest is evicted from the memory bucket
bytesFreed = bucketSingle.free(s);
bytesFreed += bucketMulti.free(m);
bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
} else {
// no need to evict blocks from the memory bucket; free from the single
// and multi buckets so that the bytes remaining in them end up close
// to a 1:2 single-to-multi ratio
long bytesRemain = s + m - bytesToFree;
if (3 * s <= bytesRemain) {
// single-bucket is small enough that no eviction happens for it
// hence all eviction goes from multi-bucket
bytesFreed = bucketMulti.free(bytesToFree);
} else if (3 * m <= 2 * bytesRemain) {
// multi-bucket is small enough that no eviction happens for it
// hence all eviction goes from single-bucket
bytesFreed = bucketSingle.free(bytesToFree);
} else {
// both buckets need to evict some blocks
bytesFreed = bucketSingle.free(s - bytesRemain / 3);
if (bytesFreed < bytesToFree) {
bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
}
}
}
} else {
PriorityQueue<BlockBucket> bucketQueue =
new PriorityQueue<BlockBucket>(3);
@@ -506,7 +560,6 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
bucketQueue.add(bucketMemory);
int remainingBuckets = 3;
long bytesFreed = 0;
BlockBucket bucket;
while((bucket = bucketQueue.poll()) != null) {
@@ -518,6 +571,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
}
remainingBuckets--;
}
}
if (LOG.isTraceEnabled()) {
long single = bucketSingle.totalSize();
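The branch added above handles the forced-in-memory case: if even emptying the single and multi buckets cannot free enough bytes, the memory bucket is evicted too; otherwise the frees are split so the bytes left in the single and multi buckets end up close to a 1:2 ratio. The following standalone sketch (not part of the patch; the helper name and the sample numbers are made up) reproduces that split arithmetic:

// Sketch of the single/multi split used when the memory bucket is spared.
// Given current bucket sizes s (single) and m (multi) and the bytes to free,
// returns {bytesFromSingle, bytesFromMulti}; the helper name is hypothetical.
static long[] splitFree(long s, long m, long bytesToFree) {
  long bytesRemain = s + m - bytesToFree;        // bytes that will survive
  if (3 * s <= bytesRemain) {
    // single already holds <= 1/3 of what remains: free only from multi
    return new long[] { 0L, bytesToFree };
  } else if (3 * m <= 2 * bytesRemain) {
    // multi already holds <= 2/3 of what remains: free only from single
    return new long[] { bytesToFree, 0L };
  } else {
    // trim single down to ~1/3 of the remainder; multi covers the rest
    long fromSingle = s - bytesRemain / 3;
    return new long[] { fromSingle, bytesToFree - fromSingle };
  }
}
// Example: s = 40, m = 80, bytesToFree = 30 gives bytesRemain = 90, so
// splitFree returns {10, 20}, leaving 30 bytes single and 60 bytes multi (1:2).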


@@ -255,8 +255,8 @@ public class TestLruBlockCache {
0.99f, // acceptable
0.33f, // single
0.33f, // multi
0.34f);// memory
0.34f, // memory
false);
CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
@@ -360,8 +360,109 @@ public class TestLruBlockCache {
assertEquals(null, cache.getBlock(memoryBlocks[1].cacheKey, true, false));
assertEquals(null, cache.getBlock(memoryBlocks[2].cacheKey, true, false));
assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false));
}
@Test
public void testCacheEvictionInMemoryForceMode() throws Exception {
long maxSize = 100000;
long blockSize = calculateBlockSize(maxSize, 10);
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
(int)Math.ceil(1.2*maxSize/blockSize),
LruBlockCache.DEFAULT_LOAD_FACTOR,
LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
0.98f, // min
0.99f, // acceptable
0.2f, // single
0.3f, // multi
0.5f, // memory
true);
CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");
long expectedCacheSize = cache.heapSize();
// 0. Add 5 single blocks and 4 multi blocks to make cache full, si:mu:me = 5:4:0
for(int i = 0; i < 4; i++) {
// Just add single blocks
cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();
// Add and get multi blocks
cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
cache.getBlock(multiBlocks[i].cacheKey, true, false);
}
// 5th single block
cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]);
expectedCacheSize += singleBlocks[4].cacheBlockHeapSize();
// Do not expect any evictions yet
assertEquals(0, cache.getEvictionCount());
// Verify cache size
assertEquals(expectedCacheSize, cache.heapSize());
// 1. Insert a memory block, oldest single should be evicted, si:mu:me = 4:4:1
cache.cacheBlock(memoryBlocks[0].cacheKey, memoryBlocks[0], true);
// Single eviction, one block evicted
assertEquals(1, cache.getEvictionCount());
assertEquals(1, cache.getEvictedCount());
// Verify oldest single block (index = 0) is the one evicted
assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));
// 2. Insert another memory block, another single evicted, si:mu:me = 3:4:2
cache.cacheBlock(memoryBlocks[1].cacheKey, memoryBlocks[1], true);
// Two evictions, two evicted.
assertEquals(2, cache.getEvictionCount());
assertEquals(2, cache.getEvictedCount());
// Current oldest single block (index = 1) should be evicted now
assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
// 3. Insert 4 memory blocks, 2 single and 2 multi evicted, si:mu:me = 1:2:6
cache.cacheBlock(memoryBlocks[2].cacheKey, memoryBlocks[2], true);
cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true);
cache.cacheBlock(memoryBlocks[4].cacheKey, memoryBlocks[4], true);
cache.cacheBlock(memoryBlocks[5].cacheKey, memoryBlocks[5], true);
// Four evictions, four evicted.
assertEquals(6, cache.getEvictionCount());
assertEquals(6, cache.getEvictedCount());
// two oldest single blocks and two oldest multi blocks evicted
assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false));
assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false));
assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));
assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));
// 4. Insert 3 memory blocks, the remaining 1 single and 2 multi evicted
// si:mu:me = 0:0:9
cache.cacheBlock(memoryBlocks[6].cacheKey, memoryBlocks[6], true);
cache.cacheBlock(memoryBlocks[7].cacheKey, memoryBlocks[7], true);
cache.cacheBlock(memoryBlocks[8].cacheKey, memoryBlocks[8], true);
// Three evictions, three evicted.
assertEquals(9, cache.getEvictionCount());
assertEquals(9, cache.getEvictedCount());
// one oldest single block and two oldest multi blocks evicted
assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false));
assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false));
assertEquals(null, cache.getBlock(multiBlocks[3].cacheKey, true, false));
// 5. Insert one memory block, the oldest memory evicted
// si:mu:me = 0:0:9
cache.cacheBlock(memoryBlocks[9].cacheKey, memoryBlocks[9], true);
// one eviction, one evicted.
assertEquals(10, cache.getEvictionCount());
assertEquals(10, cache.getEvictedCount());
// oldest memory block evicted
assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false));
// 6. Insert one new single block, itself evicted immediately since
// all blocks in cache are memory-type which have higher priority
// si:mu:me = 0:0:9 (no change)
cache.cacheBlock(singleBlocks[9].cacheKey, singleBlocks[9]);
// one eviction, one evicted.
assertEquals(11, cache.getEvictionCount());
assertEquals(11, cache.getEvictedCount());
// the single block just cached now evicted (can't evict memory)
assertEquals(null, cache.getBlock(singleBlocks[9].cacheKey, true, false));
}
// test scan resistance
@@ -379,7 +480,8 @@ public class TestLruBlockCache {
0.99f, // acceptable
0.33f, // single
0.33f, // multi
0.34f);// memory
0.34f, // memory
false);
CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single");
CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
@@ -442,7 +544,8 @@ public class TestLruBlockCache {
0.99f, // acceptable
0.33f, // single
0.33f, // multi
0.34f);// memory
0.34f, // memory
false);
CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");