Code cleanup of LruBlockCache

Signed-off-by: Michael Stack <stack@apache.org>
Authored by Lars Francke on 2016-11-28 10:55:35 +01:00; committed by Michael Stack
parent f2e363b589
commit ad857d1b77
1 changed file with 110 additions and 107 deletions


@@ -1,4 +1,4 @@
/**
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -103,50 +103,55 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* Percentage of total size that eviction will evict until; e.g. if set to .8, then we will keep
* evicting during an eviction run till the cache size is down to 80% of the total.
*/
static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
private static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
/**
* Acceptable size of cache (no evictions if size < acceptable)
*/
static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";
private static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME =
"hbase.lru.blockcache.acceptable.factor";
/**
* Hard capacity limit of cache, will reject any put if size > this * acceptable
*/
static final String LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.hard.capacity.limit.factor";
static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";
static final String LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME =
"hbase.lru.blockcache.hard.capacity.limit.factor";
private static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME =
"hbase.lru.blockcache.single.percentage";
private static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME =
"hbase.lru.blockcache.multi.percentage";
private static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME =
"hbase.lru.blockcache.memory.percentage";
/**
* Configuration key to force data-block always (except in-memory are too much)
* cached in memory for in-memory hfile, unlike inMemory, which is a column-family
* configuration, inMemoryForceMode is a cluster-wide configuration
*/
static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";
private static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME =
"hbase.lru.rs.inmemoryforcemode";
/** Default Configuration Parameters*/
/* Default Configuration Parameters*/
/** Backing Concurrent Map Configuration */
/* Backing Concurrent Map Configuration */
static final float DEFAULT_LOAD_FACTOR = 0.75f;
static final int DEFAULT_CONCURRENCY_LEVEL = 16;
/** Eviction thresholds */
static final float DEFAULT_MIN_FACTOR = 0.95f;
/* Eviction thresholds */
private static final float DEFAULT_MIN_FACTOR = 0.95f;
static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;
/** Priority buckets */
static final float DEFAULT_SINGLE_FACTOR = 0.25f;
static final float DEFAULT_MULTI_FACTOR = 0.50f;
static final float DEFAULT_MEMORY_FACTOR = 0.25f;
/* Priority buckets */
private static final float DEFAULT_SINGLE_FACTOR = 0.25f;
private static final float DEFAULT_MULTI_FACTOR = 0.50f;
private static final float DEFAULT_MEMORY_FACTOR = 0.25f;
/** default hard capacity limit */
static final float DEFAULT_HARD_CAPACITY_LIMIT_FACTOR = 1.2f;
private static final float DEFAULT_HARD_CAPACITY_LIMIT_FACTOR = 1.2f;
static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;
private static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;
/** Statistics thread */
static final int statThreadPeriod = 60 * 5;
/* Statistics thread */
private static final int STAT_THREAD_PERIOD = 60 * 5;
private static final String LRU_MAX_BLOCK_SIZE = "hbase.lru.max.block.size";
private static final long DEFAULT_MAX_BLOCK_SIZE = 16L * 1024L * 1024L;
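Not part of the diff, but a quick sanity check on how these factors compose, using hypothetical numbers: the three priority-bucket factors partition the cache (0.25 + 0.50 + 0.25 = 1.0), eviction starts once usage passes the acceptable factor, and an eviction run frees space until the min factor is reached.

// Illustrative sizing only; the 1 GiB figure is made up, the factors are the defaults above.
long maxSize    = 1024L * 1024L * 1024L;      // 1 GiB cache
long acceptable = (long) (maxSize * 0.99f);   // ~1013.8 MiB: eviction triggers above this
long minSize    = (long) (maxSize * 0.95f);   // ~972.8 MiB: an eviction run frees down to this
long singleCap  = (long) (maxSize * 0.25f);   // "single access" bucket budget
long multiCap   = (long) (maxSize * 0.50f);   // "multiple access" bucket budget
long memoryCap  = (long) (maxSize * 0.25f);   // "in-memory" bucket budget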
@@ -218,6 +223,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
*
* <p>All other factors will be calculated based on defaults specified in
* this class.
*
* @param maxSize maximum size of cache, in bytes
* @param blockSize approximate size of each block, in bytes
*/
@@ -252,7 +258,8 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
conf.getFloat(LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, DEFAULT_HARD_CAPACITY_LIMIT_FACTOR),
conf.getFloat(LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME,
DEFAULT_HARD_CAPACITY_LIMIT_FACTOR),
conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE),
conf.getLong(LRU_MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE)
);
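As a reading aid (not part of this commit): the keys read above remain ordinary configuration properties even though the constant names are now private, so they can still be tuned in hbase-site.xml or programmatically. A minimal fragment, assuming a standard HBase Configuration:

// Sketch only: override the LRU tuning keys before the cache is constructed.
Configuration conf = HBaseConfiguration.create();
conf.setFloat("hbase.lru.blockcache.min.factor", 0.90f);          // evict down to 90% of max size
conf.setFloat("hbase.lru.blockcache.acceptable.factor", 0.95f);   // start evicting above 95%
conf.setBoolean("hbase.lru.rs.inmemoryforcemode", true);          // cluster-wide in-memory force mode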
@@ -264,6 +271,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Configurable constructor. Use this constructor if not using defaults.
*
* @param maxSize maximum size of this cache, in bytes
* @param blockSize expected average size of blocks, in bytes
* @param evictionThread whether to run evictions in a bg thread or not
@@ -296,8 +304,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
this.maxSize = maxSize;
this.blockSize = blockSize;
this.forceInMemory = forceInMemory;
map = new ConcurrentHashMap<BlockCacheKey,LruCachedBlock>(mapInitialSize,
mapLoadFactor, mapConcurrencyLevel);
map = new ConcurrentHashMap<>(mapInitialSize, mapLoadFactor, mapConcurrencyLevel);
this.minFactor = minFactor;
this.acceptableFactor = acceptableFactor;
this.singleFactor = singleFactor;
@@ -317,8 +324,8 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
}
// TODO: Add means of turning this off. Bit obnoxious running thread just to make a log
// every five minutes.
this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this), STAT_THREAD_PERIOD,
STAT_THREAD_PERIOD, TimeUnit.SECONDS);
}
@Override
@@ -336,10 +343,10 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* <p>
* It is assumed this will NOT be called on an already cached block. In rare cases (HBASE-8547)
* this can happen, for which we compare the buffer contents.
*
* @param cacheKey block's cache key
* @param buf block buffer
* @param inMemory if block is in-memory
* @param cacheDataInL1
*/
@Override
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory,
@@ -378,9 +385,9 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
stats.failInsert();
if (LOG.isTraceEnabled()) {
LOG.trace("LruBlockCache current size " + StringUtils.byteDesc(currentSize)
+ " has exceeded acceptable size " + StringUtils.byteDesc(currentAcceptableSize) + " too many."
+ " the hard limit size is " + StringUtils.byteDesc(hardLimitSize) + ", failed to put cacheKey:"
+ cacheKey + " into LruBlockCache.");
+ " has exceeded acceptable size " + StringUtils.byteDesc(currentAcceptableSize) + "."
+ " The hard limit size is " + StringUtils.byteDesc(hardLimitSize)
+ ", failed to put cacheKey:" + cacheKey + " into LruBlockCache.");
}
if (!evictionInProgress) {
runEviction();
@@ -431,6 +438,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Cache the block with the specified name and buffer.
* <p>
*
* @param cacheKey block's cache key
* @param buf block buffer
*/
@@ -442,11 +450,8 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* Helper function that updates the local size counter and also updates any
* per-cf or per-blocktype metrics it can discern from given
* {@link LruCachedBlock}
*
* @param cb
* @param evict
*/
protected long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
private long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
long heapsize = cb.heapSize();
if (evict) {
heapsize *= -1;
@@ -456,11 +461,14 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Get the buffer of the block with the specified name.
*
* @param cacheKey block's cache key
* @param caching true if the caller caches blocks on cache misses
* @param repeat Whether this is a repeat lookup for the same block
* (used to avoid double counting cache misses when doing double-check locking)
* (used to avoid double counting cache misses when doing double-check
* locking)
* @param updateCacheMetrics Whether to update cache metrics or not
*
* @return buffer of specified cache key, or null if not in cache
*/
@Override
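For orientation (not from the commit), a call matching the Javadoc above could look like the following; cache and cacheKey are placeholders:

// caching = true (cache on miss), repeat = false (first lookup), updateCacheMetrics = true
Cacheable block = cache.getBlock(cacheKey, true, false, true);
if (block == null) {
  // cache miss: the caller reads the block from the HFile and may cache it via cacheBlock(...)
}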
@@ -495,7 +503,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Whether the cache contains block with specified cacheKey
* @param cacheKey
*
* @return true if contains the block
*/
public boolean containsBlock(BlockCacheKey cacheKey) {
@@ -505,8 +513,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
@Override
public boolean evictBlock(BlockCacheKey cacheKey) {
LruCachedBlock cb = map.get(cacheKey);
if (cb == null) return false;
return evictBlock(cb, false) > 0;
return cb != null && evictBlock(cb, false) > 0;
}
/**
@@ -537,7 +544,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Evict the block, and it will be cached by the victim handler if exists &amp;&amp;
* block may be read again later
* @param block
*
* @param evictedByEvictionProcess true if the given block is evicted by
* EvictionThread
* @return the heap size of evicted block
@@ -614,12 +621,9 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
if (bytesToFree <= 0) return;
// Instantiate priority buckets
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize,
singleSize());
BlockBucket bucketMulti = new BlockBucket("multi", bytesToFree, blockSize,
multiSize());
BlockBucket bucketMemory = new BlockBucket("memory", bytesToFree, blockSize,
memorySize());
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
BlockBucket bucketMulti = new BlockBucket("multi", bytesToFree, blockSize, multiSize());
BlockBucket bucketMemory = new BlockBucket("memory", bytesToFree, blockSize, memorySize());
// Scan entire map putting into appropriate buckets
for (LruCachedBlock cachedBlock : map.values()) {
@@ -679,8 +683,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
}
}
} else {
PriorityQueue<BlockBucket> bucketQueue =
new PriorityQueue<BlockBucket>(3);
PriorityQueue<BlockBucket> bucketQueue = new PriorityQueue<>(3);
bucketQueue.add(bucketSingle);
bucketQueue.add(bucketMulti);
@@ -692,8 +695,8 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
while ((bucket = bucketQueue.poll()) != null) {
long overflow = bucket.overflow();
if (overflow > 0) {
long bucketBytesToFree = Math.min(overflow,
(bytesToFree - bytesFreed) / remainingBuckets);
long bucketBytesToFree =
Math.min(overflow, (bytesToFree - bytesFreed) / remainingBuckets);
bytesFreed += bucket.free(bucketBytesToFree);
}
remainingBuckets--;
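A small arithmetic sketch of the split above, with made-up numbers: each remaining bucket gives up at most its own overflow, capped at an even share of what is still left to free.

// Illustrative values only (not from the commit):
long bytesToFree = 300L;
long bytesFreed  = 0L;
int  remainingBuckets = 3;
long overflow = 180L;   // this bucket is 180 bytes over its per-bucket limit
long bucketBytesToFree =
    Math.min(overflow, (bytesToFree - bytesFreed) / remainingBuckets);   // min(180, 100) = 100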
@@ -814,6 +817,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Get the maximum size of this cache.
*
* @return max size in bytes
*/
public long getMaxSize() {
@@ -851,6 +855,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* Thread is triggered into action by {@link LruBlockCache#runEviction()}
*/
static class EvictionThread extends HasThread {
private WeakReference<LruBlockCache> cache;
private volatile boolean go = true;
// flag set after enter the run method, used for test
@@ -859,7 +864,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
public EvictionThread(LruBlockCache cache) {
super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
setDaemon(true);
this.cache = new WeakReference<LruBlockCache>(cache);
this.cache = new WeakReference<>(cache);
}
@Override
@@ -905,6 +910,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
* Statistics thread. Periodically prints the cache statistics to the log.
*/
static class StatisticsThread extends Thread {
private final LruBlockCache lru;
public StatisticsThread(LruBlockCache lru) {
@@ -960,12 +966,11 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
return getCurrentSize();
}
public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
private static long calculateOverhead(long maxSize, long blockSize, int concurrency) {
// FindBugs ICAST_INTEGER_MULTIPLY_CAST_TO_LONG
return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
((long)Math.ceil(maxSize*1.2/blockSize)
* ClassSize.CONCURRENT_HASHMAP_ENTRY) +
((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP
+ ((long) Math.ceil(maxSize * 1.2 / blockSize) * ClassSize.CONCURRENT_HASHMAP_ENTRY)
+ ((long) concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
}
@Override
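For intuition (not part of the change), the map-entry term of the formula above works out as follows for a hypothetical 1 GiB cache with 64 KiB blocks; the ClassSize constants themselves are not reproduced here.

// Hypothetical sizes; only the entry-count arithmetic is shown.
long maxSize   = 1024L * 1024L * 1024L;                                // 1 GiB
long blockSize = 64L * 1024L;                                          // 64 KiB
long budgetedEntries = (long) Math.ceil(maxSize * 1.2 / blockSize);    // = 19661 entries
// overhead ≈ CACHE_FIXED_OVERHEAD + CONCURRENT_HASHMAP
//          + budgetedEntries * CONCURRENT_HASHMAP_ENTRY
//          + concurrency * CONCURRENT_HASHMAP_SEGMENT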
@@ -1026,8 +1031,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
diff = Long.compare(this.getOffset(), other.getOffset());
if (diff != 0) return diff;
if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
throw new IllegalStateException("" + this.getCachedTime() + ", " +
other.getCachedTime());
throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
}
return Long.compare(other.getCachedTime(), this.getCachedTime());
}
@@ -1075,8 +1079,9 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
}
public void shutdown() {
if (victimHandler != null)
if (victimHandler != null) {
victimHandler.shutdown();
}
this.scheduleThreadPool.shutdown();
for (int i = 0; i < 10; i++) {
if (!this.scheduleThreadPool.isShutdown()) {
@@ -1106,11 +1111,12 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
/**
* Used in testing. May be very inefficient.
*
* @return the set of cached file names
*/
@VisibleForTesting
SortedSet<String> getCachedFileNamesForTest() {
SortedSet<String> fileNames = new TreeSet<String>();
SortedSet<String> fileNames = new TreeSet<>();
for (BlockCacheKey cacheKey : map.keySet()) {
fileNames.add(cacheKey.getHfileName());
}
@@ -1119,10 +1125,9 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
@VisibleForTesting
Map<BlockType, Integer> getBlockTypeCountsForTest() {
Map<BlockType, Integer> counts =
new EnumMap<BlockType, Integer>(BlockType.class);
Map<BlockType, Integer> counts = new EnumMap<>(BlockType.class);
for (LruCachedBlock cb : map.values()) {
BlockType blockType = ((Cacheable)cb.getBuffer()).getBlockType();
BlockType blockType = cb.getBuffer().getBlockType();
Integer count = counts.get(blockType);
counts.put(blockType, (count == null ? 0 : count) + 1);
}
@@ -1131,11 +1136,9 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
@VisibleForTesting
public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
Map<DataBlockEncoding, Integer> counts =
new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
Map<DataBlockEncoding, Integer> counts = new EnumMap<>(DataBlockEncoding.class);
for (LruCachedBlock block : map.values()) {
DataBlockEncoding encoding =
((HFileBlock) block.getBuffer()).getDataBlockEncoding();
DataBlockEncoding encoding = ((HFileBlock) block.getBuffer()).getDataBlockEncoding();
Integer count = counts.get(encoding);
counts.put(encoding, (count == null ? 0 : count) + 1);
}