HBASE-16157 The incorrect block cache count and size are caused by removing duplicate block key in the LruBlockCache (ChiaPing Tsai)

tedyu 2016-07-05 13:54:59 -07:00
parent ff8c2fcac0
commit 368c32e322
2 changed files with 63 additions and 4 deletions

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java

@@ -475,8 +475,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
   public boolean evictBlock(BlockCacheKey cacheKey) {
     LruCachedBlock cb = map.get(cacheKey);
     if (cb == null) return false;
-    evictBlock(cb, false);
-    return true;
+    return evictBlock(cb, false) > 0;
   }
 
   /**
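The subtlety here is that map.get() and the nested evictBlock(cb, false) call are not atomic: another evictor can remove the block in between, so the boolean must come from what this call actually freed. A hedged caller-side sketch (caller names assumed, not part of this patch):

    // After this patch a false return can mean "someone else evicted it first",
    // not only "the key was never cached". Callers needing exactly-once
    // accounting should rely on this result, not on a prior map lookup.
    if (!cache.evictBlock(cacheKey)) {
      // Lost the race with the eviction thread (or key absent): nothing freed here.
    }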
@@ -513,7 +512,10 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
    * @return the heap size of evicted block
    */
   protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
-    map.remove(block.getCacheKey());
+    boolean found = map.remove(block.getCacheKey()) != null;
+    if (!found) {
+      return 0;
+    }
     updateSizeMetrics(block, true);
     long val = elements.decrementAndGet();
     if (LOG.isTraceEnabled()) {
@@ -545,6 +547,16 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
     }
   }
 
+  @VisibleForTesting
+  boolean isEvictionInProgress() {
+    return evictionInProgress;
+  }
+
+  @VisibleForTesting
+  long getOverhead() {
+    return overhead;
+  }
+
   /**
    * Eviction method.
    */
@@ -652,7 +664,6 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
         remainingBuckets--;
       }
     }
-
     if (LOG.isTraceEnabled()) {
       long single = bucketSingle.totalSize();
       long multi = bucketMulti.totalSize();
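The essence of the fix is to treat the result of ConcurrentHashMap.remove() as the single arbiter of which caller accounts for the eviction. A minimal, self-contained sketch of the idiom (simplified counters, hypothetical names; not HBase code):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicLong;

    // Two threads race to evict the same key; only the remove() winner
    // decrements the counters, so they can never go negative or drift.
    public class RemoveAsArbiterSketch {
      static final ConcurrentHashMap<String, byte[]> map = new ConcurrentHashMap<>();
      static final AtomicLong elements = new AtomicLong();
      static final AtomicLong sizeBytes = new AtomicLong();

      static long evict(String key) {
        byte[] block = map.remove(key);  // atomic: at most one caller gets non-null
        if (block == null) {
          return 0;                      // lost the race; account nothing
        }
        elements.decrementAndGet();
        sizeBytes.addAndGet(-block.length);
        return block.length;
      }

      public static void main(String[] args) throws InterruptedException {
        map.put("blk", new byte[64]);
        elements.incrementAndGet();
        sizeBytes.addAndGet(64);

        Thread t1 = new Thread(() -> evict("blk"));
        Thread t2 = new Thread(() -> evict("blk"));
        t1.start(); t2.start();
        t1.join(); t2.join();

        // Without the null check both threads would decrement, leaving -1 / -64,
        // which is exactly the wrong block count and size this commit fixes.
        System.out.println(elements.get() + " " + sizeBytes.get()); // prints: 0 0
      }
    }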

hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java

@@ -25,6 +25,10 @@ import static org.junit.Assert.assertTrue;
 
 import java.nio.ByteBuffer;
 import java.util.Random;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.io.HeapSize;
@@ -43,7 +47,46 @@ import org.junit.experimental.categories.Category;
 
 @Category(SmallTests.class)
 public class TestLruBlockCache {
+  @Test
+  public void testCacheEvictionThreadSafe() throws Exception {
+    long maxSize = 100000;
+    int numBlocks = 9;
+    int testRuns = 10;
+    final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
+    assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+    final LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
+    EvictionThread evictionThread = cache.getEvictionThread();
+    assertTrue(evictionThread != null);
+    while (!evictionThread.isEnteringRun()) {
+      Thread.sleep(1);
+    }
+    final String hfileName = "hfile";
+    int threads = 10;
+    final int blocksPerThread = 5 * numBlocks;
+    for (int run = 0; run != testRuns; ++run) {
+      final AtomicInteger blockCount = new AtomicInteger(0);
+      ExecutorService service = Executors.newFixedThreadPool(threads);
+      for (int i = 0; i != threads; ++i) {
+        service.execute(new Runnable() {
+          @Override
+          public void run() {
+            for (int blockIndex = 0; blockIndex < blocksPerThread || (!cache.isEvictionInProgress()); ++blockIndex) {
+              CachedItem block = new CachedItem(hfileName, (int) blockSize, blockCount.getAndIncrement());
+              boolean inMemory = Math.random() > 0.5;
+              cache.cacheBlock(block.cacheKey, block, inMemory, false);
+            }
+            cache.evictBlocksByHfileName(hfileName);
+          }
+        });
+      }
+      service.shutdown();
+      // The test may fail here if the evict thread frees the blocks too fast
+      service.awaitTermination(10, TimeUnit.MINUTES);
+      assertEquals(0, cache.getBlockCount());
+      assertEquals(cache.getOverhead(), cache.getCurrentSize());
+    }
+  }
 
   @Test
   public void testBackgroundEvictionThread() throws Exception {
     long maxSize = 100000;
@@ -784,6 +827,11 @@ public class TestLruBlockCache {
     BlockCacheKey cacheKey;
     int size;
 
+    CachedItem(String blockName, int size, int offset) {
+      this.cacheKey = new BlockCacheKey(blockName, offset);
+      this.size = size;
+    }
+
     CachedItem(String blockName, int size) {
       this.cacheKey = new BlockCacheKey(blockName, 0);
       this.size = size;
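The new three-argument constructor exists so the concurrency test can mint many distinct BlockCacheKeys within a single hfile (a BlockCacheKey is the hfile name plus an offset), which evictBlocksByHfileName then removes in one sweep. A usage sketch, assuming the test's CachedItem helper:

    // Distinct offsets yield distinct cache keys for the same hfile, so the
    // worker threads in testCacheEvictionThreadSafe never overwrite each
    // other's blocks while racing against the eviction thread.
    CachedItem first  = new CachedItem("hfile", 1024, 0); // key: ("hfile", 0)
    CachedItem second = new CachedItem("hfile", 1024, 1); // key: ("hfile", 1)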