From d016338e45dca183fc05f254e3e1260d04511269 Mon Sep 17 00:00:00 2001
From: tedyu
Date: Tue, 5 Jul 2016 10:21:27 -0700
Subject: [PATCH] HBASE-16157 The incorrect block cache count and size are
 caused by removing duplicate block key in the LruBlockCache (ChiaPing Tsai)

---
 .../hadoop/hbase/io/hfile/LruBlockCache.java       | 19 +++++++++++++++----
 .../hbase/io/hfile/TestLruBlockCache.java          | 48 +++++++++++++++++++
 2 files changed, 63 insertions(+), 4 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index c380318c48a..41b46f2f1fa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -473,8 +473,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
   public boolean evictBlock(BlockCacheKey cacheKey) {
     LruCachedBlock cb = map.get(cacheKey);
     if (cb == null) return false;
-    evictBlock(cb, false);
-    return true;
+    return evictBlock(cb, false) > 0;
   }
 
   /**
@@ -511,7 +510,10 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
    * @return the heap size of evicted block
    */
   protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
-    map.remove(block.getCacheKey());
+    boolean found = map.remove(block.getCacheKey()) != null;
+    if (!found) {
+      return 0;
+    }
     updateSizeMetrics(block, true);
     long val = elements.decrementAndGet();
     if (LOG.isTraceEnabled()) {
@@ -543,6 +545,16 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
     }
   }
 
+  @VisibleForTesting
+  boolean isEvictionInProgress() {
+    return evictionInProgress;
+  }
+
+  @VisibleForTesting
+  long getOverhead() {
+    return overhead;
+  }
+
   /**
    * Eviction method.
    */
@@ -650,7 +662,6 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
         remainingBuckets--;
       }
     }
-
     if (LOG.isTraceEnabled()) {
       long single = bucketSingle.totalSize();
       long multi = bucketMulti.totalSize();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
index d7f9aba6a91..0d8a3bbbc1b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
@@ -25,6 +25,10 @@ import static org.junit.Assert.assertTrue;
 
 import java.nio.ByteBuffer;
 import java.util.Random;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -44,7 +48,46 @@ import org.junit.experimental.categories.Category;
 
 @Category({IOTests.class, SmallTests.class})
 public class TestLruBlockCache {
+  @Test
+  public void testCacheEvictionThreadSafe() throws Exception {
+    long maxSize = 100000;
+    int numBlocks = 9;
+    int testRuns = 10;
+    final long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
+    assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
+    final LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
+    EvictionThread evictionThread = cache.getEvictionThread();
+    assertTrue(evictionThread != null);
+    while (!evictionThread.isEnteringRun()) {
+      Thread.sleep(1);
+    }
+    final String hfileName = "hfile";
+    int threads = 10;
+    final int blocksPerThread = 5 * numBlocks;
+    for (int run = 0; run != testRuns; ++run) {
+      final AtomicInteger blockCount = new AtomicInteger(0);
+      ExecutorService service = Executors.newFixedThreadPool(threads);
+      for (int i = 0; i != threads; ++i) {
+        service.execute(new Runnable() {
+          @Override
+          public void run() {
+            for (int blockIndex = 0; blockIndex < blocksPerThread || (!cache.isEvictionInProgress()); ++blockIndex) {
+              CachedItem block = new CachedItem(hfileName, (int) blockSize, blockCount.getAndIncrement());
+              boolean inMemory = Math.random() > 0.5;
+              cache.cacheBlock(block.cacheKey, block, inMemory, false);
+            }
+            cache.evictBlocksByHfileName(hfileName);
+          }
+        });
+      }
+      service.shutdown();
+      // The test may fail here if the evict thread frees the blocks too fast
+      service.awaitTermination(10, TimeUnit.MINUTES);
+      assertEquals(0, cache.getBlockCount());
+      assertEquals(cache.getOverhead(), cache.getCurrentSize());
+    }
+  }
 
   @Test
   public void testBackgroundEvictionThread() throws Exception {
     long maxSize = 100000;
@@ -785,6 +828,11 @@ public class TestLruBlockCache {
     BlockCacheKey cacheKey;
     int size;
 
+    CachedItem(String blockName, int size, int offset) {
+      this.cacheKey = new BlockCacheKey(blockName, offset);
+      this.size = size;
+    }
+
     CachedItem(String blockName, int size) {
      this.cacheKey = new BlockCacheKey(blockName, 0);
      this.size = size;