HBASE-2253 Show Block cache hit ratio for requests where cacheBlocks=true
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1029123 13f79535-47bb-0310-9956-ffa450edef68
commit 590ada408a
parent b6604764b5
@@ -1072,6 +1072,9 @@ Release 0.21.0 - Unreleased
               caching
    HBASE-3162  Add TimeRange support into Increment to optimize for counters
               that are partitioned on time
+   HBASE-2253  Show Block cache hit ratio for requests where
+              cacheBlocks=true
+
 
 NEW FEATURES
    HBASE-1961  HBase EC2 scripts
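The split this change introduces exists because a client can read through the block cache without populating it. Below is a rough client-side sketch of such a read, assuming a 0.90-era HBase client API; the table name "mytable" and column family "cf" are invented for illustration.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;

    public class NonCachingScanSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "mytable");      // invented table name
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("cf"));             // invented column family
        // Read through the block cache but do not populate it; before this
        // patch such scans still dragged the single hit-ratio metric down.
        scan.setCacheBlocks(false);
        ResultScanner scanner = table.getScanner(scan);
        for (Result r : scanner) {
          // process r ...
        }
        scanner.close();
        table.close();
      }
    }

Such a scan still probes the cache on every block read, so it counts as an access; the new "caching" statistics exclude it.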
@@ -44,9 +44,10 @@ public interface BlockCache {
   /**
    * Fetch block from cache.
    * @param blockName Block number to fetch.
+   * @param caching Whether this request has caching enabled (used for stats)
    * @return Block or null if block is not in the cache.
    */
-  public ByteBuffer getBlock(String blockName);
+  public ByteBuffer getBlock(String blockName, boolean caching);
 
   /**
    * Shutdown the cache.
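A minimal sketch of how a reader is expected to use the revised contract: every read consults the cache, but only a read with caching enabled reports itself as a caching access and later populates the cache. This is an illustration, not the actual HFile code; loadFromDisk() is a stand-in for the real block read.

    import java.nio.ByteBuffer;
    import org.apache.hadoop.hbase.io.hfile.BlockCache;

    public final class BlockCacheUsageSketch {
      static ByteBuffer readBlock(BlockCache cache, String blockName, boolean cacheBlock) {
        // Counted as a caching access only when cacheBlock is true.
        ByteBuffer cached = cache.getBlock(blockName, cacheBlock);
        if (cached != null) {
          return cached;                          // cache hit
        }
        ByteBuffer fromDisk = loadFromDisk(blockName);
        if (cacheBlock) {
          cache.cacheBlock(blockName, fromDisk);  // only caching reads populate the cache
        }
        return fromDisk;
      }

      private static ByteBuffer loadFromDisk(String blockName) {
        return ByteBuffer.allocate(0);            // stand-in for the real HDFS read
      }
    }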
@@ -950,7 +950,8 @@ public class HFile {
       metaLoads++;
       // Check cache for block. If found return.
       if (cache != null) {
-        ByteBuffer cachedBuf = cache.getBlock(name + "meta" + block);
+        ByteBuffer cachedBuf = cache.getBlock(name + "meta" + block,
+          cacheBlock);
         if (cachedBuf != null) {
           // Return a distinct 'shallow copy' of the block,
           // so pos doesnt get messed by the scanner
@@ -1009,7 +1010,7 @@ public class HFile {
       blockLoads++;
       // Check cache for block. If found return.
       if (cache != null) {
-        ByteBuffer cachedBuf = cache.getBlock(name + block);
+        ByteBuffer cachedBuf = cache.getBlock(name + block, cacheBlock);
         if (cachedBuf != null) {
           // Return a distinct 'shallow copy' of the block,
           // so pos doesnt get messed by the scanner
@@ -279,13 +279,13 @@ public class LruBlockCache implements BlockCache, HeapSize {
    * @param blockName block name
    * @return buffer of specified block name, or null if not in cache
    */
-  public ByteBuffer getBlock(String blockName) {
+  public ByteBuffer getBlock(String blockName, boolean caching) {
     CachedBlock cb = map.get(blockName);
     if(cb == null) {
-      stats.miss();
+      stats.miss(caching);
       return null;
     }
-    stats.hit();
+    stats.hit(caching);
     cb.access(count.incrementAndGet());
     return cb.getBuffer();
   }
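As a quick illustration of the effect at the cache level (block names and sizes invented; the two-argument LruBlockCache constructor taking max size and block size is assumed):

    import java.nio.ByteBuffer;
    import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

    public class LruCachingStatsSketch {
      public static void main(String[] args) {
        LruBlockCache cache = new LruBlockCache(1024 * 1024, 64 * 1024);
        cache.cacheBlock("block-0", ByteBuffer.allocate(1024));

        cache.getBlock("block-0", true);   // hit, counted in the caching statistics
        cache.getBlock("block-0", false);  // hit, counted in the overall statistics only
        cache.getBlock("missing", false);  // miss, overall statistics only

        // Only the caching-enabled access contributes to the new ratio.
        System.out.println(cache.getStats().getHitCachingRatio());  // 1.0
      }
    }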
@@ -555,7 +555,11 @@ public class LruBlockCache implements BlockCache, HeapSize {
       "blocks=" + size() +", " +
       "accesses=" + stats.getRequestCount() + ", " +
       "hits=" + stats.getHitCount() + ", " +
-      "hitRatio=" + StringUtils.formatPercent(stats.getHitRatio(), 2) + "%, " +
+      "hitRatio=" + StringUtils.formatPercent(stats.getHitRatio(), 2) + "%, "+
+      "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
+      "cachingHits=" + stats.getHitCachingCount() + ", " +
+      "cachingHitsRatio=" +
+        StringUtils.formatPercent(stats.getHitCachingRatio(), 2) + "%, " +
       "evictions=" + stats.getEvictionCount() + ", " +
       "evicted=" + stats.getEvictedCount() + ", " +
       "evictedPerRun=" + stats.evictedPerEviction());
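To make the two percentages on this log line concrete with invented numbers: 20,000 accesses with 15,000 hits gives hitRatio = 15000/20000 = 75.00%, while if only 8,000 of those accesses had caching enabled and 7,000 of them hit, cachingHitsRatio = 7000/8000 = 87.50%. Reads issued with cacheBlocks=false (large scans, for example) still probe the cache and often miss, pulling the overall ratio down; the separate caching ratio is meant to factor them out.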
@@ -572,20 +576,35 @@ public class LruBlockCache implements BlockCache, HeapSize {
   }
 
   public static class CacheStats {
-    private final AtomicLong accessCount = new AtomicLong(0);
+    /** The number of getBlock requests that were cache hits */
     private final AtomicLong hitCount = new AtomicLong(0);
+    /**
+     * The number of getBlock requests that were cache hits, but only from
+     * requests that were set to use the block cache. This is because all reads
+     * attempt to read from the block cache even if they will not put new blocks
+     * into the block cache. See HBASE-2253 for more information.
+     */
+    private final AtomicLong hitCachingCount = new AtomicLong(0);
+    /** The number of getBlock requests that were cache misses */
     private final AtomicLong missCount = new AtomicLong(0);
+    /**
+     * The number of getBlock requests that were cache misses, but only from
+     * requests that were set to use the block cache.
+     */
+    private final AtomicLong missCachingCount = new AtomicLong(0);
+    /** The number of times an eviction has occurred */
     private final AtomicLong evictionCount = new AtomicLong(0);
+    /** The total number of blocks that have been evicted */
     private final AtomicLong evictedCount = new AtomicLong(0);
 
-    public void miss() {
+    public void miss(boolean caching) {
       missCount.incrementAndGet();
-      accessCount.incrementAndGet();
+      if (caching) missCachingCount.incrementAndGet();
     }
 
-    public void hit() {
+    public void hit(boolean caching) {
       hitCount.incrementAndGet();
-      accessCount.incrementAndGet();
+      if (caching) hitCachingCount.incrementAndGet();
     }
 
     public void evict() {
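A rough sketch of the four counters in isolation (not part of the patch; it assumes the nested CacheStats class is reachable from the caller's code):

    import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

    public class CacheStatsCountingSketch {
      public static void main(String[] args) {
        LruBlockCache.CacheStats stats = new LruBlockCache.CacheStats();
        stats.hit(true);    // caching-enabled read that hit
        stats.hit(false);   // cacheBlocks=false read that hit
        stats.hit(false);   // another non-caching hit
        stats.miss(true);   // caching-enabled read that missed
        // Counters now: hitCount=3, hitCachingCount=1, missCount=1, missCachingCount=1,
        // so getRequestCachingCount() = 1 + 1 = 2 caching accesses.
      }
    }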
@@ -597,15 +616,27 @@ public class LruBlockCache implements BlockCache, HeapSize {
     }
 
     public long getRequestCount() {
-      return accessCount.get();
+      return getHitCount() + getMissCount();
     }
 
+    public long getRequestCachingCount() {
+      return getHitCachingCount() + getMissCachingCount();
+    }
+
     public long getMissCount() {
       return missCount.get();
     }
 
+    public long getMissCachingCount() {
+      return missCachingCount.get();
+    }
+
     public long getHitCount() {
-      return hitCount.get();
+      return hitCachingCount.get();
     }
 
+    public long getHitCachingCount() {
+      return hitCachingCount.get();
+    }
+
     public long getEvictionCount() {
@@ -620,12 +651,20 @@ public class LruBlockCache implements BlockCache, HeapSize {
       return ((float)getHitCount()/(float)getRequestCount());
     }
 
+    public double getHitCachingRatio() {
+      return ((float)getHitCachingCount()/(float)getRequestCachingCount());
+    }
+
     public double getMissRatio() {
       return ((float)getMissCount()/(float)getRequestCount());
     }
 
+    public double getMissCachingRatio() {
+      return ((float)getMissCachingCount()/(float)getRequestCachingCount());
+    }
+
     public double evictedPerEviction() {
-      return (float)((float)getEvictedCount()/(float)getEvictionCount());
+      return ((float)getEvictedCount()/(float)getEvictionCount());
     }
   }
 
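Continuing the counter sketch above, getHitCachingRatio() evaluates to 1/(1+1) = 0.5 and getMissCachingRatio() to 1/(1+1) = 0.5. Note that these are float divisions, so a cache that has not yet seen a single caching-enabled read has getRequestCachingCount() == 0 and the caching ratios come out as NaN.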
@@ -66,7 +66,7 @@ public class SimpleBlockCache implements BlockCache {
     return cache.size();
   }
 
-  public synchronized ByteBuffer getBlock(String blockName) {
+  public synchronized ByteBuffer getBlock(String blockName, boolean caching) {
     processQueue(); // clear out some crap.
     Ref ref = cache.get(blockName);
     if (ref == null)
@@ -1113,6 +1113,9 @@ public class HRegionServer implements HRegionInterface, HBaseRPCErrorHandler,
         double ratio = lruBlockCache.getStats().getHitRatio();
         int percent = (int) (ratio * 100);
         this.metrics.blockCacheHitRatio.set(percent);
+        ratio = lruBlockCache.getStats().getHitCachingRatio();
+        percent = (int) (ratio * 100);
+        this.metrics.blockCacheHitCachingRatio.set(percent);
       }
     }
 
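With the same numbers as the sketch above, a getHitCachingRatio() of 0.5 is exported as (int) (0.5 * 100) = 50 in the new blockCacheHitCachingRatio gauge; the cast simply truncates the fraction (and turns a NaN ratio into 0).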
@@ -90,6 +90,12 @@ public class RegionServerMetrics implements Updater {
    */
   public final MetricsIntValue blockCacheHitRatio = new MetricsIntValue("blockCacheHitRatio", registry);
 
+  /**
+   * Block hit caching ratio. This only includes the requests to the block
+   * cache where caching was turned on. See HBASE-2253.
+   */
+  public final MetricsIntValue blockCacheHitCachingRatio = new MetricsIntValue("blockCacheHitCachingRatio", registry);
+
   /*
    * Count of requests to the regionservers since last call to metrics update
    */
@@ -170,14 +176,14 @@ public class RegionServerMetrics implements Updater {
 
     // export for JMX
     statistics = new RegionServerStatistics(this.registry, name);
-
+
     // get custom attributes
     try {
       Object m = ContextFactory.getFactory().getAttribute("hbase.extendedperiod");
       if (m instanceof String) {
         this.extendedPeriod = Long.parseLong((String) m)*1000;
       }
-    } catch (IOException ioe) {
+    } catch (IOException ioe) {
       LOG.info("Couldn't load ContextFactory for Metrics config info");
     }
 
@@ -199,7 +205,7 @@ public class RegionServerMetrics implements Updater {
     this.lastUpdate = System.currentTimeMillis();
 
     // has the extended period for long-living stats elapsed?
-    if (this.extendedPeriod > 0 &&
+    if (this.extendedPeriod > 0 &&
         this.lastUpdate - this.lastExtUpdate >= this.extendedPeriod) {
       this.lastExtUpdate = this.lastUpdate;
       this.compactionTime.resetMinMaxAvg();
@@ -208,7 +214,7 @@ public class RegionServerMetrics implements Updater {
       this.flushSize.resetMinMaxAvg();
       this.resetAllMinMax();
     }
-
+
     this.stores.pushMetric(this.metricsRecord);
     this.storefiles.pushMetric(this.metricsRecord);
     this.storefileIndexSizeMB.pushMetric(this.metricsRecord);
@@ -220,6 +226,7 @@ public class RegionServerMetrics implements Updater {
     this.blockCacheFree.pushMetric(this.metricsRecord);
     this.blockCacheCount.pushMetric(this.metricsRecord);
     this.blockCacheHitRatio.pushMetric(this.metricsRecord);
+    this.blockCacheHitCachingRatio.pushMetric(this.metricsRecord);
 
     // Mix in HFile and HLog metrics
     // Be careful. Here is code for MTVR from up in hadoop:
@@ -265,10 +272,10 @@ public class RegionServerMetrics implements Updater {
   public float getRequests() {
     return this.requests.getPreviousIntervalValue();
   }
-
+
   /**
    * @param compact history in <time, size>
-   */
+   */
   public synchronized void addCompaction(final Pair<Long,Long> compact) {
     this.compactionTime.inc(compact.getFirst());
     this.compactionSize.inc(compact.getSecond());
@@ -283,7 +290,7 @@ public class RegionServerMetrics implements Updater {
       this.flushSize.inc(f.getSecond());
     }
   }
-
+
   /**
    * @param inc How much to add to requests.
    */
@@ -328,6 +335,8 @@ public class RegionServerMetrics implements Updater {
       Long.valueOf(this.blockCacheCount.get()));
     sb = Strings.appendKeyValue(sb, this.blockCacheHitRatio.getName(),
       Long.valueOf(this.blockCacheHitRatio.get()));
+    sb = Strings.appendKeyValue(sb, this.blockCacheHitCachingRatio.getName(),
+      Long.valueOf(this.blockCacheHitCachingRatio.get()));
     return sb.toString();
   }
 }
@@ -76,7 +76,7 @@ public class TestLruBlockCache extends TestCase {
 
     // Confirm empty
     for(Block block : blocks) {
-      assertTrue(cache.getBlock(block.blockName) == null);
+      assertTrue(cache.getBlock(block.blockName, true) == null);
     }
 
     // Add blocks
@@ -90,7 +90,7 @@ public class TestLruBlockCache extends TestCase {
 
     // Check if all blocks are properly cached and retrieved
     for(Block block : blocks) {
-      ByteBuffer buf = cache.getBlock(block.blockName);
+      ByteBuffer buf = cache.getBlock(block.blockName, true);
       assertTrue(buf != null);
       assertEquals(buf.capacity(), block.buf.capacity());
     }
@@ -110,7 +110,7 @@ public class TestLruBlockCache extends TestCase {
 
     // Check if all blocks are properly cached and retrieved
     for(Block block : blocks) {
-      ByteBuffer buf = cache.getBlock(block.blockName);
+      ByteBuffer buf = cache.getBlock(block.blockName, true);
       assertTrue(buf != null);
       assertEquals(buf.capacity(), block.buf.capacity());
     }
@@ -154,10 +154,10 @@ public class TestLruBlockCache extends TestCase {
         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
 
     // All blocks except block 0 and 1 should be in the cache
-    assertTrue(cache.getBlock(blocks[0].blockName) == null);
-    assertTrue(cache.getBlock(blocks[1].blockName) == null);
+    assertTrue(cache.getBlock(blocks[0].blockName, true) == null);
+    assertTrue(cache.getBlock(blocks[1].blockName, true) == null);
     for(int i=2;i<blocks.length;i++) {
-      assertEquals(cache.getBlock(blocks[i].blockName),
+      assertEquals(cache.getBlock(blocks[i].blockName, true),
         blocks[i].buf);
     }
   }
@@ -178,7 +178,7 @@ public class TestLruBlockCache extends TestCase {
     for(Block block : multiBlocks) {
       cache.cacheBlock(block.blockName, block.buf);
       expectedCacheSize += block.heapSize();
-      assertEquals(cache.getBlock(block.blockName), block.buf);
+      assertEquals(cache.getBlock(block.blockName, true), block.buf);
     }
 
     // Add the single blocks (no get)
@@ -208,14 +208,14 @@ public class TestLruBlockCache extends TestCase {
     // This test makes multi go barely over its limit, in-memory
     // empty, and the rest in single.  Two single evictions and
    // one multi eviction expected.
-    assertTrue(cache.getBlock(singleBlocks[0].blockName) == null);
-    assertTrue(cache.getBlock(multiBlocks[0].blockName) == null);
+    assertTrue(cache.getBlock(singleBlocks[0].blockName, true) == null);
+    assertTrue(cache.getBlock(multiBlocks[0].blockName, true) == null);
 
     // And all others to be cached
     for(int i=1;i<4;i++) {
-      assertEquals(cache.getBlock(singleBlocks[i].blockName),
+      assertEquals(cache.getBlock(singleBlocks[i].blockName, true),
         singleBlocks[i].buf);
-      assertEquals(cache.getBlock(multiBlocks[i].blockName),
+      assertEquals(cache.getBlock(multiBlocks[i].blockName, true),
         multiBlocks[i].buf);
     }
   }
@@ -252,7 +252,7 @@ public class TestLruBlockCache extends TestCase {
       // Add and get multi blocks
       cache.cacheBlock(multiBlocks[i].blockName, multiBlocks[i].buf);
       expectedCacheSize += multiBlocks[i].heapSize();
-      cache.getBlock(multiBlocks[i].blockName);
+      cache.getBlock(multiBlocks[i].blockName, true);
 
       // Add memory blocks as such
       cache.cacheBlock(memoryBlocks[i].blockName, memoryBlocks[i].buf, true);
@@ -274,10 +274,10 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(1, cache.getEvictedCount());
 
     // Verify oldest single block is the one evicted
-    assertEquals(null, cache.getBlock(singleBlocks[0].blockName));
+    assertEquals(null, cache.getBlock(singleBlocks[0].blockName, true));
 
     // Change the oldest remaining single block to a multi
-    cache.getBlock(singleBlocks[1].blockName);
+    cache.getBlock(singleBlocks[1].blockName, true);
 
     // Insert another single block
     cache.cacheBlock(singleBlocks[4].blockName, singleBlocks[4].buf);
@@ -287,7 +287,7 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(2, cache.getEvictedCount());
 
     // Oldest multi block should be evicted now
-    assertEquals(null, cache.getBlock(multiBlocks[0].blockName));
+    assertEquals(null, cache.getBlock(multiBlocks[0].blockName, true));
 
     // Insert another memory block
     cache.cacheBlock(memoryBlocks[3].blockName, memoryBlocks[3].buf, true);
@@ -297,7 +297,7 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(3, cache.getEvictedCount());
 
     // Oldest memory block should be evicted now
-    assertEquals(null, cache.getBlock(memoryBlocks[0].blockName));
+    assertEquals(null, cache.getBlock(memoryBlocks[0].blockName, true));
 
     // Add a block that is twice as big (should force two evictions)
     Block [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
@@ -308,12 +308,12 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(6, cache.getEvictedCount());
 
     // Expect three remaining singles to be evicted
-    assertEquals(null, cache.getBlock(singleBlocks[2].blockName));
-    assertEquals(null, cache.getBlock(singleBlocks[3].blockName));
-    assertEquals(null, cache.getBlock(singleBlocks[4].blockName));
+    assertEquals(null, cache.getBlock(singleBlocks[2].blockName, true));
+    assertEquals(null, cache.getBlock(singleBlocks[3].blockName, true));
+    assertEquals(null, cache.getBlock(singleBlocks[4].blockName, true));
 
     // Make the big block a multi block
-    cache.getBlock(bigBlocks[0].blockName);
+    cache.getBlock(bigBlocks[0].blockName, true);
 
     // Cache another single big block
     cache.cacheBlock(bigBlocks[1].blockName, bigBlocks[1].buf);
@@ -323,9 +323,9 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(9, cache.getEvictedCount());
 
     // Expect three remaining multis to be evicted
-    assertEquals(null, cache.getBlock(singleBlocks[1].blockName));
-    assertEquals(null, cache.getBlock(multiBlocks[1].blockName));
-    assertEquals(null, cache.getBlock(multiBlocks[2].blockName));
+    assertEquals(null, cache.getBlock(singleBlocks[1].blockName, true));
+    assertEquals(null, cache.getBlock(multiBlocks[1].blockName, true));
+    assertEquals(null, cache.getBlock(multiBlocks[2].blockName, true));
 
     // Cache a big memory block
     cache.cacheBlock(bigBlocks[2].blockName, bigBlocks[2].buf, true);
@@ -335,9 +335,9 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(12, cache.getEvictedCount());
 
     // Expect three remaining in-memory to be evicted
-    assertEquals(null, cache.getBlock(memoryBlocks[1].blockName));
-    assertEquals(null, cache.getBlock(memoryBlocks[2].blockName));
-    assertEquals(null, cache.getBlock(memoryBlocks[3].blockName));
+    assertEquals(null, cache.getBlock(memoryBlocks[1].blockName, true));
+    assertEquals(null, cache.getBlock(memoryBlocks[2].blockName, true));
+    assertEquals(null, cache.getBlock(memoryBlocks[3].blockName, true));
 
 
   }
@@ -364,7 +364,7 @@ public class TestLruBlockCache extends TestCase {
     // Add 5 multi blocks
     for(Block block : multiBlocks) {
       cache.cacheBlock(block.blockName, block.buf);
-      cache.getBlock(block.blockName);
+      cache.getBlock(block.blockName, true);
     }
 
     // Add 5 single blocks
@@ -379,10 +379,10 @@ public class TestLruBlockCache extends TestCase {
     assertEquals(4, cache.getEvictedCount());
 
     // Should have been taken off equally from single and multi
-    assertEquals(null, cache.getBlock(singleBlocks[0].blockName));
-    assertEquals(null, cache.getBlock(singleBlocks[1].blockName));
-    assertEquals(null, cache.getBlock(multiBlocks[0].blockName));
-    assertEquals(null, cache.getBlock(multiBlocks[1].blockName));
+    assertEquals(null, cache.getBlock(singleBlocks[0].blockName, true));
+    assertEquals(null, cache.getBlock(singleBlocks[1].blockName, true));
+    assertEquals(null, cache.getBlock(multiBlocks[0].blockName, true));
+    assertEquals(null, cache.getBlock(multiBlocks[1].blockName, true));
 
     // Let's keep "scanning" by adding single blocks.  From here on we only
     // expect evictions from the single bucket.
@@ -432,7 +432,7 @@ public class TestLruBlockCache extends TestCase {
 
       // Add and get multi blocks
       cache.cacheBlock(multiBlocks[i].blockName, multiBlocks[i].buf);
-      cache.getBlock(multiBlocks[i].blockName);
+      cache.getBlock(multiBlocks[i].blockName, true);
 
       // Add memory blocks as such
      cache.cacheBlock(memoryBlocks[i].blockName, memoryBlocks[i].buf, true);
@@ -452,16 +452,16 @@ public class TestLruBlockCache extends TestCase {
 
     // And the oldest 5 blocks from each category should be gone
     for(int i=0;i<5;i++) {
-      assertEquals(null, cache.getBlock(singleBlocks[i].blockName));
-      assertEquals(null, cache.getBlock(multiBlocks[i].blockName));
-      assertEquals(null, cache.getBlock(memoryBlocks[i].blockName));
+      assertEquals(null, cache.getBlock(singleBlocks[i].blockName, true));
+      assertEquals(null, cache.getBlock(multiBlocks[i].blockName, true));
+      assertEquals(null, cache.getBlock(memoryBlocks[i].blockName, true));
     }
 
     // And the newest 5 blocks should still be accessible
     for(int i=5;i<10;i++) {
-      assertEquals(singleBlocks[i].buf, cache.getBlock(singleBlocks[i].blockName));
-      assertEquals(multiBlocks[i].buf, cache.getBlock(multiBlocks[i].blockName));
-      assertEquals(memoryBlocks[i].buf, cache.getBlock(memoryBlocks[i].blockName));
+      assertEquals(singleBlocks[i].buf, cache.getBlock(singleBlocks[i].blockName, true));
+      assertEquals(multiBlocks[i].buf, cache.getBlock(multiBlocks[i].blockName, true));
+      assertEquals(memoryBlocks[i].buf, cache.getBlock(memoryBlocks[i].blockName, true));
     }
   }
 