HBASE-11573 Report age on eviction

stack 2014-07-23 13:25:12 -07:00
parent 69039f8620
commit 0523c34321
13 changed files with 97 additions and 133 deletions
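In outline, the change keeps a per-cache histogram of block ages observed at eviction time inside CacheStats and exposes it through an AgeSnapshot, so the RegionServer UI and CacheStats.toString() can report the mean and standard deviation of evicted-block age. Below is a minimal, self-contained sketch of that mechanism reconstructed from the hunks that follow; the class EvictionAgeTracker and its method names are illustrative, not part of the commit.

    // Sketch only: mirrors the histogram-at-eviction idea added to CacheStats below.
    import com.yammer.metrics.core.Histogram;
    import com.yammer.metrics.core.MetricsRegistry;

    public class EvictionAgeTracker {
      // Registry used to create named histograms (same metrics library the patch imports).
      private static final MetricsRegistry METRICS = new MetricsRegistry();
      private final Histogram ageAtEviction;
      private final long startTime = System.nanoTime();

      public EvictionAgeTracker(final String cacheName) {
        // One histogram per cache, e.g. "LruBlockCache.ageAtEviction".
        this.ageAtEviction = METRICS.newHistogram(EvictionAgeTracker.class,
            cacheName + ".ageAtEviction");
      }

      // Called on every eviction with the nanoTime at which the block was cached,
      // as CacheStats.evicted(long) does in the diff below.
      public void evicted(final long cachedTimeNanos) {
        if (cachedTimeNanos > this.startTime) {
          this.ageAtEviction.update(cachedTimeNanos - this.startTime);
        }
      }

      // Read side: the UI template takes a snapshot of this histogram and converts
      // the nanosecond values for display.
      public long meanAgeSeconds() {
        return (long) (this.ageAtEviction.mean() / 1000000000L);
      }
    }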

View File

@@ -30,11 +30,13 @@ Configuration config;
bcName = bc.getClass().getSimpleName();
}
BlockCache [] bcs = cacheConfig == null? null: cacheConfig.getBlockCache() == null? null: cacheConfig.getBlockCache().getBlockCaches();
+// If more than one bc, show evictions in each bc listing so can compare
+boolean evictions = bcs != null && bcs.length > 1;
</%java>
<%import>
java.util.Map;
org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile;
-org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.AgeSnapshot;
+org.apache.hadoop.hbase.io.hfile.AgeSnapshot;
org.apache.hadoop.hbase.io.hfile.CachedBlock;
org.apache.hadoop.conf.Configuration;
org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -64,10 +66,10 @@ org.apache.hadoop.util.StringUtils;
<& bc_stats; cacheConfig = cacheConfig &>
</div>
<div class="tab-pane" id="tab_bc_l1">
-<& bc_l; bc = bcs == null? bc: bcs[0]; name = "L1" &>
+<& bc_l; bc = bcs == null? bc: bcs[0]; name = "L1"; evictions = evictions; &>
</div>
<div class="tab-pane" id="tab_bc_l2">
-<& bc_l; bc = bcs == null? null: bcs.length <= 1? null: bcs[1]; name = "L2" &>
+<& bc_l; bc = bcs == null? null: bcs.length <= 1? null: bcs[1]; name = "L2"; evictions = evictions; &>
</div>
</div>
</div>
@@ -168,6 +170,42 @@ org.apache.hadoop.util.StringUtils;
</%if>
</%def>
+<%def evictions_tmpl>
+<%args>
+BlockCache bc;
+</%args>
+<%java>
+AgeSnapshot ageAtEvictionSnapshot = bc.getStats().getAgeAtEvictionSnapshot();
+// Only show if non-zero mean and stddev as is the case in combinedblockcache
+double mean = ageAtEvictionSnapshot.getMean();
+double stddev = ageAtEvictionSnapshot.getStdDev();
+</%java>
+<tr>
+<td>Evicted</td>
+<td><% String.format("%,d", bc.getStats().getEvictedCount()) %></td>
+<td>The total number of blocks evicted</td>
+</tr>
+<tr>
+<td>Evictions</td>
+<td><% String.format("%,d", bc.getStats().getEvictionCount()) %></td>
+<td>The total number of times an eviction has occurred</td>
+</tr>
+<%if mean > 0 %>
+<tr>
+<td>Mean</td>
+<td><% String.format("%,d", (long)(ageAtEvictionSnapshot.getMean()/(1000000 * 1000))) %></td>
+<td>Mean age of Blocks at eviction time (seconds)</td>
+</tr>
+</%if>
+<%if stddev > 0 %>
+<tr>
+<td>StdDev</td>
+<td><% String.format("%,d", (long)(ageAtEvictionSnapshot.getStdDev()/1000000)) %></td>
+<td>Standard Deviation for age of Blocks at eviction time</td>
+</tr>
+</%if>
+</%def>
<%def bc_stats>
<%args>
CacheConfig cacheConfig;
@@ -196,16 +234,7 @@ org.apache.hadoop.util.StringUtils;
<td><% String.format("%,d", cacheConfig.getBlockCache().getBlockCount()) %></td>
<td>Number of blocks in block cache</td>
</tr>
-<tr>
-<td>Evicted</td>
-<td><% String.format("%,d", cacheConfig.getBlockCache().getStats().getEvictedCount()) %></td>
-<td>The total number of blocks evicted</td>
-</tr>
-<tr>
-<td>Evictions</td>
-<td><% String.format("%,d", cacheConfig.getBlockCache().getStats().getEvictionCount()) %></td>
-<td>The total number of times an eviction has occurred</td>
-</tr>
+<& evictions_tmpl; bc = cacheConfig.getBlockCache(); &>
<tr>
<td>Hits</td>
<td><% String.format("%,d", cacheConfig.getBlockCache().getStats().getHitCount()) %></td>
@@ -241,11 +270,12 @@ are combined counts. Request count is sum of hits and misses.</p>
<%args>
BlockCache bc;
String name;
+boolean evictions;
</%args>
<%if bc == null %>
<p>No <% name %> deployed</p>
<%else>
-<& block_cache; bc = bc; name = name; &>
+<& block_cache; bc = bc; name = name; evictions = evictions; &>
</%if>
</%def>
@@ -253,6 +283,7 @@ are combined counts. Request count is sum of hits and misses.</p>
<%args>
BlockCache bc;
String name;
+boolean evictions;
</%args>
<%java>
final long nanosPerSecond = 1000000000;
@@ -260,7 +291,7 @@ are combined counts. Request count is sum of hits and misses.</p>
String bcName = bc.getClass().getSimpleName();
org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile cbsbf =
org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.getLoadedCachedBlocksByFile(config, bc);
-AgeSnapshot snapshot = cbsbf.getAgeSnapshot();
+AgeSnapshot cbsbfSnapshot = cbsbf.getAgeInCacheSnapshot();
boolean bucketCache = bc.getClass().getSimpleName().equals("BucketCache");
BucketCacheStats bucketCacheStats = null;
@@ -274,7 +305,7 @@ are combined counts. Request count is sum of hits and misses.</p>
}
</%java>
<%if cbsbf.isFull() %>
-<p><b>Too many blocks!</b> Listing out the first <% snapshot.getMax() %> only (hbase.ui.blockcache.by.file.max)</p>
+<p><b>Too many blocks!</b> Listing out the first <% cbsbfSnapshot.getMax() %> only (hbase.ui.blockcache.by.file.max)</p>
</%if>
<table id="blocks_summary" class="table table-striped">
<tr>
@@ -318,48 +349,7 @@ are combined counts. Request count is sum of hits and misses.</p>
<td>Size of DATA Blocks</td>
</tr>
</%if>
-<tr>
-<td>Evicted</td>
-<td><% String.format("%,d", bc.getStats().getEvictedCount()) %></td>
-<td>The total number of blocks evicted</td>
-</tr>
-<tr>
-<td>Evictions</td>
-<td><% String.format("%,d", bc.getStats().getEvictionCount()) %></td>
-<td>The total number of times an eviction has occurred</td>
-</tr>
-<tr>
-<td>Mean</td>
-<td><% String.format("%,d", (long)(snapshot.getMean()/nanosPerSecond)) %></td>
-<td>Mean age of Blocks in cache (seconds)</td>
-</tr>
-<tr>
-<td>StdDev</td>
-<td><% String.format("%,d", (long)(snapshot.getStdDev()/nanosPerSecond)) %></td>
-<td>Age standard deviation of Blocks in cache</td>
-</tr>
-<tr>
-<td>Min</td>
-<td><% String.format("%,d", (long)(snapshot.getMin()/nanosPerSecond)) %></td>
-<td>Min age of Blocks in cache (seconds)</td>
-</tr>
-<tr>
-<td>Max</td>
-<td><% String.format("%,d", (long)(snapshot.getMax()/nanosPerSecond)) %></td>
-<td>Max age of Blocks in cache (seconds)</td>
-</tr>
-<tr>
-<td>95th Percentile</td>
-<td><% String.format("%,d", (long)(snapshot.get95thPercentile()/nanosPerSecond)) %></td>
-<td>95th percentile of age of Blocks in cache (seconds)</td>
-</tr>
-<tr>
-<td>99th Percentile</td>
-<td><% String.format("%,d", (long)(snapshot.get99thPercentile()/nanosPerSecond)) %></td>
-<td>99th percentile of age of Blocks in cache (seconds)</td>
-</tr>
+<%if evictions %><& evictions_tmpl; bc = bc; &></%if>
<%if bucketCache %>
<tr>
<td>Hits per Second</td>

View File

@@ -53,7 +53,7 @@ com.yammer.metrics.stats.Snapshot;
}
CachedBlocksByFile cbsbf = BlockCacheUtil.getLoadedCachedBlocksByFile(conf, bc);
</%java>
-<%if bcv.equals("file") %><& bc_by_file; cbsbf = cbsbf; &><%else>{<% BlockCacheUtil.toJSON(bc) %>, <% cbsbf %> }</%if>
+<%if bcv.equals("file") %><& bc_by_file; cbsbf = cbsbf; &><%else>[ <% BlockCacheUtil.toJSON(bc) %>, <% BlockCacheUtil.toJSON(cbsbf) %> ]</%if>
<%java>
cbsbf = null;
</%java>

View File

@@ -26,7 +26,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
/**
* An InputStream that wraps a DataInput.
-* @see DataOutputOutputStream
*/
@InterfaceAudience.Private
public class DataInputInputStream extends InputStream {

View File

@@ -159,57 +159,6 @@ public class BlockCacheUtil {
", priority=" + cb.getBlockPriority();
}
-/**
-* Snapshot of block cache age in cache.
-* This object is preferred because we can control how it is serialized out when JSON'ing.
-*/
-@JsonIgnoreProperties({"ageHistogram", "snapshot"})
-public static class AgeSnapshot {
-private final Histogram ageHistogram;
-private final Snapshot snapshot;
-AgeSnapshot(final Histogram ageHistogram) {
-this.ageHistogram = ageHistogram;
-this.snapshot = ageHistogram.getSnapshot();
-}
-public double get75thPercentile() {
-return snapshot.get75thPercentile();
-}
-public double get95thPercentile() {
-return snapshot.get95thPercentile();
-}
-public double get98thPercentile() {
-return snapshot.get98thPercentile();
-}
-public double get999thPercentile() {
-return snapshot.get999thPercentile();
-}
-public double get99thPercentile() {
-return snapshot.get99thPercentile();
-}
-public double getMean() {
-return this.ageHistogram.mean();
-}
-public double getMax() {
-return ageHistogram.max();
-}
-public double getMin() {
-return ageHistogram.min();
-}
-public double getStdDev() {
-return ageHistogram.stdDev();
-}
-}
/**
* Get a {@link CachedBlocksByFile} instance and load it up by iterating content in
* {@link BlockCache}.
@@ -319,7 +268,7 @@ public class BlockCacheUtil {
return dataSize;
}
-public AgeSnapshot getAgeSnapshot() {
+public AgeSnapshot getAgeInCacheSnapshot() {
return new AgeSnapshot(this.age);
}

View File

@@ -90,11 +90,6 @@ public class CacheConfig {
/**
* When using bucket cache, this is a float that EITHER represents a percentage of total heap
* memory size to give to the cache (if < 1.0) OR, it is the capacity in megabytes of the cache.
-*
-* <p>The resultant size is further divided if {@link #BUCKET_CACHE_COMBINED_KEY} is set (It is
-* set by default. When false, bucket cache serves as an "L2" cache to the "L1"
-* {@link LruBlockCache}). The percentage is set in
-* with {@link #BUCKET_CACHE_COMBINED_PERCENTAGE_KEY} float.
*/
public static final String BUCKET_CACHE_SIZE_KEY = "hbase.bucketcache.size";

View File

@@ -22,11 +22,19 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.classification.InterfaceAudience;
+import com.yammer.metrics.core.Histogram;
+import com.yammer.metrics.core.MetricsRegistry;
/**
* Class that implements cache metrics.
*/
@InterfaceAudience.Private
public class CacheStats {
+/**
+* Needed making histograms.
+*/
+private static final MetricsRegistry METRICS = new MetricsRegistry();
/** Sliding window statistics. The number of metric periods to include in
* sliding window hit ratio calculations.
*/
@@ -78,25 +86,34 @@ public class CacheStats {
private long lastRequestCachingCount = 0;
/** Current window index (next to be updated) */
private int windowIndex = 0;
+/**
+* Keep running age at eviction time
+*/
+private Histogram ageAtEviction;
+private long startTime = System.nanoTime();
-public CacheStats() {
-this(DEFAULT_WINDOW_PERIODS);
+public CacheStats(final String name) {
+this(name, DEFAULT_WINDOW_PERIODS);
}
-public CacheStats(int numPeriodsInWindow) {
+public CacheStats(final String name, int numPeriodsInWindow) {
this.numPeriodsInWindow = numPeriodsInWindow;
this.hitCounts = initializeZeros(numPeriodsInWindow);
this.hitCachingCounts = initializeZeros(numPeriodsInWindow);
this.requestCounts = initializeZeros(numPeriodsInWindow);
this.requestCachingCounts = initializeZeros(numPeriodsInWindow);
+this.ageAtEviction = METRICS.newHistogram(CacheStats.class, name + ".ageAtEviction");
}
@Override
public String toString() {
+AgeSnapshot snapshot = getAgeAtEvictionSnapshot();
return "hitCount=" + getHitCount() + ", hitCachingCount=" + getHitCachingCount() +
", missCount=" + getMissCount() + ", missCachingCount=" + getMissCachingCount() +
", evictionCount=" + getEvictionCount() +
-", evictedBlockCount=" + getEvictedCount();
+", evictedBlockCount=" + getEvictedCount() +
+", evictedAgeMean=" + snapshot.getMean() +
+", evictedAgeStdDev=" + snapshot.getStdDev();
}
public void miss(boolean caching) {
@@ -113,8 +130,9 @@ public class CacheStats {
evictionCount.incrementAndGet();
}
-public void evicted() {
-evictedBlockCount.incrementAndGet();
+public void evicted(final long t) {
+if (t > this.startTime) this.ageAtEviction.update(t - this.startTime);
+this.evictedBlockCount.incrementAndGet();
}
public long getRequestCount() {
@@ -146,7 +164,7 @@ public class CacheStats {
}
public long getEvictedCount() {
-return evictedBlockCount.get();
+return this.evictedBlockCount.get();
}
public double getHitRatio() {
@@ -210,6 +228,10 @@ public class CacheStats {
return Double.isNaN(ratio) ? 0 : ratio;
}
+public AgeSnapshot getAgeAtEvictionSnapshot() {
+return new AgeSnapshot(this.ageAtEviction);
+}
private static long sum(long [] counts) {
long sum = 0;
for (long count : counts) sum += count;

View File

@@ -127,6 +127,7 @@ public class CombinedBlockCache implements BlockCache, HeapSize {
private final CacheStats bucketCacheStats;
CombinedCacheStats(CacheStats lbcStats, CacheStats fcStats) {
+super("CombinedBlockCache");
this.lruCacheStats = lbcStats;
this.bucketCacheStats = fcStats;
}

View File

@@ -285,7 +285,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
this.singleFactor = singleFactor;
this.multiFactor = multiFactor;
this.memoryFactor = memoryFactor;
-this.stats = new CacheStats();
+this.stats = new CacheStats(this.getClass().getSimpleName());
this.count = new AtomicLong(0);
this.elements = new AtomicLong(0);
this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
@@ -460,7 +460,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
map.remove(block.getCacheKey());
updateSizeMetrics(block, true);
elements.decrementAndGet();
-stats.evicted();
+stats.evicted(block.getCachedTime());
if (evictedByEvictionProcess && victimHandler != null) {
boolean wait = getCurrentSize() < acceptableSize();
boolean inMemory = block.getPriority() == BlockPriority.MEMORY;

View File

@@ -447,7 +447,7 @@ public class BucketCache implements BlockCache, HeapSize {
}
}
}
-cacheStats.evicted();
+cacheStats.evicted(bucketEntry == null? 0: bucketEntry.getCachedTime());
return true;
}

View File

@@ -34,6 +34,10 @@ public class BucketCacheStats extends CacheStats {
private final static int nanoTime = 1000000;
private long lastLogTime = EnvironmentEdgeManager.currentTimeMillis();
+BucketCacheStats() {
+super("BucketCache");
+}
@Override
public String toString() {
return super.toString() + ", ioHitsPerSecond=" + getIOHitsPerSecond() +

View File

@@ -67,6 +67,11 @@ public class TestBlockCacheReporting {
bc.getBlock(bcki, true, false, true);
}
assertEquals(2 * count /*Data and Index blocks*/, bc.getStats().getHitCount());
+BlockCacheKey bckd = new BlockCacheKey("f", 0);
+BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);
+bc.evictBlock(bckd);
+bc.evictBlock(bcki);
+bc.getStats().getEvictedCount();
}
@Test

View File

@@ -607,7 +607,7 @@ public class TestLruBlockCache {
double delta = 0.01;
// 3 total periods
-CacheStats stats = new CacheStats(3);
+CacheStats stats = new CacheStats("test", 3);
// No accesses, should be 0
stats.rollMetricsPeriod();

View File

@@ -261,8 +261,7 @@ public class TestHeapMemoryManager {
}
private static class BlockCacheStub implements ResizableBlockCache {
-CacheStats stats = new CacheStats();
+CacheStats stats = new CacheStats("test");
long maxSize = 0;
public BlockCacheStub(long size){
@@ -288,13 +287,13 @@
@Override
public boolean evictBlock(BlockCacheKey cacheKey) {
-stats.evicted();
+stats.evicted(0);
return false;
}
@Override
public int evictBlocksByHfileName(String hfileName) {
-stats.evicted(); // Just assuming only one block for file here.
+stats.evicted(0); // Just assuming only one block for file here.
return 0;
}