HBASE-11573 Report age on eviction
commit 0523c34321 (parent 69039f8620)
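The core of the change, before the diff: CacheStats keeps a Yammer Metrics histogram of block ages at eviction and exposes it as a snapshot that the RegionServer UI and toString() read back. Below is a minimal, self-contained sketch of that mechanism, assuming metrics-core 2.x (com.yammer.metrics) on the classpath. It is illustrative only, not code from this commit, though it uses the same Histogram/Snapshot calls the patch uses.

// Illustrative sketch (not part of the commit): record a nanosecond-valued age
// sample each time a block is evicted, then read the distribution back out.
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.MetricsRegistry;
import com.yammer.metrics.stats.Snapshot;

public class AgeAtEvictionSketch {
  private static final MetricsRegistry METRICS = new MetricsRegistry();

  private final Histogram ageAtEviction =
      METRICS.newHistogram(AgeAtEvictionSketch.class, "example.ageAtEviction");

  /** Record how long (in nanoseconds) an evicted block had been cached. */
  void recordEviction(long cachedTimeNanos) {
    long age = System.nanoTime() - cachedTimeNanos;
    if (age > 0) {
      this.ageAtEviction.update(age);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    AgeAtEvictionSketch stats = new AgeAtEvictionSketch();
    for (int i = 0; i < 10; i++) {
      long cachedTime = System.nanoTime();  // captured when the block is cached
      Thread.sleep(5);                      // the block lives in the cache briefly
      stats.recordEviction(cachedTime);     // reported when the block is evicted
    }
    Snapshot snapshot = stats.ageAtEviction.getSnapshot();
    System.out.println("mean(ns)=" + stats.ageAtEviction.mean()
        + " stddev(ns)=" + stats.ageAtEviction.stdDev()
        + " p95(ns)=" + snapshot.get95thPercentile());
  }
}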
@@ -30,11 +30,13 @@ Configuration config;
   bcName = bc.getClass().getSimpleName();
 }
 BlockCache [] bcs = cacheConfig == null? null: cacheConfig.getBlockCache() == null? null: cacheConfig.getBlockCache().getBlockCaches();
+// If more than one bc, show evictions in each bc listing so can compare
+boolean evictions = bcs != null && bcs.length > 1;
 </%java>
 <%import>
 java.util.Map;
 org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile;
-org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.AgeSnapshot;
+org.apache.hadoop.hbase.io.hfile.AgeSnapshot;
 org.apache.hadoop.hbase.io.hfile.CachedBlock;
 org.apache.hadoop.conf.Configuration;
 org.apache.hadoop.hbase.io.hfile.CacheConfig;
@@ -64,10 +66,10 @@ org.apache.hadoop.util.StringUtils;
     <& bc_stats; cacheConfig = cacheConfig &>
 </div>
 <div class="tab-pane" id="tab_bc_l1">
-    <& bc_l; bc = bcs == null? bc: bcs[0]; name = "L1" &>
+    <& bc_l; bc = bcs == null? bc: bcs[0]; name = "L1"; evictions = evictions; &>
 </div>
 <div class="tab-pane" id="tab_bc_l2">
-    <& bc_l; bc = bcs == null? null: bcs.length <= 1? null: bcs[1]; name = "L2" &>
+    <& bc_l; bc = bcs == null? null: bcs.length <= 1? null: bcs[1]; name = "L2"; evictions = evictions; &>
 </div>
 </div>
 </div>
@@ -168,6 +170,42 @@ org.apache.hadoop.util.StringUtils;
 </%if>
 </%def>

+<%def evictions_tmpl>
+<%args>
+    BlockCache bc;
+</%args>
+<%java>
+  AgeSnapshot ageAtEvictionSnapshot = bc.getStats().getAgeAtEvictionSnapshot();
+  // Only show if non-zero mean and stddev as is the case in combinedblockcache
+  double mean = ageAtEvictionSnapshot.getMean();
+  double stddev = ageAtEvictionSnapshot.getStdDev();
+</%java>
+    <tr>
+        <td>Evicted</td>
+        <td><% String.format("%,d", bc.getStats().getEvictedCount()) %></td>
+        <td>The total number of blocks evicted</td>
+    </tr>
+    <tr>
+        <td>Evictions</td>
+        <td><% String.format("%,d", bc.getStats().getEvictionCount()) %></td>
+        <td>The total number of times an eviction has occurred</td>
+    </tr>
+<%if mean > 0 %>
+    <tr>
+        <td>Mean</td>
+        <td><% String.format("%,d", (long)(ageAtEvictionSnapshot.getMean()/(1000000 * 1000))) %></td>
+        <td>Mean age of Blocks at eviction time (seconds)</td>
+    </tr>
+</%if>
+<%if stddev > 0 %>
+    <tr>
+        <td>StdDev</td>
+        <td><% String.format("%,d", (long)(ageAtEvictionSnapshot.getStdDev()/1000000)) %></td>
+        <td>Standard Deviation for age of Blocks at eviction time</td>
+    </tr>
+</%if>
+</%def>
+
 <%def bc_stats>
 <%args>
     CacheConfig cacheConfig;
@@ -196,16 +234,7 @@ org.apache.hadoop.util.StringUtils;
         <td><% String.format("%,d", cacheConfig.getBlockCache().getBlockCount()) %></td>
         <td>Number of blocks in block cache</td>
     </tr>
-    <tr>
-        <td>Evicted</td>
-        <td><% String.format("%,d", cacheConfig.getBlockCache().getStats().getEvictedCount()) %></td>
-        <td>The total number of blocks evicted</td>
-    </tr>
-    <tr>
-        <td>Evictions</td>
-        <td><% String.format("%,d", cacheConfig.getBlockCache().getStats().getEvictionCount()) %></td>
-        <td>The total number of times an eviction has occurred</td>
-    </tr>
+    <& evictions_tmpl; bc = cacheConfig.getBlockCache(); &>
     <tr>
        <td>Hits</td>
        <td><% String.format("%,d", cacheConfig.getBlockCache().getStats().getHitCount()) %></td>
@@ -241,11 +270,12 @@ are combined counts. Request count is sum of hits and misses.</p>
 <%args>
     BlockCache bc;
     String name;
+    boolean evictions;
 </%args>
 <%if bc == null %>
 <p>No <% name %> deployed</p>
 <%else>
-<& block_cache; bc = bc; name = name; &>
+<& block_cache; bc = bc; name = name; evictions = evictions; &>
 </%if>
 </%def>

@@ -253,6 +283,7 @@ are combined counts. Request count is sum of hits and misses.</p>
 <%args>
     BlockCache bc;
     String name;
+    boolean evictions;
 </%args>
 <%java>
   final long nanosPerSecond = 1000000000;
@@ -260,7 +291,7 @@ are combined counts. Request count is sum of hits and misses.</p>
   String bcName = bc.getClass().getSimpleName();
   org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.CachedBlocksByFile cbsbf =
     org.apache.hadoop.hbase.io.hfile.BlockCacheUtil.getLoadedCachedBlocksByFile(config, bc);
-  AgeSnapshot snapshot = cbsbf.getAgeSnapshot();
+  AgeSnapshot cbsbfSnapshot = cbsbf.getAgeInCacheSnapshot();

   boolean bucketCache = bc.getClass().getSimpleName().equals("BucketCache");
   BucketCacheStats bucketCacheStats = null;
@@ -274,7 +305,7 @@ are combined counts. Request count is sum of hits and misses.</p>
   }
 </%java>
 <%if cbsbf.isFull() %>
-<p><b>Too many blocks!</b> Listing out the first <% snapshot.getMax() %> only (hbase.ui.blockcache.by.file.max)</p>
+<p><b>Too many blocks!</b> Listing out the first <% cbsbfSnapshot.getMax() %> only (hbase.ui.blockcache.by.file.max)</p>
 </%if>
 <table id="blocks_summary" class="table table-striped">
     <tr>
@@ -318,48 +349,7 @@ are combined counts. Request count is sum of hits and misses.</p>
         <td>Size of DATA Blocks</td>
     </tr>
 </%if>
-    <tr>
-        <td>Evicted</td>
-        <td><% String.format("%,d", bc.getStats().getEvictedCount()) %></td>
-        <td>The total number of blocks evicted</td>
-    </tr>
-    <tr>
-        <td>Evictions</td>
-        <td><% String.format("%,d", bc.getStats().getEvictionCount()) %></td>
-        <td>The total number of times an eviction has occurred</td>
-    </tr>
-
-    <tr>
-        <td>Mean</td>
-        <td><% String.format("%,d", (long)(snapshot.getMean()/nanosPerSecond)) %></td>
-        <td>Mean age of Blocks in cache (seconds)</td>
-    </tr>
-    <tr>
-        <td>StdDev</td>
-        <td><% String.format("%,d", (long)(snapshot.getStdDev()/nanosPerSecond)) %></td>
-        <td>Age standard deviation of Blocks in cache</td>
-    </tr>
-    <tr>
-        <td>Min</td>
-        <td><% String.format("%,d", (long)(snapshot.getMin()/nanosPerSecond)) %></td>
-        <td>Min age of Blocks in cache (seconds)</td>
-    </tr>
-    <tr>
-        <td>Max</td>
-        <td><% String.format("%,d", (long)(snapshot.getMax()/nanosPerSecond)) %></td>
-        <td>Max age of Blocks in cache (seconds)</td>
-    </tr>
-    <tr>
-        <td>95th Percentile</td>
-        <td><% String.format("%,d", (long)(snapshot.get95thPercentile()/nanosPerSecond)) %></td>
-        <td>95th percentile of age of Blocks in cache (seconds)</td>
-    </tr>
-    <tr>
-        <td>99th Percentile</td>
-        <td><% String.format("%,d", (long)(snapshot.get99thPercentile()/nanosPerSecond)) %></td>
-        <td>99th percentile of age of Blocks in cache (seconds)</td>
-    </tr>
-
+    <%if evictions %><& evictions_tmpl; bc = bc; &></%if>
 <%if bucketCache %>
     <tr>
         <td>Hits per Second</td>

@@ -53,7 +53,7 @@ com.yammer.metrics.stats.Snapshot;
 }
   CachedBlocksByFile cbsbf = BlockCacheUtil.getLoadedCachedBlocksByFile(conf, bc);
 </%java>
-<%if bcv.equals("file") %><& bc_by_file; cbsbf = cbsbf; &><%else>{<% BlockCacheUtil.toJSON(bc) %>, <% cbsbf %> }</%if>
+<%if bcv.equals("file") %><& bc_by_file; cbsbf = cbsbf; &><%else>[ <% BlockCacheUtil.toJSON(bc) %>, <% BlockCacheUtil.toJSON(cbsbf) %> ]</%if>
 <%java>
 cbsbf = null;
 </%java>

@@ -26,7 +26,6 @@ import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * An InputStream that wraps a DataInput.
  * @see DataOutputOutputStream
  */
 @InterfaceAudience.Private
 public class DataInputInputStream extends InputStream {

@@ -159,57 +159,6 @@ public class BlockCacheUtil {
       ", priority=" + cb.getBlockPriority();
   }

-  /**
-   * Snapshot of block cache age in cache.
-   * This object is preferred because we can control how it is serialized out when JSON'ing.
-   */
-  @JsonIgnoreProperties({"ageHistogram", "snapshot"})
-  public static class AgeSnapshot {
-    private final Histogram ageHistogram;
-    private final Snapshot snapshot;
-
-    AgeSnapshot(final Histogram ageHistogram) {
-      this.ageHistogram = ageHistogram;
-      this.snapshot = ageHistogram.getSnapshot();
-    }
-
-    public double get75thPercentile() {
-      return snapshot.get75thPercentile();
-    }
-
-    public double get95thPercentile() {
-      return snapshot.get95thPercentile();
-    }
-
-    public double get98thPercentile() {
-      return snapshot.get98thPercentile();
-    }
-
-    public double get999thPercentile() {
-      return snapshot.get999thPercentile();
-    }
-
-    public double get99thPercentile() {
-      return snapshot.get99thPercentile();
-    }
-
-    public double getMean() {
-      return this.ageHistogram.mean();
-    }
-
-    public double getMax() {
-      return ageHistogram.max();
-    }
-
-    public double getMin() {
-      return ageHistogram.min();
-    }
-
-    public double getStdDev() {
-      return ageHistogram.stdDev();
-    }
-  }
-
   /**
    * Get a {@link CachedBlocksByFile} instance and load it up by iterating content in
    * {@link BlockCache}.
@@ -319,7 +268,7 @@ public class BlockCacheUtil {
       return dataSize;
     }

-    public AgeSnapshot getAgeSnapshot() {
+    public AgeSnapshot getAgeInCacheSnapshot() {
       return new AgeSnapshot(this.age);
     }

@@ -90,11 +90,6 @@ public class CacheConfig {
   /**
    * When using bucket cache, this is a float that EITHER represents a percentage of total heap
    * memory size to give to the cache (if < 1.0) OR, it is the capacity in megabytes of the cache.
-   *
-   * <p>The resultant size is further divided if {@link #BUCKET_CACHE_COMBINED_KEY} is set (It is
-   * set by default. When false, bucket cache serves as an "L2" cache to the "L1"
-   * {@link LruBlockCache}). The percentage is set in
-   * with {@link #BUCKET_CACHE_COMBINED_PERCENTAGE_KEY} float.
   */
  public static final String BUCKET_CACHE_SIZE_KEY = "hbase.bucketcache.size";

@@ -22,11 +22,19 @@ import java.util.concurrent.atomic.AtomicLong;

 import org.apache.hadoop.classification.InterfaceAudience;

+import com.yammer.metrics.core.Histogram;
+import com.yammer.metrics.core.MetricsRegistry;
+
 /**
  * Class that implements cache metrics.
  */
 @InterfaceAudience.Private
 public class CacheStats {
+  /**
+   * Needed making histograms.
+   */
+  private static final MetricsRegistry METRICS = new MetricsRegistry();
+
   /** Sliding window statistics. The number of metric periods to include in
    * sliding window hit ratio calculations.
    */
@@ -78,25 +86,34 @@ public class CacheStats {
   private long lastRequestCachingCount = 0;
   /** Current window index (next to be updated) */
   private int windowIndex = 0;
+  /**
+   * Keep running age at eviction time
+   */
+  private Histogram ageAtEviction;
+  private long startTime = System.nanoTime();

-  public CacheStats() {
-    this(DEFAULT_WINDOW_PERIODS);
+  public CacheStats(final String name) {
+    this(name, DEFAULT_WINDOW_PERIODS);
   }

-  public CacheStats(int numPeriodsInWindow) {
+  public CacheStats(final String name, int numPeriodsInWindow) {
     this.numPeriodsInWindow = numPeriodsInWindow;
     this.hitCounts = initializeZeros(numPeriodsInWindow);
     this.hitCachingCounts = initializeZeros(numPeriodsInWindow);
     this.requestCounts = initializeZeros(numPeriodsInWindow);
     this.requestCachingCounts = initializeZeros(numPeriodsInWindow);
+    this.ageAtEviction = METRICS.newHistogram(CacheStats.class, name + ".ageAtEviction");
   }

   @Override
   public String toString() {
+    AgeSnapshot snapshot = getAgeAtEvictionSnapshot();
     return "hitCount=" + getHitCount() + ", hitCachingCount=" + getHitCachingCount() +
       ", missCount=" + getMissCount() + ", missCachingCount=" + getMissCachingCount() +
       ", evictionCount=" + getEvictionCount() +
-      ", evictedBlockCount=" + getEvictedCount();
+      ", evictedBlockCount=" + getEvictedCount() +
+      ", evictedAgeMean=" + snapshot.getMean() +
+      ", evictedAgeStdDev=" + snapshot.getStdDev();
   }

   public void miss(boolean caching) {
@@ -113,8 +130,9 @@ public class CacheStats {
     evictionCount.incrementAndGet();
   }

-  public void evicted() {
-    evictedBlockCount.incrementAndGet();
+  public void evicted(final long t) {
+    if (t > this.startTime) this.ageAtEviction.update(t - this.startTime);
+    this.evictedBlockCount.incrementAndGet();
   }

   public long getRequestCount() {
@@ -146,7 +164,7 @@ public class CacheStats {
   }

   public long getEvictedCount() {
-    return evictedBlockCount.get();
+    return this.evictedBlockCount.get();
   }

   public double getHitRatio() {
@@ -210,6 +228,10 @@ public class CacheStats {
     return Double.isNaN(ratio) ? 0 : ratio;
   }

+  public AgeSnapshot getAgeAtEvictionSnapshot() {
+    return new AgeSnapshot(this.ageAtEviction);
+  }
+
   private static long sum(long [] counts) {
     long sum = 0;
     for (long count : counts) sum += count;

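For orientation, a hedged usage sketch of the CacheStats surface as it stands after the hunks above: the constructor now takes a cache name, evicted() takes the evicted block's cached-time, and the eviction-age distribution is read through getAgeAtEvictionSnapshot(). It assumes the patched HBase classes are on the classpath and that AgeSnapshot is now a top-level class in org.apache.hadoop.hbase.io.hfile, as the template import change above suggests; it is not code from the commit.

// Hedged sketch: exercising the reworked CacheStats API from this patch.
import org.apache.hadoop.hbase.io.hfile.AgeSnapshot;
import org.apache.hadoop.hbase.io.hfile.CacheStats;

public class CacheStatsUsageSketch {
  public static void main(String[] args) {
    // The name keys the per-cache ageAtEviction histogram.
    CacheStats stats = new CacheStats("ExampleCache");

    // Callers now pass the evicted block's cached-time (a System.nanoTime() value);
    // times at or before the stats' own start time are silently ignored.
    long cachedTime = System.nanoTime();
    stats.evicted(cachedTime);

    // The eviction-age distribution is what evictions_tmpl and toString() report.
    AgeSnapshot snapshot = stats.getAgeAtEvictionSnapshot();
    System.out.println("evictedAgeMean=" + snapshot.getMean()
        + ", evictedAgeStdDev=" + snapshot.getStdDev());
  }
}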
@@ -127,6 +127,7 @@ public class CombinedBlockCache implements BlockCache, HeapSize {
     private final CacheStats bucketCacheStats;

     CombinedCacheStats(CacheStats lbcStats, CacheStats fcStats) {
+      super("CombinedBlockCache");
       this.lruCacheStats = lbcStats;
       this.bucketCacheStats = fcStats;
     }

@@ -285,7 +285,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
     this.singleFactor = singleFactor;
     this.multiFactor = multiFactor;
     this.memoryFactor = memoryFactor;
-    this.stats = new CacheStats();
+    this.stats = new CacheStats(this.getClass().getSimpleName());
     this.count = new AtomicLong(0);
     this.elements = new AtomicLong(0);
     this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
@@ -460,7 +460,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize {
     map.remove(block.getCacheKey());
     updateSizeMetrics(block, true);
     elements.decrementAndGet();
-    stats.evicted();
+    stats.evicted(block.getCachedTime());
     if (evictedByEvictionProcess && victimHandler != null) {
       boolean wait = getCurrentSize() < acceptableSize();
       boolean inMemory = block.getPriority() == BlockPriority.MEMORY;

@@ -447,7 +447,7 @@ public class BucketCache implements BlockCache, HeapSize {
         }
       }
     }
-    cacheStats.evicted();
+    cacheStats.evicted(bucketEntry == null? 0: bucketEntry.getCachedTime());
     return true;
   }

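The producer side of the change follows one pattern in both caches: remember when an entry was cached (presumably a System.nanoTime()-based value, matching CacheStats.startTime), hand that timestamp to CacheStats.evicted() at eviction via getCachedTime(), and pass 0 when it is unknown so the t > startTime guard keeps it out of the histogram. A hypothetical stub illustrating that pattern follows; Entry and EvictionReportingSketch are invented names, while CacheStats(String) and evicted(long) are the calls added by this patch.

// Hypothetical producer-side sketch, not from the commit.
import org.apache.hadoop.hbase.io.hfile.CacheStats;

public class EvictionReportingSketch {
  static final class Entry {
    private final long cachedTime = System.nanoTime(); // captured at insert time
    long getCachedTime() { return cachedTime; }
  }

  private final CacheStats stats = new CacheStats("SketchCache");

  void evict(Entry entry) {
    // Unknown cached time (null entry) becomes 0 and is ignored by the histogram.
    stats.evicted(entry == null ? 0 : entry.getCachedTime());
  }

  public static void main(String[] args) {
    EvictionReportingSketch cache = new EvictionReportingSketch();
    cache.evict(new Entry());
    cache.evict(null);
    System.out.println(cache.stats); // toString() now includes evictedAgeMean/StdDev
  }
}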
@@ -34,6 +34,10 @@ public class BucketCacheStats extends CacheStats {
   private final static int nanoTime = 1000000;
   private long lastLogTime = EnvironmentEdgeManager.currentTimeMillis();

+  BucketCacheStats() {
+    super("BucketCache");
+  }
+
   @Override
   public String toString() {
     return super.toString() + ", ioHitsPerSecond=" + getIOHitsPerSecond() +

@@ -67,6 +67,11 @@ public class TestBlockCacheReporting {
       bc.getBlock(bcki, true, false, true);
     }
     assertEquals(2 * count /*Data and Index blocks*/, bc.getStats().getHitCount());
+    BlockCacheKey bckd = new BlockCacheKey("f", 0);
+    BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);
+    bc.evictBlock(bckd);
+    bc.evictBlock(bcki);
+    bc.getStats().getEvictedCount();
   }

   @Test

@@ -607,7 +607,7 @@ public class TestLruBlockCache {
     double delta = 0.01;

     // 3 total periods
-    CacheStats stats = new CacheStats(3);
+    CacheStats stats = new CacheStats("test", 3);

     // No accesses, should be 0
     stats.rollMetricsPeriod();

@@ -261,8 +261,7 @@ public class TestHeapMemoryManager {
   }

   private static class BlockCacheStub implements ResizableBlockCache {
-
-    CacheStats stats = new CacheStats();
+    CacheStats stats = new CacheStats("test");
     long maxSize = 0;

     public BlockCacheStub(long size){
@@ -288,13 +287,13 @@ public class TestHeapMemoryManager {

     @Override
     public boolean evictBlock(BlockCacheKey cacheKey) {
-      stats.evicted();
+      stats.evicted(0);
       return false;
     }

     @Override
     public int evictBlocksByHfileName(String hfileName) {
-      stats.evicted(); // Just assuming only one block for file here.
+      stats.evicted(0); // Just assuming only one block for file here.
       return 0;
     }
