HBASE-2865 Cleanup of LRU logging; it's hard to read, uses a custom MB maker, repeats info, prints too many digits after the decimal point, etc.

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@966840 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2010-07-22 20:17:24 +00:00
parent 21cbe58d6e
commit 9bb7a8ae65
7 changed files with 72 additions and 66 deletions

View File: CHANGES.txt

@@ -790,6 +790,8 @@ Release 0.21.0 - Unreleased
               space and text
    HBASE-2850  slf4j version needs to be reconciled in pom: thrift wants 1.5.x
                and hadoop/avro 1.4.x
+   HBASE-2865  Cleanup of LRU logging; it's hard to read, uses a custom MB maker,
+               repeats info, too many digits after the decimal point, etc.
 
  NEW FEATURES
    HBASE-1961  HBase EC2 scripts

View File

@@ -28,7 +28,7 @@
       <outputDirectory>/</outputDirectory>
       <includes>
         <include>hbase-${project.version}.jar</include>
-        <include>hbase-${project.version}-tests.jar</include>
+        <include>hbase-${project.version}-test.jar</include>
       </includes>
     </fileSet>
     <fileSet>

View File: src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java

@@ -109,4 +109,3 @@ public class CachedBlock implements HeapSize, Comparable<CachedBlock> {
     return this.priority;
   }
 }
-

View File: src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java

@@ -84,15 +84,14 @@ public class CachedBlockQueue implements HeapSize {
   }
 
   /**
-   * Get a sorted List of all elements in this queue, in descending order.
-   * @return list of cached elements in descending order
+   * @return a sorted List of all elements in this queue, in descending order
    */
-  public CachedBlock [] get() {
+  public LinkedList<CachedBlock> get() {
     LinkedList<CachedBlock> blocks = new LinkedList<CachedBlock>();
-    while(!queue.isEmpty()) {
+    while (!queue.isEmpty()) {
       blocks.addFirst(queue.poll());
     }
-    return blocks.toArray(new CachedBlock[blocks.size()]);
+    return blocks;
   }
 
   /**
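
For context on the get() change above: the queue now returns the LinkedList it builds instead of copying it into an array, and the descending order still comes from draining an ascending PriorityQueue with poll() while prepending with addFirst(). A minimal JDK-only sketch of that drain-and-reverse pattern, with Long standing in for CachedBlock (all names here are illustrative, not from the commit):

import java.util.LinkedList;
import java.util.PriorityQueue;

public class DescendingDrain {
  public static void main(String[] args) {
    PriorityQueue<Long> heap = new PriorityQueue<Long>(); // ascending min-heap
    heap.add(3L); heap.add(1L); heap.add(2L);
    LinkedList<Long> out = new LinkedList<Long>();
    while (!heap.isEmpty()) {
      out.addFirst(heap.poll()); // smallest polls first, so it lands last
    }
    System.out.println(out); // prints [3, 2, 1]: descending, no toArray() copy
  }
}

Returning the list directly spares callers such as LruBlockCache.free() the extra array copy; they can iterate and stop as soon as enough bytes have been freed.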

View File: src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import java.lang.ref.WeakReference;
 import java.nio.ByteBuffer;
+import java.util.LinkedList;
 import java.util.PriorityQueue;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;

@@ -34,6 +35,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A block cache implementation that is memory-aware using {@link HeapSize},
@@ -317,11 +319,14 @@ public class LruBlockCache implements BlockCache, HeapSize {
     try {
       evictionInProgress = true;
-      long bytesToFree = size.get() - minSize();
-      LOG.debug("Block cache LRU eviction started. Attempting to free " +
-        bytesToFree + " bytes");
+      long currentSize = this.size.get();
+      long bytesToFree = currentSize - minSize();
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Block cache LRU eviction started; Attempting to free " +
+          StringUtils.byteDesc(bytesToFree) + " of total=" +
+          StringUtils.byteDesc(currentSize));
+      }
 
       if(bytesToFree <= 0) return;
@@ -372,17 +377,17 @@ public class LruBlockCache implements BlockCache, HeapSize {
         remainingBuckets--;
       }
 
-      float singleMB = ((float)bucketSingle.totalSize())/((float)(1024*1024));
-      float multiMB = ((float)bucketMulti.totalSize())/((float)(1024*1024));
-      float memoryMB = ((float)bucketMemory.totalSize())/((float)(1024*1024));
-
-      LOG.debug("Block cache LRU eviction completed. " +
-        "Freed " + bytesFreed + " bytes. " +
-        "Priority Sizes: " +
-        "Single=" + singleMB + "MB (" + bucketSingle.totalSize() + "), " +
-        "Multi=" + multiMB + "MB (" + bucketMulti.totalSize() + ")," +
-        "Memory=" + memoryMB + "MB (" + bucketMemory.totalSize() + ")");
-
+      if (LOG.isDebugEnabled()) {
+        long single = bucketSingle.totalSize();
+        long multi = bucketMulti.totalSize();
+        long memory = bucketMemory.totalSize();
+        LOG.debug("Block cache LRU eviction completed; " +
+          "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
+          "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
+          "single=" + StringUtils.byteDesc(single) + ", " +
+          "multi=" + StringUtils.byteDesc(multi) + ", " +
+          "memory=" + StringUtils.byteDesc(memory));
+      }
     } finally {
       stats.evict();
       evictionInProgress = false;
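
For reference, the hunks above swap hand-rolled megabyte math for org.apache.hadoop.util.StringUtils.byteDesc(long), which chooses a unit and rounds. A small before/after sketch (the printed strings are approximate; the exact rounding is byteDesc's):

import org.apache.hadoop.util.StringUtils;

public class ByteDescDemo {
  public static void main(String[] args) {
    long totalSize = 1294336; // bytes
    // Old style: manual float division, unbounded digits after the point,
    // printed as "Single=1.234375MB (1294336)".
    float sizeMB = ((float) totalSize) / ((float) (1024 * 1024));
    System.out.println("Single=" + sizeMB + "MB (" + totalSize + ")");
    // New style: unit-aware and rounded, along the lines of "single=1.23 MB".
    System.out.println("single=" + StringUtils.byteDesc(totalSize));
  }
}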
@@ -414,10 +419,10 @@ public class LruBlockCache implements BlockCache, HeapSize {
     }
 
     public long free(long toFree) {
-      CachedBlock [] blocks = queue.get();
+      LinkedList<CachedBlock> blocks = queue.get();
       long freedBytes = 0;
-      for(int i=0; i<blocks.length; i++) {
-        freedBytes += evictBlock(blocks[i]);
+      for(CachedBlock cb: blocks) {
+        freedBytes += evictBlock(cb);
         if(freedBytes >= toFree) {
           return freedBytes;
         }
@@ -524,7 +529,7 @@ public class LruBlockCache implements BlockCache, HeapSize {
   /*
    * Statistics thread.  Periodically prints the cache statistics to the log.
    */
-  private static class StatisticsThread extends Thread {
+  static class StatisticsThread extends Thread {
     LruBlockCache lru;
 
     public StatisticsThread(LruBlockCache lru) {
@@ -539,27 +544,23 @@ public class LruBlockCache implements BlockCache, HeapSize {
   }
 
   public void logStats() {
+    if (!LOG.isDebugEnabled()) return;
     // Log size
     long totalSize = heapSize();
     long freeSize = maxSize - totalSize;
-    float sizeMB = ((float)totalSize)/((float)(1024*1024));
-    float freeMB = ((float)freeSize)/((float)(1024*1024));
-    float maxMB = ((float)maxSize)/((float)(1024*1024));
-    LruBlockCache.LOG.debug("Cache Stats: Sizes: " +
-      "Total=" + sizeMB + "MB (" + totalSize + "), " +
-      "Free=" + freeMB + "MB (" + freeSize + "), " +
-      "Max=" + maxMB + "MB (" + maxSize +")" +
-      ", Counts: " +
-      "Blocks=" + size() +", " +
-      "Access=" + stats.getRequestCount() + ", " +
-      "Hit=" + stats.getHitCount() + ", " +
-      "Miss=" + stats.getMissCount() + ", " +
-      "Evictions=" + stats.getEvictionCount() + ", " +
-      "Evicted=" + stats.getEvictedCount() +
-      ", Ratios: " +
-      "Hit Ratio=" + stats.getHitRatio()*100 + "%, " +
-      "Miss Ratio=" + stats.getMissRatio()*100 + "%, " +
-      "Evicted/Run=" + stats.evictedPerEviction());
+    LruBlockCache.LOG.debug("LRU Stats: " +
+      "total=" + StringUtils.byteDesc(totalSize) + ", " +
+      "free=" + StringUtils.byteDesc(freeSize) + ", " +
+      "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
+      "blocks=" + size() +", " +
+      "accesses=" + stats.getRequestCount() + ", " +
+      "hits=" + stats.getHitCount() + ", " +
+      "misses=" + stats.getMissCount() + ", " +
+      "evictions=" + stats.getEvictionCount() + ", " +
+      "evicted=" + stats.getEvictedCount() + ", " +
+      "hitRatio=" + StringUtils.formatPercent(stats.getHitRatio(), 2) + "%, " +
+      "missRatio=" + StringUtils.formatPercent(stats.getMissRatio(), 2) + "%, " +
+      "evictedPerRun=" + stats.evictedPerEviction());
   }
 
   /**
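
Similarly, logStats() now hands ratio formatting to StringUtils.formatPercent(double, int) instead of printing raw doubles. A sketch of the difference (note: in the Hadoop versions I know, formatPercent's result already ends in '%', so the extra literal '%' in the log line above may be redundant):

import org.apache.hadoop.util.StringUtils;

public class RatioDemo {
  public static void main(String[] args) {
    double hitRatio = 0.87654321;
    // Old style: raw double scaled by 100, prints something like
    // "87.654321%", occasionally with trailing floating-point noise.
    System.out.println("Hit Ratio=" + hitRatio * 100 + "%");
    // New style: rounded to two decimal places by formatPercent.
    System.out.println("hitRatio=" + StringUtils.formatPercent(hitRatio, 2));
  }
}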

View File: src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java

@@ -20,6 +20,8 @@
 package org.apache.hadoop.hbase.io.hfile;
 
 import java.nio.ByteBuffer;
+import java.util.LinkedList;
+
 import junit.framework.TestCase;
 
 public class TestCachedBlockQueue extends TestCase {
@@ -57,15 +59,16 @@ public class TestCachedBlockQueue extends TestCase {
 
     assertEquals(queue.heapSize(), expectedSize);
 
-    org.apache.hadoop.hbase.io.hfile.CachedBlock [] blocks = queue.get();
-    assertEquals(blocks[0].getName(), "cb1");
-    assertEquals(blocks[1].getName(), "cb2");
-    assertEquals(blocks[2].getName(), "cb3");
-    assertEquals(blocks[3].getName(), "cb4");
-    assertEquals(blocks[4].getName(), "cb5");
-    assertEquals(blocks[5].getName(), "cb6");
-    assertEquals(blocks[6].getName(), "cb7");
-    assertEquals(blocks[7].getName(), "cb8");
+    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks =
+      queue.get();
+    assertEquals(blocks.poll().getName(), "cb1");
+    assertEquals(blocks.poll().getName(), "cb2");
+    assertEquals(blocks.poll().getName(), "cb3");
+    assertEquals(blocks.poll().getName(), "cb4");
+    assertEquals(blocks.poll().getName(), "cb5");
+    assertEquals(blocks.poll().getName(), "cb6");
+    assertEquals(blocks.poll().getName(), "cb7");
+    assertEquals(blocks.poll().getName(), "cb8");
   }
@@ -109,16 +112,16 @@ public class TestCachedBlockQueue extends TestCase {
 
     assertEquals(queue.heapSize(), expectedSize);
 
-    org.apache.hadoop.hbase.io.hfile.CachedBlock [] blocks = queue.get();
-    assertEquals(blocks[0].getName(), "cb0");
-    assertEquals(blocks[1].getName(), "cb1");
-    assertEquals(blocks[2].getName(), "cb2");
-    assertEquals(blocks[3].getName(), "cb3");
-    assertEquals(blocks[4].getName(), "cb4");
-    assertEquals(blocks[5].getName(), "cb5");
-    assertEquals(blocks[6].getName(), "cb6");
-    assertEquals(blocks[7].getName(), "cb7");
-    assertEquals(blocks[8].getName(), "cb8");
+    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks = queue.get();
+    assertEquals(blocks.poll().getName(), "cb0");
+    assertEquals(blocks.poll().getName(), "cb1");
+    assertEquals(blocks.poll().getName(), "cb2");
+    assertEquals(blocks.poll().getName(), "cb3");
+    assertEquals(blocks.poll().getName(), "cb4");
+    assertEquals(blocks.poll().getName(), "cb5");
+    assertEquals(blocks.poll().getName(), "cb6");
+    assertEquals(blocks.poll().getName(), "cb7");
+    assertEquals(blocks.poll().getName(), "cb8");
   }
@@ -130,5 +133,4 @@ public class TestCachedBlockQueue extends TestCase {
         accessTime,false);
     }
   }
-
 }

View File: src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java

@@ -117,6 +117,9 @@ public class TestLruBlockCache extends TestCase {
 
     // Expect no evictions
     assertEquals(0, cache.getEvictionCount());
+    Thread t = new LruBlockCache.StatisticsThread(cache);
+    t.start();
+    t.join();
   }
 
   public void testCacheEvictionSimple() throws Exception {
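
The added lines depend on the earlier visibility change (StatisticsThread went from private to package-private), which lets a test in org.apache.hadoop.hbase.io.hfile construct the thread and drive one logging pass deterministically. A hypothetical standalone variant; the two-argument LruBlockCache constructor and the sizes are assumptions for illustration, not taken from this commit:

package org.apache.hadoop.hbase.io.hfile;

import junit.framework.TestCase;

public class TestStatsThreadSmoke extends TestCase {
  public void testStatsThreadRunsOnce() throws Exception {
    // Assumed constructor (maxSize, blockSize); the values are arbitrary.
    LruBlockCache cache = new LruBlockCache(100 * 1024, 8 * 1024);
    // run() is expected to perform one logStats() pass for this cache.
    Thread t = new LruBlockCache.StatisticsThread(cache);
    t.start();
    t.join(); // make the test wait until the logging pass completes
  }
}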