HBASE-2865 Cleanup of LRU logging; its hard to read, uses custom MB'maker, repeats info, too many numbers after the point, etc.

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@966840 13f79535-47bb-0310-9956-ffa450edef68
Author: Michael Stack
Date: 2010-07-22 20:17:24 +00:00
Parent: 21cbe58d6e
Commit: 9bb7a8ae65
7 changed files with 72 additions and 66 deletions

File: CHANGES.txt

@@ -790,6 +790,8 @@ Release 0.21.0 - Unreleased
               space and text
    HBASE-2850 slf4j version needs to be reconciled in pom: thrift wants 1.5.x
               and hadoop/avro 1.4.x
+   HBASE-2865 Cleanup of LRU logging; its hard to read, uses custom MB'maker,
+              repeats info, too many numbers after the point, etc.

  NEW FEATURES
    HBASE-1961 HBase EC2 scripts

File: Maven assembly descriptor (XML)

@@ -28,7 +28,7 @@
       <outputDirectory>/</outputDirectory>
       <includes>
         <include>hbase-${project.version}.jar</include>
-        <include>hbase-${project.version}-tests.jar</include>
+        <include>hbase-${project.version}-test.jar</include>
       </includes>
     </fileSet>
     <fileSet>

File: CachedBlock.java

@@ -109,4 +109,3 @@ public class CachedBlock implements HeapSize, Comparable<CachedBlock> {
     return this.priority;
   }
 }
-

File: CachedBlockQueue.java

@@ -84,15 +84,14 @@ public class CachedBlockQueue implements HeapSize {
   }

   /**
-   * Get a sorted List of all elements in this queue, in descending order.
-   * @return list of cached elements in descending order
+   * @return a sorted List of all elements in this queue, in descending order
    */
-  public CachedBlock [] get() {
+  public LinkedList<CachedBlock> get() {
     LinkedList<CachedBlock> blocks = new LinkedList<CachedBlock>();
-    while(!queue.isEmpty()) {
+    while (!queue.isEmpty()) {
       blocks.addFirst(queue.poll());
     }
-    return blocks.toArray(new CachedBlock[blocks.size()]);
+    return blocks;
   }

   /**
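For orientation, here is a hedged sketch of how a caller consumes the new return type; the list comes back in descending order, so plain iteration visits the largest entries first and no toArray() copy is needed. The queue variable is hypothetical, not from this commit:

  // Hypothetical usage sketch: consuming the LinkedList<CachedBlock>
  // that CachedBlockQueue.get() now returns, in descending order.
  LinkedList<CachedBlock> blocks = queue.get();
  for (CachedBlock cb : blocks) {
    System.out.println(cb.getName() + " heapSize=" + cb.heapSize());
  }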

File: LruBlockCache.java

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io.hfile;

 import java.lang.ref.WeakReference;
 import java.nio.ByteBuffer;
+import java.util.LinkedList;
 import java.util.PriorityQueue;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
@@ -34,6 +35,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
+import org.apache.hadoop.util.StringUtils;

 /**
  * A block cache implementation that is memory-aware using {@link HeapSize},
@@ -317,11 +319,14 @@ public class LruBlockCache implements BlockCache, HeapSize {
     try {
       evictionInProgress = true;
-      long bytesToFree = size.get() - minSize();
-
-      LOG.debug("Block cache LRU eviction started. Attempting to free " +
-        bytesToFree + " bytes");
+      long currentSize = this.size.get();
+      long bytesToFree = currentSize - minSize();
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Block cache LRU eviction started; Attempting to free " +
+          StringUtils.byteDesc(bytesToFree) + " of total=" +
+          StringUtils.byteDesc(currentSize));
+      }

       if(bytesToFree <= 0) return;
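The new message relies on Hadoop's org.apache.hadoop.util.StringUtils.byteDesc(long), which renders a byte count with a readable unit suffix instead of a raw number. A minimal standalone sketch; the printed strings are indicative, not verbatim output:

  import org.apache.hadoop.util.StringUtils;

  public class ByteDescDemo {
    public static void main(String[] args) {
      // byteDesc scales the value and appends a unit, replacing the
      // hand-rolled "custom MB'maker" arithmetic this commit removes.
      System.out.println(StringUtils.byteDesc(1024L));              // e.g. "1 KB"
      System.out.println(StringUtils.byteDesc(132L * 1024 * 1024)); // e.g. "132 MB"
    }
  }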
@@ -372,17 +377,17 @@ public class LruBlockCache implements BlockCache, HeapSize {
         remainingBuckets--;
       }

-      float singleMB = ((float)bucketSingle.totalSize())/((float)(1024*1024));
-      float multiMB = ((float)bucketMulti.totalSize())/((float)(1024*1024));
-      float memoryMB = ((float)bucketMemory.totalSize())/((float)(1024*1024));
-
-      LOG.debug("Block cache LRU eviction completed. " +
-        "Freed " + bytesFreed + " bytes. " +
-        "Priority Sizes: " +
-        "Single=" + singleMB + "MB (" + bucketSingle.totalSize() + "), " +
-        "Multi=" + multiMB + "MB (" + bucketMulti.totalSize() + ")," +
-        "Memory=" + memoryMB + "MB (" + bucketMemory.totalSize() + ")");
+      if (LOG.isDebugEnabled()) {
+        long single = bucketSingle.totalSize();
+        long multi = bucketMulti.totalSize();
+        long memory = bucketMemory.totalSize();
+        LOG.debug("Block cache LRU eviction completed; " +
+          "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
+          "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
+          "single=" + StringUtils.byteDesc(single) + ", " +
+          "multi=" + StringUtils.byteDesc(multi) + ", " +
+          "memory=" + StringUtils.byteDesc(memory));
+      }
     } finally {
       stats.evict();
       evictionInProgress = false;
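Reconstructing the two concatenations above shows the intent of the cleanup; the numbers below are illustrative, not captured output:

  old: Block cache LRU eviction completed. Freed 9175040 bytes. Priority Sizes: Single=4.375MB (4587520), Multi=2.1875MB (2293760),Memory=2.1875MB (2293760)
  new: Block cache LRU eviction completed; freed=8.8 MB, total=25.4 MB, single=4.4 MB, multi=2.2 MB, memory=2.2 MB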
@@ -414,10 +419,10 @@ public class LruBlockCache implements BlockCache, HeapSize {
     }

     public long free(long toFree) {
-      CachedBlock [] blocks = queue.get();
+      LinkedList<CachedBlock> blocks = queue.get();
       long freedBytes = 0;
-      for(int i=0; i<blocks.length; i++) {
-        freedBytes += evictBlock(blocks[i]);
+      for(CachedBlock cb: blocks) {
+        freedBytes += evictBlock(cb);
         if(freedBytes >= toFree) {
           return freedBytes;
         }
@@ -524,7 +529,7 @@ public class LruBlockCache implements BlockCache, HeapSize {
   /*
    * Statistics thread. Periodically prints the cache statistics to the log.
    */
-  private static class StatisticsThread extends Thread {
+  static class StatisticsThread extends Thread {
     LruBlockCache lru;

     public StatisticsThread(LruBlockCache lru) {
@@ -539,27 +544,23 @@ public class LruBlockCache implements BlockCache, HeapSize {
     }

     public void logStats() {
+      if (!LOG.isDebugEnabled()) return;
       // Log size
       long totalSize = heapSize();
       long freeSize = maxSize - totalSize;
-      float sizeMB = ((float)totalSize)/((float)(1024*1024));
-      float freeMB = ((float)freeSize)/((float)(1024*1024));
-      float maxMB = ((float)maxSize)/((float)(1024*1024));
-      LruBlockCache.LOG.debug("Cache Stats: Sizes: " +
-        "Total=" + sizeMB + "MB (" + totalSize + "), " +
-        "Free=" + freeMB + "MB (" + freeSize + "), " +
-        "Max=" + maxMB + "MB (" + maxSize +")" +
-        ", Counts: " +
-        "Blocks=" + size() +", " +
-        "Access=" + stats.getRequestCount() + ", " +
-        "Hit=" + stats.getHitCount() + ", " +
-        "Miss=" + stats.getMissCount() + ", " +
-        "Evictions=" + stats.getEvictionCount() + ", " +
-        "Evicted=" + stats.getEvictedCount() +
-        ", Ratios: " +
-        "Hit Ratio=" + stats.getHitRatio()*100 + "%, " +
-        "Miss Ratio=" + stats.getMissRatio()*100 + "%, " +
-        "Evicted/Run=" + stats.evictedPerEviction());
+      LruBlockCache.LOG.debug("LRU Stats: " +
+        "total=" + StringUtils.byteDesc(totalSize) + ", " +
+        "free=" + StringUtils.byteDesc(freeSize) + ", " +
+        "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
+        "blocks=" + size() +", " +
+        "accesses=" + stats.getRequestCount() + ", " +
+        "hits=" + stats.getHitCount() + ", " +
+        "misses=" + stats.getMissCount() + ", " +
+        "evictions=" + stats.getEvictionCount() + ", " +
+        "evicted=" + stats.getEvictedCount() + ", " +
+        "hitRatio=" + StringUtils.formatPercent(stats.getHitRatio(), 2) + "%, " +
+        "missRatio=" + StringUtils.formatPercent(stats.getMissRatio(), 2) + "%, " +
+        "evictedPerRun=" + stats.evictedPerEviction());
     }

     /**
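The rewritten logStats() follows the same key=value convention; StringUtils.formatPercent(double, int), also from org.apache.hadoop.util, formats the 0..1 hit/miss ratios to a fixed number of decimal places in place of the earlier ratio*100 float math. An illustrative line (all values invented):

  LRU Stats: total=58.2 MB, free=5.8 MB, max=64 MB, blocks=912, accesses=10430, hits=9876, misses=554, evictions=3, evicted=120, hitRatio=94.69%, missRatio=5.31%, evictedPerRun=40.0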

File: TestCachedBlockQueue.java

@@ -20,6 +20,8 @@
 package org.apache.hadoop.hbase.io.hfile;

 import java.nio.ByteBuffer;
+import java.util.LinkedList;
+
 import junit.framework.TestCase;

 public class TestCachedBlockQueue extends TestCase {
@@ -57,15 +59,16 @@ public class TestCachedBlockQueue extends TestCase {
     assertEquals(queue.heapSize(), expectedSize);

-    org.apache.hadoop.hbase.io.hfile.CachedBlock [] blocks = queue.get();
-    assertEquals(blocks[0].getName(), "cb1");
-    assertEquals(blocks[1].getName(), "cb2");
-    assertEquals(blocks[2].getName(), "cb3");
-    assertEquals(blocks[3].getName(), "cb4");
-    assertEquals(blocks[4].getName(), "cb5");
-    assertEquals(blocks[5].getName(), "cb6");
-    assertEquals(blocks[6].getName(), "cb7");
-    assertEquals(blocks[7].getName(), "cb8");
+    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks =
+      queue.get();
+    assertEquals(blocks.poll().getName(), "cb1");
+    assertEquals(blocks.poll().getName(), "cb2");
+    assertEquals(blocks.poll().getName(), "cb3");
+    assertEquals(blocks.poll().getName(), "cb4");
+    assertEquals(blocks.poll().getName(), "cb5");
+    assertEquals(blocks.poll().getName(), "cb6");
+    assertEquals(blocks.poll().getName(), "cb7");
+    assertEquals(blocks.poll().getName(), "cb8");

   }
@@ -109,16 +112,16 @@ public class TestCachedBlockQueue extends TestCase {
     assertEquals(queue.heapSize(), expectedSize);

-    org.apache.hadoop.hbase.io.hfile.CachedBlock [] blocks = queue.get();
-    assertEquals(blocks[0].getName(), "cb0");
-    assertEquals(blocks[1].getName(), "cb1");
-    assertEquals(blocks[2].getName(), "cb2");
-    assertEquals(blocks[3].getName(), "cb3");
-    assertEquals(blocks[4].getName(), "cb4");
-    assertEquals(blocks[5].getName(), "cb5");
-    assertEquals(blocks[6].getName(), "cb6");
-    assertEquals(blocks[7].getName(), "cb7");
-    assertEquals(blocks[8].getName(), "cb8");
+    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks = queue.get();
+    assertEquals(blocks.poll().getName(), "cb0");
+    assertEquals(blocks.poll().getName(), "cb1");
+    assertEquals(blocks.poll().getName(), "cb2");
+    assertEquals(blocks.poll().getName(), "cb3");
+    assertEquals(blocks.poll().getName(), "cb4");
+    assertEquals(blocks.poll().getName(), "cb5");
+    assertEquals(blocks.poll().getName(), "cb6");
+    assertEquals(blocks.poll().getName(), "cb7");
+    assertEquals(blocks.poll().getName(), "cb8");

   }
@@ -130,5 +133,4 @@ public class TestCachedBlockQueue extends TestCase {
           accessTime,false);
     }
   }
 }
-

File: TestLruBlockCache.java

@@ -117,6 +117,9 @@ public class TestLruBlockCache extends TestCase {
     // Expect no evictions
     assertEquals(0, cache.getEvictionCount());
+    Thread t = new LruBlockCache.StatisticsThread(cache);
+    t.start();
+    t.join();
   }

   public void testCacheEvictionSimple() throws Exception {
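A closing note on this last hunk: dropping the private modifier from StatisticsThread in the -524 hunk above is what lets the test construct the thread directly, and start() followed by join() runs one stats pass before the test method returns. A hedged sketch of the same one-shot pattern; the constructor arguments are assumed, not taken from this diff:

  // Hypothetical: build a cache, then force a single, synchronous stats dump.
  LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
  Thread t = new LruBlockCache.StatisticsThread(cache);
  t.start();  // run() is expected to invoke logStats() (the scheduler normally re-runs it periodically)
  t.join();   // block until the debug line has been emitted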