HBASE-4752 Don't create an unnecessary LinkedList when evicting from the BlockCache
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1201516 13f79535-47bb-0310-9956-ffa450edef68
parent 87c500ccf3
commit 7d23c9afb1
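The old eviction path copied the entire CachedBlockQueue into a LinkedList just to walk it from the coldest end. Backing the queue with Guava's MinMaxPriorityQueue lets callers poll either end directly, so the intermediate list disappears. A minimal standalone sketch of that double-ended pattern (illustrative names only, not the HBase classes):

import com.google.common.collect.MinMaxPriorityQueue;

public class MinMaxQueueSketch {
  public static void main(String[] args) {
    // Natural ordering: poll() removes the least element, pollLast() the greatest.
    MinMaxPriorityQueue<Long> queue = MinMaxPriorityQueue.expectedSize(4).create();
    queue.add(30L);
    queue.add(10L);
    queue.add(20L);

    System.out.println(queue.poll());     // 10 -- smallest end
    System.out.println(queue.pollLast()); // 30 -- largest end, no copy needed
    System.out.println(queue.pollLast()); // 20
    System.out.println(queue.pollLast()); // null once the queue is empty
  }
}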
--- a/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java
+++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java
@@ -19,8 +19,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
-import java.util.LinkedList;
-import java.util.PriorityQueue;
+import com.google.common.collect.MinMaxPriorityQueue;
 
 import org.apache.hadoop.hbase.io.HeapSize;
 
@@ -39,7 +38,7 @@ import org.apache.hadoop.hbase.io.HeapSize;
  */
 public class CachedBlockQueue implements HeapSize {
 
-  private PriorityQueue<CachedBlock> queue;
+  private MinMaxPriorityQueue<CachedBlock> queue;
 
   private long heapSize;
   private long maxSize;
@@ -51,7 +50,7 @@ public class CachedBlockQueue implements HeapSize {
   public CachedBlockQueue(long maxSize, long blockSize) {
     int initialSize = (int)(maxSize / blockSize);
     if(initialSize == 0) initialSize++;
-    queue = new PriorityQueue<CachedBlock>(initialSize);
+    queue = MinMaxPriorityQueue.expectedSize(initialSize).create();
     heapSize = 0;
     this.maxSize = maxSize;
   }
@@ -84,14 +83,19 @@ public class CachedBlockQueue implements HeapSize {
   }
 
   /**
-   * @return a sorted List of all elements in this queue, in descending order
+   * @return The next element in this queue, or {@code null} if the queue is
+   * empty.
    */
-  public LinkedList<CachedBlock> get() {
-    LinkedList<CachedBlock> blocks = new LinkedList<CachedBlock>();
-    while (!queue.isEmpty()) {
-      blocks.addFirst(queue.poll());
-    }
-    return blocks;
+  public CachedBlock poll() {
+    return queue.poll();
   }
 
+  /**
+   * @return The last element in this queue, or {@code null} if the queue is
+   * empty.
+   */
+  public CachedBlock pollLast() {
+    return queue.pollLast();
+  }
+
   /**
--- a/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -24,7 +24,6 @@ import java.lang.ref.WeakReference;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.PriorityQueue;
@@ -494,11 +493,11 @@ public class LruBlockCache implements BlockCache, HeapSize {
     }
 
     public long free(long toFree) {
-      LinkedList<CachedBlock> blocks = queue.get();
+      CachedBlock cb;
       long freedBytes = 0;
-      for(CachedBlock cb: blocks) {
+      while ((cb = queue.pollLast()) != null) {
         freedBytes += evictBlock(cb);
-        if(freedBytes >= toFree) {
+        if (freedBytes >= toFree) {
           return freedBytes;
         }
       }
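Pulled out of the hunk above, the reworked free() loop reduces to roughly this shape. The sketch below is standalone: Long stands in for CachedBlock and evict() is a made-up helper, not LruBlockCache.evictBlock().

import com.google.common.collect.MinMaxPriorityQueue;

class EvictionLoopSketch {
  // Hypothetical stand-in: treat the value itself as the block's heap size.
  static long evict(Long blockSize) {
    return blockSize;
  }

  static long free(MinMaxPriorityQueue<Long> queue, long toFree) {
    long freedBytes = 0;
    Long cb;
    // Drain directly from the "last" end of the queue; nothing is copied
    // into an intermediate list before eviction.
    while ((cb = queue.pollLast()) != null) {
      freedBytes += evict(cb);
      if (freedBytes >= toFree) {
        return freedBytes;
      }
    }
    return freedBytes;
  }
}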
--- a/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java
+++ b/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java
@@ -63,17 +63,9 @@ public class TestCachedBlockQueue extends TestCase {
 
     assertEquals(queue.heapSize(), expectedSize);
 
-    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks =
-      queue.get();
-    assertEquals(blocks.poll().getName(), "cb1");
-    assertEquals(blocks.poll().getName(), "cb2");
-    assertEquals(blocks.poll().getName(), "cb3");
-    assertEquals(blocks.poll().getName(), "cb4");
-    assertEquals(blocks.poll().getName(), "cb5");
-    assertEquals(blocks.poll().getName(), "cb6");
-    assertEquals(blocks.poll().getName(), "cb7");
-    assertEquals(blocks.poll().getName(), "cb8");
-
+    for (int i = 1; i <= 8; i++) {
+      assertEquals(queue.pollLast().getName(), "cb"+i);
+    }
   }
 
   public void testQueueSmallBlockEdgeCase() throws Exception {
@@ -116,17 +108,9 @@ public class TestCachedBlockQueue extends TestCase {
 
     assertEquals(queue.heapSize(), expectedSize);
 
-    LinkedList<org.apache.hadoop.hbase.io.hfile.CachedBlock> blocks = queue.get();
-    assertEquals(blocks.poll().getName(), "cb0");
-    assertEquals(blocks.poll().getName(), "cb1");
-    assertEquals(blocks.poll().getName(), "cb2");
-    assertEquals(blocks.poll().getName(), "cb3");
-    assertEquals(blocks.poll().getName(), "cb4");
-    assertEquals(blocks.poll().getName(), "cb5");
-    assertEquals(blocks.poll().getName(), "cb6");
-    assertEquals(blocks.poll().getName(), "cb7");
-    assertEquals(blocks.poll().getName(), "cb8");
-
+    for (int i = 0; i <= 8; i++) {
+      assertEquals(queue.pollLast().getName(), "cb"+i);
+    }
   }
 
   private static class CachedBlock extends org.apache.hadoop.hbase.io.hfile.CachedBlock