HBASE-8135 Mutation should implement HeapSize

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1458418 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
nkeywal 2013-03-19 17:30:05 +00:00
parent c352a848a1
commit 01bb623745
5 changed files with 89 additions and 33 deletions

View File

@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
/** /**
* Used to perform Increment operations on a single row. * Used to perform Increment operations on a single row.
@ -47,6 +48,8 @@ import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Public @InterfaceAudience.Public
@InterfaceStability.Stable @InterfaceStability.Stable
public class Increment extends Mutation implements Comparable<Row> { public class Increment extends Mutation implements Comparable<Row> {
private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE;
private TimeRange tr = new TimeRange(); private TimeRange tr = new TimeRange();
/** /**
@ -255,4 +258,8 @@ public class Increment extends Mutation implements Comparable<Row> {
Row other = (Row) obj; Row other = (Row) obj;
return compareTo(other) == 0; return compareTo(other) == 0;
} }
/**
 * Accounts for the Increment-specific state on top of the base {@code Mutation}
 * footprint: the {@code tr} reference plus the {@code TimeRange} instance it
 * points to (see {@code HEAP_OVERHEAD} = {@code ClassSize.REFERENCE + ClassSize.TIMERANGE}).
 * @return the extra heap size contributed by this class's own fields
 */
protected long extraHeapSize(){
return HEAP_OVERHEAD;
}
} }

View File

@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
@ -41,7 +42,8 @@ import java.util.UUID;
@InterfaceAudience.Public @InterfaceAudience.Public
@InterfaceStability.Evolving @InterfaceStability.Evolving
public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable { public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
HeapSize {
static final long MUTATION_OVERHEAD = ClassSize.align( static final long MUTATION_OVERHEAD = ClassSize.align(
// This // This
ClassSize.OBJECT + ClassSize.OBJECT +
@ -271,7 +273,8 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
/** /**
* @return Calculate what Mutation adds to class heap size. * @return Calculate what Mutation adds to class heap size.
*/ */
long heapSize() { @Override
public long heapSize() {
long heapsize = MUTATION_OVERHEAD; long heapsize = MUTATION_OVERHEAD;
// Adding row // Adding row
heapsize += ClassSize.align(ClassSize.ARRAY + this.row.length); heapsize += ClassSize.align(ClassSize.ARRAY + this.row.length);
@ -298,9 +301,19 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
} }
} }
heapsize += getAttributeSize(); heapsize += getAttributeSize();
return heapsize; heapsize += extraHeapSize();
return ClassSize.align(heapsize);
} }
/**
 * Subclasses should override this method to add the heap size of their own fields.
 * The base {@code Mutation} contributes nothing extra, hence the 0 default.
 * @return the heap size to add; the caller folds it into the total before
 * applying {@code ClassSize.align}, so the returned value need not be aligned.
 */
protected long extraHeapSize(){
return 0L;
}
/** /**
* @param row Row to check * @param row Row to check
* @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or

View File

@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
/** /**
* Used to perform Put operations for a single row. * Used to perform Put operations for a single row.
@ -284,9 +283,4 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
} }
return filteredList; return filteredList;
} }
@Override
public long heapSize() {
return ClassSize.align((int)super.heapSize());
}
} }

View File

@ -103,6 +103,9 @@ public class ClassSize {
/** Overhead for CopyOnWriteArrayList */ /** Overhead for CopyOnWriteArrayList */
public static final int COPYONWRITE_ARRAYLIST; public static final int COPYONWRITE_ARRAYLIST;
/** Overhead for timerange */
public static final int TIMERANGE;
/* Are we running on jdk7? */ /* Are we running on jdk7? */
private static final boolean JDK7; private static final boolean JDK7;
static { static {
@ -179,6 +182,8 @@ public class ClassSize {
COPYONWRITE_ARRAYSET = align(OBJECT + REFERENCE); COPYONWRITE_ARRAYSET = align(OBJECT + REFERENCE);
COPYONWRITE_ARRAYLIST = align(OBJECT + (2 * REFERENCE) + ARRAY); COPYONWRITE_ARRAYLIST = align(OBJECT + (2 * REFERENCE) + ARRAY);
TIMERANGE = align(ClassSize.OBJECT + Bytes.SIZEOF_LONG * 2 + Bytes.SIZEOF_BOOLEAN);
} }
/** /**

View File

@ -35,12 +35,12 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import junit.framework.TestCase;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CachedBlock; import org.apache.hadoop.hbase.io.hfile.CachedBlock;
@ -50,21 +50,24 @@ import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.MemStore; import org.apache.hadoop.hbase.regionserver.MemStore;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
/** /**
* Testing the sizing that HeapSize offers and compares to the size given by * Testing the sizing that HeapSize offers and compares to the size given by
* ClassSize. * ClassSize.
*/ */
@Category(SmallTests.class) @Category(SmallTests.class)
public class TestHeapSize extends TestCase { public class TestHeapSize {
static final Log LOG = LogFactory.getLog(TestHeapSize.class); static final Log LOG = LogFactory.getLog(TestHeapSize.class);
// List of classes implementing HeapSize // List of classes implementing HeapSize
// BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey
// KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey
@BeforeClass @BeforeClass
public void beforeClass() throws Exception { public static void beforeClass() throws Exception {
// Print detail on jvm so we know what is different should below test fail. // Print detail on jvm so we know what is different should below test fail.
RuntimeMXBean b = ManagementFactory.getRuntimeMXBean(); RuntimeMXBean b = ManagementFactory.getRuntimeMXBean();
LOG.info("name=" + b.getName()); LOG.info("name=" + b.getName());
@ -80,11 +83,11 @@ public class TestHeapSize extends TestCase {
/** /**
* Test our hard-coded sizing of native java objects * Test our hard-coded sizing of native java objects
*/ */
@Test
public void testNativeSizes() throws IOException { public void testNativeSizes() throws IOException {
@SuppressWarnings("rawtypes") Class<?> cl;
Class cl = null; long expected;
long expected = 0L; long actual;
long actual = 0L;
// ArrayList // ArrayList
cl = ArrayList.class; cl = ArrayList.class;
@ -231,11 +234,11 @@ public class TestHeapSize extends TestCase {
* TestHFile since it is a non public class * TestHFile since it is a non public class
* @throws IOException * @throws IOException
*/ */
@Test
public void testSizes() throws IOException { public void testSizes() throws IOException {
@SuppressWarnings("rawtypes") Class<?> cl;
Class cl = null; long expected;
long expected = 0L; long actual;
long actual = 0L;
//KeyValue //KeyValue
cl = KeyValue.class; cl = KeyValue.class;
@ -247,18 +250,6 @@ public class TestHeapSize extends TestCase {
assertEquals(expected, actual); assertEquals(expected, actual);
} }
//Put
cl = Put.class;
expected = ClassSize.estimateBase(cl, false);
//The actual TreeMap is not included in the above calculation
expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE);
Put put = new Put(new byte [] {'0'});
actual = put.heapSize();
if (expected != actual) {
ClassSize.estimateBase(cl, true);
assertEquals(expected, actual);
}
//LruBlockCache Overhead //LruBlockCache Overhead
cl = LruBlockCache.class; cl = LruBlockCache.class;
actual = LruBlockCache.CACHE_FIXED_OVERHEAD; actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
@ -348,5 +339,51 @@ public class TestHeapSize extends TestCase {
// any of these classes are modified without updating overhead sizes. // any of these classes are modified without updating overhead sizes.
} }
/**
 * Verifies the heap sizing of the Mutation hierarchy: the hard-coded
 * {@code ClassSize.TIMERANGE} constant, and the {@code heapSize()} results of
 * {@code Put}, {@code Delete} and {@code Increment}, must each match the value
 * estimated by {@code ClassSize.estimateBase}.
 */
@Test
public void testMutations(){
Class<?> cl;
long expected;
long actual;
// TimeRange: hard-coded overhead constant vs. reflective estimate.
cl = TimeRange.class;
actual = ClassSize.TIMERANGE;
expected = ClassSize.estimateBase(cl, false);
if (expected != actual) {
// Re-run with debug=true so the field-by-field breakdown is logged before failing.
ClassSize.estimateBase(cl, true);
assertEquals(expected, actual);
}
// Put: base estimate plus the backing TreeMap it allocates.
cl = Put.class;
actual = new Put(new byte[]{0}).heapSize();
expected = ClassSize.estimateBase(cl, false);
//The actual TreeMap is not included in the above calculation
expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE);
if (expected != actual) {
// Log the breakdown before failing.
ClassSize.estimateBase(cl, true);
assertEquals(expected, actual);
}
// Delete: same shape as Put — base estimate plus the backing TreeMap.
cl = Delete.class;
actual = new Delete(new byte[]{0}).heapSize();
expected = ClassSize.estimateBase(cl, false);
//The actual TreeMap is not included in the above calculation
expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE);
if (expected != actual) {
// Log the breakdown before failing.
ClassSize.estimateBase(cl, true);
assertEquals(expected, actual);
}
// Increment: additionally carries a TimeRange (reported via extraHeapSize()).
cl = Increment.class;
actual = new Increment(new byte[]{0}).heapSize();
expected = ClassSize.estimateBase(cl, false);
//The actual TreeMap and TimeRange are not included in the above calculation
expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE + ClassSize.TIMERANGE);
if (expected != actual) {
// Log the breakdown before failing.
ClassSize.estimateBase(cl, true);
assertEquals(expected, actual);
}
}
} }