HBASE-8135 Mutation should implement HeapSize
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1458418 13f79535-47bb-0310-9956-ffa450edef68
parent c352a848a1
commit 01bb623745

Increment.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;

 /**
  * Used to perform Increment operations on a single row.
@@ -47,6 +48,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Increment extends Mutation implements Comparable<Row> {
+  private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE;
+
   private TimeRange tr = new TimeRange();

   /**
@@ -255,4 +258,8 @@ public class Increment extends Mutation implements Comparable<Row> {
     Row other = (Row) obj;
     return compareTo(other) == 0;
   }
+
+  protected long extraHeapSize(){
+    return HEAP_OVERHEAD;
+  }
 }

Mutation.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;

@@ -41,7 +42,8 @@ import java.util.UUID;

 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable {
+public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
+    HeapSize {
   static final long MUTATION_OVERHEAD = ClassSize.align(
       // This
       ClassSize.OBJECT +
@@ -271,7 +273,8 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
   /**
    * @return Calculate what Mutation adds to class heap size.
    */
-  long heapSize() {
+  @Override
+  public long heapSize() {
     long heapsize = MUTATION_OVERHEAD;
     // Adding row
     heapsize += ClassSize.align(ClassSize.ARRAY + this.row.length);
@@ -298,9 +301,19 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
       }
     }
     heapsize += getAttributeSize();
-    return heapsize;
+    heapsize += extraHeapSize();
+    return ClassSize.align(heapsize);
   }

+  /**
+   * Subclasses should override this method to add the heap size of their own fields.
+   * @return the heap size to add (will be aligned).
+   */
+  protected long extraHeapSize(){
+    return 0L;
+  }
+
+
   /**
    * @param row Row to check
    * @throws IllegalArgumentException Thrown if <code>row</code> is empty or null or
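
Note on the change above: Mutation#heapSize() is now the single entry point; it adds whatever a subclass reports through extraHeapSize() and aligns the total once, which is why Put's own heapSize() override is deleted in the next file. Below is a minimal, self-contained sketch of that template-method pattern; BaseOp, IncrementLikeOp and all constants are made up for illustration and are not the HBase code itself.

```java
// Standalone illustration of the heapSize()/extraHeapSize() split introduced above.
public class HeapSizePatternSketch {

  abstract static class BaseOp {
    static final long BASE_OVERHEAD = 64L;          // stands in for MUTATION_OVERHEAD

    public long heapSize() {
      long heapsize = BASE_OVERHEAD;
      heapsize += extraHeapSize();                  // subclass contribution
      return align(heapsize);                       // align once, at the top level
    }

    /** Subclasses override this to report the heap size of their own fields. */
    protected long extraHeapSize() {
      return 0L;
    }
  }

  static class IncrementLikeOp extends BaseOp {
    static final long EXTRA_OVERHEAD = 8L + 40L;    // e.g. a reference plus a small value object

    @Override
    protected long extraHeapSize() {
      return EXTRA_OVERHEAD;
    }
  }

  /** Round up to an 8-byte boundary, the same rule ClassSize.align applies. */
  static long align(long num) {
    return ((num + 7) >> 3) << 3;
  }

  public static void main(String[] args) {
    System.out.println(new IncrementLikeOp().heapSize());  // 112 with these placeholder numbers
  }
}
```

Because only the base heapSize() aligns, a subclass returns its raw extra bytes and never double-aligns.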

Put.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;

 /**
  * Used to perform Put operations for a single row.
@@ -284,9 +283,4 @@ public class Put extends Mutation implements HeapSize, Comparable<Row> {
     }
     return filteredList;
   }
-
-  @Override
-  public long heapSize() {
-    return ClassSize.align((int)super.heapSize());
-  }
 }

ClassSize.java
@@ -103,6 +103,9 @@ public class ClassSize {
   /** Overhead for CopyOnWriteArrayList */
   public static final int COPYONWRITE_ARRAYLIST;

+  /** Overhead for timerange */
+  public static final int TIMERANGE;
+
   /* Are we running on jdk7? */
   private static final boolean JDK7;
   static {
@@ -179,6 +182,8 @@ public class ClassSize {
     COPYONWRITE_ARRAYSET = align(OBJECT + REFERENCE);

     COPYONWRITE_ARRAYLIST = align(OBJECT + (2 * REFERENCE) + ARRAY);
+
+    TIMERANGE = align(ClassSize.OBJECT + Bytes.SIZEOF_LONG * 2 + Bytes.SIZEOF_BOOLEAN);
   }

   /**
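
Note on the ClassSize change above: TIMERANGE accounts for an object header plus TimeRange's two long timestamps and its boolean flag. A rough standalone check of the arithmetic follows, assuming a 16-byte object header as on a typical 64-bit JVM; the real OBJECT value is derived at runtime in ClassSize's static initializer, so this is only a sanity check, not the HBase code.

```java
// Back-of-the-envelope version of the TIMERANGE computation above.
public class TimeRangeOverheadSketch {
  static final int OBJECT = 16;        // assumed object header size on a 64-bit JVM
  static final int SIZEOF_LONG = 8;    // matches Bytes.SIZEOF_LONG
  static final int SIZEOF_BOOLEAN = 1; // matches Bytes.SIZEOF_BOOLEAN

  /** Round up to an 8-byte boundary, the same rule ClassSize.align applies. */
  static int align(int num) {
    return ((num + 7) >> 3) << 3;
  }

  public static void main(String[] args) {
    // 16 + 2*8 + 1 = 33, aligned up to 40 bytes under these assumptions
    System.out.println(align(OBJECT + SIZEOF_LONG * 2 + SIZEOF_BOOLEAN));
  }
}
```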

TestHeapSize.java
@@ -35,12 +35,12 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantReadWriteLock;

-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
 import org.apache.hadoop.hbase.io.hfile.CachedBlock;
@@ -50,21 +50,24 @@ import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.MemStore;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.junit.BeforeClass;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;

+import static org.junit.Assert.assertEquals;
+
 /**
  * Testing the sizing that HeapSize offers and compares to the size given by
  * ClassSize.
  */
 @Category(SmallTests.class)
-public class TestHeapSize extends TestCase {
+public class TestHeapSize {
   static final Log LOG = LogFactory.getLog(TestHeapSize.class);
   // List of classes implementing HeapSize
   // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey
   // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey

   @BeforeClass
-  public void beforeClass() throws Exception {
+  public static void beforeClass() throws Exception {
     // Print detail on jvm so we know what is different should below test fail.
     RuntimeMXBean b = ManagementFactory.getRuntimeMXBean();
     LOG.info("name=" + b.getName());
@@ -80,11 +83,11 @@ public class TestHeapSize extends TestCase {
   /**
    * Test our hard-coded sizing of native java objects
    */
+  @Test
   public void testNativeSizes() throws IOException {
-    @SuppressWarnings("rawtypes")
-    Class cl = null;
-    long expected = 0L;
-    long actual = 0L;
+    Class<?> cl;
+    long expected;
+    long actual;

     // ArrayList
     cl = ArrayList.class;
@@ -231,11 +234,11 @@ public class TestHeapSize extends TestCase {
    * TestHFile since it is a non public class
    * @throws IOException
    */
+  @Test
   public void testSizes() throws IOException {
-    @SuppressWarnings("rawtypes")
-    Class cl = null;
-    long expected = 0L;
-    long actual = 0L;
+    Class<?> cl;
+    long expected;
+    long actual;

     //KeyValue
     cl = KeyValue.class;
@@ -247,18 +250,6 @@ public class TestHeapSize extends TestCase {
       assertEquals(expected, actual);
     }

-    //Put
-    cl = Put.class;
-    expected = ClassSize.estimateBase(cl, false);
-    //The actual TreeMap is not included in the above calculation
-    expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE);
-    Put put = new Put(new byte [] {'0'});
-    actual = put.heapSize();
-    if (expected != actual) {
-      ClassSize.estimateBase(cl, true);
-      assertEquals(expected, actual);
-    }
-
     //LruBlockCache Overhead
     cl = LruBlockCache.class;
     actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
@@ -348,5 +339,51 @@ public class TestHeapSize extends TestCase {
     // any of these classes are modified without updating overhead sizes.
   }

+  @Test
+  public void testMutations(){
+    Class<?> cl;
+    long expected;
+    long actual;
+
+    cl = TimeRange.class;
+    actual = ClassSize.TIMERANGE;
+    expected = ClassSize.estimateBase(cl, false);
+    if (expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+
+    cl = Put.class;
+    actual = new Put(new byte[]{0}).heapSize();
+    expected = ClassSize.estimateBase(cl, false);
+    //The actual TreeMap is not included in the above calculation
+    expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE);
+    if (expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+
+
+    cl = Delete.class;
+    actual = new Delete(new byte[]{0}).heapSize();
+    expected = ClassSize.estimateBase(cl, false);
+    //The actual TreeMap is not included in the above calculation
+    expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE);
+    if (expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+
+    cl = Increment.class;
+    actual = new Increment(new byte[]{0}).heapSize();
+    expected = ClassSize.estimateBase(cl, false);
+    //The actual TreeMap and TimeRange are not included in the above calculation
+    expected += ClassSize.align(ClassSize.TREEMAP + ClassSize.REFERENCE + ClassSize.TIMERANGE);
+    if (expected != actual) {
+      ClassSize.estimateBase(cl, true);
+      assertEquals(expected, actual);
+    }
+  }
+
 }
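
Usage note: with Mutation implementing HeapSize, generic client code can budget heap for any buffered mutation without special-casing Put. The sketch below is a possible example against the client API of that era; the Put.add and Increment.addColumn calls are assumptions about that API version rather than something this commit shows.

```java
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class MutationHeapSizeDemo {
  public static void main(String[] args) {
    // Build a couple of mutations; the column-adding calls assume the pre-1.0
    // client API (Put.add / Increment.addColumn).
    Put put = new Put(Bytes.toBytes("row-1"));
    put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));

    Increment inc = new Increment(Bytes.toBytes("row-1"));
    inc.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("counter"), 1L);

    // After HBASE-8135 every Mutation reports heapSize(), so code such as a write
    // buffer can track memory generically, whatever the concrete subclass is.
    long buffered = 0L;
    for (Mutation m : new Mutation[] { put, inc }) {
      buffered += m.heapSize();
    }
    System.out.println("buffered mutations occupy ~" + buffered + " bytes on heap");
  }
}
```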