HBASE-1562 How to handle the setting of 32 bit versus 64 bit machines

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@788164 13f79535-47bb-0310-9956-ffa450edef68

commit 8732e8d44f
parent 7886526f30
@@ -399,6 +399,8 @@ Release 0.20.0 - Unreleased
               ConcurrentSkipListSet
    HBASE-1578 Change the name of the in-memory updates from 'memcache' to
               'memtable' or....
+   HBASE-1562 How to handle the setting of 32 bit versus 64 bit machines
+              (Erik Holstad via Stack)

   OPTIMIZATIONS
    HBASE-1412 Change values for delete column and column family in KeyValue
@@ -1784,8 +1784,9 @@ public class KeyValue implements Writable, HeapSize {

   // HeapSize
   public long heapSize() {
-    return ClassSize.alignSize(HeapSize.OBJECT + HeapSize.REFERENCE +
-        HeapSize.BYTE_ARRAY + length + (2 * Bytes.SIZEOF_INT));
+    return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE +
+        ClassSize.align(ClassSize.ARRAY + length) +
+        (2 * Bytes.SIZEOF_INT));
   }

   // Writable
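[Review note] The rewritten heapSize() charges the backing byte[] as an aligned block of its own (array header plus length, rounded up to 8 bytes) on top of the fixed object overhead. A minimal standalone sketch of that arithmetic; the constants below are hard-wired 64-bit stand-ins, not the values ClassSize derives at class-load time:

    public class KeyValueHeapSizeSketch {
      // Assumed 64-bit stand-ins for ClassSize.REFERENCE/OBJECT/ARRAY.
      static final int REFERENCE = 8;
      static final int OBJECT = 2 * REFERENCE;
      static final int ARRAY = 3 * REFERENCE;
      static final int SIZEOF_INT = 4;

      // Round up to the next multiple of 8, the same idea as ClassSize.align().
      static long align(long num) {
        return ((num + 7) >> 3) << 3;
      }

      // Mirrors the shape of the new KeyValue.heapSize(): object header, one
      // reference to the backing array, the aligned array itself, two int fields.
      static long heapSize(int length) {
        return align(OBJECT + REFERENCE + align(ARRAY + length) + 2 * SIZEOF_INT);
      }

      public static void main(String[] args) {
        System.out.println(heapSize(37)); // e.g. a 37-byte backing buffer
      }
    }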
@@ -52,9 +52,10 @@ public class Put implements HeapSize, Writable, Comparable<Put> {
   private Map<byte [], List<KeyValue>> familyMap =
     new TreeMap<byte [], List<KeyValue>>(Bytes.BYTES_COMPARATOR);

-  private static final long OVERHEAD = ClassSize.alignSize(HeapSize.OBJECT +
-      1 * HeapSize.REFERENCE + 1 * HeapSize.ARRAY + 2 * Bytes.SIZEOF_LONG +
-      1 * Bytes.SIZEOF_BOOLEAN + 1 * HeapSize.REFERENCE + HeapSize.TREEMAP_SIZE);
+  private static final long OVERHEAD = ClassSize.align(
+      ClassSize.OBJECT + ClassSize.REFERENCE +
+      2 * Bytes.SIZEOF_LONG + Bytes.SIZEOF_BOOLEAN +
+      ClassSize.REFERENCE + ClassSize.TREEMAP);

   /** Constructor for Writable. DO NOT USE */
   public Put() {}
@@ -201,10 +202,16 @@ public class Put implements HeapSize, Writable, Comparable<Put> {
     this.timestamp = timestamp;
   }

+  /**
+   * @return the number of different families included in this put
+   */
   public int numFamilies() {
     return familyMap.size();
   }

+  /**
+   * @return the total number of KeyValues that will be added with this put
+   */
   public int size() {
     int size = 0;
     for(List<KeyValue> kvList : this.familyMap.values()) {
@@ -270,28 +277,30 @@ public class Put implements HeapSize, Writable, Comparable<Put> {
   //HeapSize
   public long heapSize() {
     long heapsize = OVERHEAD;
-    heapsize += ClassSize.alignSize(this.row.length);
+    //Adding row
+    heapsize += ClassSize.align(ClassSize.ARRAY + this.row.length);
+
+    //Adding map overhead
+    heapsize +=
+      ClassSize.align(this.familyMap.size() * ClassSize.MAP_ENTRY);
     for(Map.Entry<byte [], List<KeyValue>> entry : this.familyMap.entrySet()) {
-      //Adding entry overhead
-      heapsize += HeapSize.MAP_ENTRY_SIZE;

       //Adding key overhead
-      heapsize += HeapSize.REFERENCE + HeapSize.ARRAY +
-        ClassSize.alignSize(entry.getKey().length);
+      heapsize +=
+        ClassSize.align(ClassSize.ARRAY + entry.getKey().length);

       //This part is kinds tricky since the JVM can reuse references if you
       //store the same value, but have a good match with SizeOf at the moment
       //Adding value overhead
-      heapsize += HeapSize.REFERENCE + HeapSize.ARRAYLIST_SIZE;
+      heapsize += ClassSize.align(ClassSize.ARRAYLIST);
       int size = entry.getValue().size();
-      heapsize += size * HeapSize.REFERENCE;
+      heapsize += ClassSize.align(ClassSize.ARRAY +
+        size * ClassSize.REFERENCE);

       for(KeyValue kv : entry.getValue()) {
         heapsize += kv.heapSize();
       }
     }
-    return heapsize;
+    return ClassSize.align((int)heapsize);
   }

   //Writable
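[Review note] The map-entry overhead is now charged once up front (familyMap.size() * MAP_ENTRY) rather than inside the loop, and each value list is charged as an ArrayList plus an aligned array of references. A self-contained sketch of that accounting pattern, with hard-wired 64-bit stand-ins for the ClassSize constants (assumed values, purely illustrative):

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class FamilyMapSizeSketch {
      // Assumed 64-bit stand-ins, not the runtime-derived ClassSize values.
      static final int REFERENCE = 8, OBJECT = 16, ARRAY = 24;
      static final int MAP_ENTRY = 64, ARRAYLIST = 64;

      static long align(long n) { return ((n + 7) >> 3) << 3; }

      // Charge map-entry overhead once for the whole map, then walk each family.
      static long estimate(Map<byte[], List<byte[]>> familyMap) {
        long heapsize = align(familyMap.size() * MAP_ENTRY);
        for (Map.Entry<byte[], List<byte[]>> entry : familyMap.entrySet()) {
          heapsize += align(ARRAY + entry.getKey().length);               // key bytes
          heapsize += align(ARRAYLIST);                                   // list object
          heapsize += align(ARRAY + entry.getValue().size() * REFERENCE); // list slots
          for (byte[] value : entry.getValue()) {
            heapsize += align(OBJECT + REFERENCE + align(ARRAY + value.length));
          }
        }
        return align(heapsize);
      }

      public static void main(String[] args) {
        Map<byte[], List<byte[]>> map = new LinkedHashMap<byte[], List<byte[]>>();
        List<byte[]> values = new ArrayList<byte[]>();
        values.add("value".getBytes());
        map.put("cf".getBytes(), values);
        System.out.println(estimate(map));
      }
    }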
@@ -38,38 +38,6 @@ package org.apache.hadoop.hbase.io;
  * </pre>
  */
 public interface HeapSize {
-
-  /** Reference size is 8 bytes on 64-bit, 4 bytes on 32-bit */
-  static final int REFERENCE = 8;
-
-  /** Object overhead is minimum 2 * reference size (8 bytes on 64-bit) */
-  static final int OBJECT = 2 * REFERENCE;
-
-  /** Array overhead */
-  static final int ARRAY = 3 * REFERENCE;
-
-  /** OverHead for nested arrays */
-  static final int MULTI_ARRAY = (4 * REFERENCE) + ARRAY;
-
-  /** Byte arrays are fixed size below plus its length, 8 byte aligned */
-  static final int BYTE_ARRAY = 3 * REFERENCE;
-
-  /** Overhead for ByteBuffer */
-  static final int BYTE_BUFFER = 56;
-
-  /** String overhead */
-  static final int STRING_SIZE = 64;
-
-  /** Overhead for ArrayList(0) */
-  static final int ARRAYLIST_SIZE = 64;
-
-  /** Overhead for TreeMap */
-  static final int TREEMAP_SIZE = 80;
-
-  /** Overhead for entry in map */
-  static final int MAP_ENTRY_SIZE = 64;
-
   /**
    * @return Approximate 'exclusive deep size' of implementing object. Includes
    * count of payload and hosting object sizings.
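[Review note] With the constants gone, HeapSize is back to being a plain one-method interface, and every size constant now comes from ClassSize, where it is computed once for the running JVM instead of being hard-coded for 64-bit. A sketch of what a hypothetical implementer looks like after this patch (CachedCell and its fields are made up for illustration; only the ClassSize/Bytes constants and align() come from the patch):

    import org.apache.hadoop.hbase.io.HeapSize;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.ClassSize;

    public class CachedCell implements HeapSize {
      private final byte[] payload;   // illustrative field
      private final long timestamp;   // illustrative field

      public CachedCell(byte[] payload, long timestamp) {
        this.payload = payload;
        this.timestamp = timestamp;
      }

      public long heapSize() {
        // Fixed part: object header, one reference, one long; then the aligned array.
        return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE +
            Bytes.SIZEOF_LONG + ClassSize.align(ClassSize.ARRAY + payload.length));
      }
    }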
@@ -1339,10 +1339,6 @@ public class HFile {
      */
     final RawComparator<byte []> comparator;

-    static final int OVERHEAD = (int)ClassSize.alignSize(HeapSize.OBJECT +
-        2 * Bytes.SIZEOF_INT + 1 * HeapSize.MULTI_ARRAY + 2 * HeapSize.ARRAY +
-        4 * HeapSize.REFERENCE);
-
     /*
      * Shutdown default constructor
      */
@@ -1498,23 +1494,28 @@ public class HFile {
     }

     public long heapSize() {
-      long size = OVERHEAD;
+      long heapsize = ClassSize.align(ClassSize.OBJECT +
+          2 * Bytes.SIZEOF_INT + (3 + 1) * ClassSize.REFERENCE);
       //Calculating the size of blockKeys
       if(blockKeys != null) {
+        //Adding array + references overhead
+        heapsize += ClassSize.align(ClassSize.ARRAY +
+            blockKeys.length * ClassSize.REFERENCE);
+        //Adding bytes
         for(byte [] bs : blockKeys) {
-          size += HeapSize.MULTI_ARRAY;
-          size += ClassSize.alignSize(bs.length);
+          heapsize += ClassSize.align(ClassSize.ARRAY + bs.length);
         }
       }
       if(blockOffsets != null) {
-        size += blockOffsets.length * Bytes.SIZEOF_LONG;
+        heapsize += ClassSize.align(ClassSize.ARRAY +
+            blockOffsets.length * Bytes.SIZEOF_LONG);
       }
       if(blockDataSizes != null) {
-        size += blockDataSizes.length * Bytes.SIZEOF_INT;
+        heapsize += ClassSize.align(ClassSize.ARRAY +
+            blockDataSizes.length * Bytes.SIZEOF_INT);
       }

-      return size;
+      return ClassSize.align(heapsize);
     }

   }
@@ -90,9 +90,10 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
   private long missCount = 0;

   /** Memory overhead of this Object (for HeapSize) */
-  private static final int OVERHEAD = (int)ClassSize.alignSize(HeapSize.OBJECT +
-    1 * Bytes.SIZEOF_FLOAT + 2 * Bytes.SIZEOF_INT + 1 * HeapSize.ARRAY +
-    3 * HeapSize.REFERENCE + 4 * Bytes.SIZEOF_LONG);
+  private static final int OVERHEAD = ClassSize.align(
+    ClassSize.OBJECT + 1 * Bytes.SIZEOF_FLOAT + 2 * Bytes.SIZEOF_INT +
+    ClassSize.align(ClassSize.ARRAY) + 3 * ClassSize.REFERENCE +
+    4 * Bytes.SIZEOF_LONG);

   /**
    * Constructs a new, empty map with the specified initial capacity,
@@ -119,7 +120,7 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
     if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
       throw new IllegalArgumentException("Load factor must be > 0");
     }
-    if (maxMemUsage <= (OVERHEAD + initialCapacity * HeapSize.REFERENCE)) {
+    if (maxMemUsage <= (OVERHEAD + initialCapacity * ClassSize.REFERENCE)) {
       throw new IllegalArgumentException("Max memory usage too small to " +
       "support base overhead");
     }
@@ -300,7 +301,7 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
    * @return memory usage of map in bytes
    */
   public long heapSize() {
-    return (memTotal - memFree);
+    return ClassSize.align(memTotal - memFree);
   }

   //--------------------------------------------------------------------------
@@ -503,7 +504,7 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
    * @return baseline memory overhead of object in bytes
    */
   private long getMinimumUsage() {
-    return OVERHEAD + (entries.length * HeapSize.REFERENCE);
+    return OVERHEAD + (entries.length * ClassSize.REFERENCE);
   }

   //--------------------------------------------------------------------------
@@ -724,7 +725,7 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
     }

     // Determine how much additional space will be required to grow the array
-    long requiredSpace = (newCapacity - oldCapacity) * HeapSize.REFERENCE;
+    long requiredSpace = (newCapacity - oldCapacity) * ClassSize.REFERENCE;

     // Verify/enforce we have sufficient memory to grow
     checkAndFreeMemory(requiredSpace);
@@ -833,7 +834,6 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
    */
   private void init() {
     memFree -= OVERHEAD;
-    memFree -= (entries.length * HeapSize.REFERENCE);
   }

   //--------------------------------------------------------------------------
@@ -975,8 +975,9 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
     protected long heapSize;

     /** The baseline overhead memory usage of this class */
-    static final int OVERHEAD = HeapSize.OBJECT + 5 * HeapSize.REFERENCE +
-        1 * Bytes.SIZEOF_INT + 1 * Bytes.SIZEOF_LONG;
+    static final int OVERHEAD = ClassSize.OBJECT +
+        5 * ClassSize.REFERENCE + 1 * Bytes.SIZEOF_INT +
+        1 * Bytes.SIZEOF_LONG;

     /**
      * Create a new entry.
@@ -1139,8 +1140,8 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
    * @return size of String in bytes
    */
   private long heapSize(String s) {
-    return HeapSize.STRING_SIZE +
-      ClassSize.alignSize(s.length() * Bytes.SIZEOF_CHAR);
+    return ClassSize.STRING + ClassSize.align(ClassSize.ARRAY +
+      s.length() * Bytes.SIZEOF_CHAR);
   }

   /**
@@ -1148,7 +1149,8 @@ implements HeapSize, Map<String,ByteBuffer>, BlockCache {
    * @return size of ByteBuffer in bytes
    */
   private long heapSize(ByteBuffer b) {
-    return HeapSize.BYTE_BUFFER + ClassSize.alignSize(b.capacity());
+    return ClassSize.BYTE_BUFFER +
+      ClassSize.align(ClassSize.ARRAY + b.capacity());
   }

 }
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.regionserver;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.io.*;

 import java.io.*;
@@ -42,7 +43,7 @@ public class HLogKey implements WritableComparable<HLogKey>, HeapSize {
   private long logSeqNum;
   // Time at which this edit was written.
   private long writeTime;
-  private int HEAP_TAX = HeapSize.OBJECT + (2 * HeapSize.BYTE_ARRAY) +
+  private int HEAP_TAX = ClassSize.OBJECT + (2 * ClassSize.ARRAY) +
     (2 * Bytes.SIZEOF_LONG);

   /** Writable Consructor -- Do not use. */
@@ -28,13 +28,11 @@ import java.util.Set;

 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassSize;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

-
-
-
 /**
  * The LruHashMap is a memory-aware HashMap with a configurable maximum
  * memory footprint.
@@ -67,8 +65,8 @@ implements HeapSize, Map<K,V> {

   /** Memory overhead of this Object (for HeapSize) */
   private static final int OVERHEAD = 5 * Bytes.SIZEOF_LONG +
-    2 * Bytes.SIZEOF_INT + 2 * Bytes.SIZEOF_FLOAT + 3 * HeapSize.REFERENCE +
-    1 * HeapSize.ARRAY;
+    2 * Bytes.SIZEOF_INT + 2 * Bytes.SIZEOF_FLOAT + 3 * ClassSize.REFERENCE +
+    1 * ClassSize.ARRAY;

   /** Load factor allowed (usually 75%) */
   private final float loadFactor;
@@ -119,7 +117,7 @@ implements HeapSize, Map<K,V> {
     if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
       throw new IllegalArgumentException("Load factor must be > 0");
     }
-    if (maxMemUsage <= (OVERHEAD + initialCapacity * HeapSize.REFERENCE)) {
+    if (maxMemUsage <= (OVERHEAD + initialCapacity * ClassSize.REFERENCE)) {
       throw new IllegalArgumentException("Max memory usage too small to " +
       "support base overhead");
     }
@@ -472,7 +470,7 @@ implements HeapSize, Map<K,V> {
    * @return baseline memory overhead of object in bytes
    */
   private long getMinimumUsage() {
-    return OVERHEAD + (entries.length * HeapSize.REFERENCE);
+    return OVERHEAD + (entries.length * ClassSize.REFERENCE);
   }

   //--------------------------------------------------------------------------
@@ -693,7 +691,7 @@ implements HeapSize, Map<K,V> {
     }

     // Determine how much additional space will be required to grow the array
-    long requiredSpace = (newCapacity - oldCapacity) * HeapSize.REFERENCE;
+    long requiredSpace = (newCapacity - oldCapacity) * ClassSize.REFERENCE;

     // Verify/enforce we have sufficient memory to grow
     checkAndFreeMemory(requiredSpace);
@@ -802,7 +800,7 @@ implements HeapSize, Map<K,V> {
    */
   private void init() {
     memFree -= OVERHEAD;
-    memFree -= (entries.length * HeapSize.REFERENCE);
+    memFree -= (entries.length * ClassSize.REFERENCE);
   }

   //--------------------------------------------------------------------------
@@ -927,8 +925,8 @@ implements HeapSize, Map<K,V> {
   protected static class Entry<K extends HeapSize, V extends HeapSize>
   implements Map.Entry<K,V>, HeapSize {
     /** The baseline overhead memory usage of this class */
-    static final int OVERHEAD = 1 * Bytes.SIZEOF_LONG + 5 * HeapSize.REFERENCE +
-      2 * Bytes.SIZEOF_INT;
+    static final int OVERHEAD = 1 * Bytes.SIZEOF_LONG +
+      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT;

     /** The key */
     protected final K key;
@@ -22,10 +22,10 @@ package org.apache.hadoop.hbase.util;

 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
+import java.util.Properties;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.io.HeapSize;

 /**
  * Class for determining the "size" of a class, an attempt to calculate the
@@ -36,34 +36,72 @@ import org.apache.hadoop.hbase.io.HeapSize;
 public class ClassSize {
   static final Log LOG = LogFactory.getLog(ClassSize.class);

-  private int refSize;
-  private int minObjectSize;
+  private static int nrOfRefsPerObj = 2;
+
+  /** Array overhead */
+  public static int ARRAY = 0;
+
+  /** Overhead for ArrayList(0) */
+  public static int ARRAYLIST = 0;
+
+  /** Overhead for ByteBuffer */
+  public static int BYTE_BUFFER = 0;
+
+  /** Overhead for an Integer */
+  public static int INTEGER = 0;
+
+  /** Overhead for entry in map */
+  public static int MAP_ENTRY = 0;
+
+  /** Object overhead is minimum 2 * reference size (8 bytes on 64-bit) */
+  public static int OBJECT = 0;
+
+  /** Reference size is 8 bytes on 64-bit, 4 bytes on 32-bit */
+  public static int REFERENCE = 0;
+
+  /** String overhead */
+  public static int STRING = 0;
+
+  /** Overhead for TreeMap */
+  public static int TREEMAP = 0;
+
+  private static final String THIRTY_TWO = "32";

   /**
-   * Constructor
-   * @throws Exception
+   * Method for reading the arc settings and setting overheads according
+   * to 32-bit or 64-bit architecture.
    */
-  public ClassSize() throws Exception{
+  static {
     // Figure out whether this is a 32 or 64 bit machine.
-    Runtime runtime = Runtime.getRuntime();
-    int loops = 10;
-    int sz = 0;
-    for(int i = 0; i < loops; i++) {
-      cleaner(runtime, i);
-      long memBase = runtime.totalMemory() - runtime.freeMemory();
-      Object[] junk = new Object[10000];
-      cleaner(runtime, i);
-      long memUsed = runtime.totalMemory() - runtime.freeMemory() - memBase;
-      sz = (int)((memUsed + junk.length/2)/junk.length);
-      if(sz > 0 ) {
-        break;
-      }
+    Properties sysProps = System.getProperties();
+    String arcModel = sysProps.getProperty("sun.arch.data.model");
+
+    //Default value is set to 8, covering the case when arcModel is unknown
+    REFERENCE = 8;
+    if (arcModel.equals(THIRTY_TWO)) {
+      REFERENCE = 4;
     }

-    refSize = ( 4 > sz) ? 4 : sz;
-    minObjectSize = 4*refSize;
+    ARRAY = 3 * REFERENCE;
+
+    ARRAYLIST = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT +
+        align(Bytes.SIZEOF_INT));
+
+    BYTE_BUFFER = align(OBJECT + REFERENCE + Bytes.SIZEOF_INT +
+        3 * Bytes.SIZEOF_BOOLEAN + 4 * Bytes.SIZEOF_INT + Bytes.SIZEOF_LONG);
+
+    INTEGER = align(OBJECT + Bytes.SIZEOF_INT);
+
+    MAP_ENTRY = align(OBJECT + 5 * REFERENCE + Bytes.SIZEOF_BOOLEAN);
+
+    OBJECT = 2 * REFERENCE;
+
+    TREEMAP = align(OBJECT + 2 * Bytes.SIZEOF_INT + (5+2) * REFERENCE +
+        ClassSize.align(OBJECT + Bytes.SIZEOF_INT));
+
+    STRING = align(OBJECT + REFERENCE + 3 * Bytes.SIZEOF_INT);
   }

   /**
    * The estimate of the size of a class instance depends on whether the JVM
    * uses 32 or 64 bit addresses, that is it depends on the size of an object
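[Review note] The reference size is no longer measured by allocating junk objects and diffing free memory; it is read once from the sun.arch.data.model system property, with 8 bytes as the fallback. A standalone sketch of just that detection step (this sketch flips the comparison to "32".equals(model) so a missing property falls through to the default, whereas the patch compares arcModel.equals(THIRTY_TWO)):

    public class ArchDetectSketch {
      public static void main(String[] args) {
        // "32" on a 32-bit JVM, "64" on a 64-bit JVM; may be null on other JVMs.
        String model = System.getProperty("sun.arch.data.model");
        int reference = 8;               // default, as in the patch
        if ("32".equals(model)) {
          reference = 4;
        }
        System.out.println("data model=" + model + ", reference size=" + reference);
      }
    }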
@@ -78,10 +116,12 @@ public class ClassSize {
    * primitives, the second the number of arrays and the third the number of
    * references.
    */
-  private int [] getSizeCoefficients(Class cl, boolean debug) {
+  @SuppressWarnings("unchecked")
+  private static int [] getSizeCoefficients(Class cl, boolean debug) {
     int primitives = 0;
     int arrays = 0;
-    int references = HeapSize.OBJECT / HeapSize.REFERENCE;
+    //The number of references that a new object takes
+    int references = nrOfRefsPerObj;

     for( ; null != cl; cl = cl.getSuperclass()) {
       Field[] field = cl.getDeclaredFields();
@@ -91,8 +131,9 @@ public class ClassSize {
         Class fieldClass = field[i].getType();
         if( fieldClass.isArray()){
           arrays++;
+          references++;
         }
-        else if(! fieldClass.isPrimitive()){
+        else if(!fieldClass.isPrimitive()){
           references++;
         }
         else {// Is simple primitive
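[Review note] An array-typed field is now counted both as an array and as a reference, since the field itself is a pointer to the array object. A self-contained sketch of the same reflection walk; the coefficients are [primitive bytes, array count, reference count]. The static-field skip and the primitive sizing are choices of this sketch, and Sample is a made-up class:

    import java.lang.reflect.Field;
    import java.lang.reflect.Modifier;

    public class CoefficientSketch {
      static int[] sizeCoefficients(Class<?> cl) {
        int primitives = 0, arrays = 0, references = 2; // ~2 refs of object header
        for (; cl != null; cl = cl.getSuperclass()) {
          for (Field f : cl.getDeclaredFields()) {
            if (Modifier.isStatic(f.getModifiers())) {
              continue;                  // statics are not per-instance cost
            }
            Class<?> type = f.getType();
            if (type.isArray()) {
              arrays++;
              references++;              // the field itself is a reference
            } else if (!type.isPrimitive()) {
              references++;
            } else {
              primitives += sizeOfPrimitive(type);
            }
          }
        }
        return new int[] { primitives, arrays, references };
      }

      static int sizeOfPrimitive(Class<?> t) {
        if (t == long.class || t == double.class) return 8;
        if (t == int.class || t == float.class) return 4;
        if (t == short.class || t == char.class) return 2;
        return 1;                        // byte, boolean
      }

      static class Sample { int a; long b; byte[] c; String d; } // made up

      public static void main(String[] args) {
        int[] c = sizeCoefficients(Sample.class);
        System.out.println(c[0] + " primitive bytes, " + c[1] + " arrays, "
            + c[2] + " references");
      }
    }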
@@ -136,21 +177,21 @@
    *
    * @return the size estimate, in bytes
    */
-  private long estimateBaseFromCoefficients(int [] coeff, boolean debug) {
-    int size = coeff[0] + (coeff[1]*4 + coeff[2])*refSize;
+  private static long estimateBaseFromCoefficients(int [] coeff, boolean debug) {
+    long size = coeff[0] + align(coeff[1]*ARRAY) + coeff[2]*REFERENCE;

     // Round up to a multiple of 8
-    size = (int)alignSize(size);
+    size = align(size);
     if(debug) {
       if (LOG.isDebugEnabled()) {
         // Write out region name as string and its encoded name.
         LOG.debug("Primitives " + coeff[0] + ", arrays " + coeff[1] +
-            ", references(inlcuding " + HeapSize.OBJECT +
-            ", for object overhead) " + coeff[2] + ", refSize " + refSize +
+            ", references(inlcuding " + nrOfRefsPerObj +
+            ", for object overhead) " + coeff[2] + ", refSize " + REFERENCE +
             ", size " + size);
       }
     }
-    return (size < minObjectSize) ? minObjectSize : size;
+    return size;
   }

   /**
@@ -162,33 +203,29 @@
    *
    * @return the size estimate in bytes.
    */
-  public long estimateBase(Class cl, boolean debug) {
+  @SuppressWarnings("unchecked")
+  public static long estimateBase(Class cl, boolean debug) {
     return estimateBaseFromCoefficients( getSizeCoefficients(cl, debug), debug);
   }

   /**
-   * Tries to clear all the memory used to estimate the reference size for the
-   * current JVM
-   * @param runtime
-   * @param i
-   * @throws Exception
+   * Aligns a number to 8.
+   * @param num number to align to 8
+   * @return smallest number >= input that is a multiple of 8
    */
-  private void cleaner(Runtime runtime, int i) throws Exception{
-    Thread.sleep(i*1000);
-    runtime.gc();runtime.gc(); runtime.gc();runtime.gc();runtime.gc();
-    runtime.runFinalization();
+  public static int align(int num) {
+    return (int)(align((long)num));
   }

   /**
    * Aligns a number to 8.
    * @param num number to align to 8
    * @return smallest number >= input that is a multiple of 8
    */
-  public static long alignSize(int num) {
-    int aligned = (num + 7)/8;
-    aligned *= 8;
-    return aligned;
+  public static long align(long num) {
+    //The 7 comes from that the alignSize is 8 which is the number of bytes
+    //stored and sent together
+    return ((num + 7) >> 3) << 3;
   }

 }
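[Review note] align() rounds up to the next multiple of 8 with shifts; for non-negative input this is the same as ((num + 7) / 8) * 8. A quick standalone check:

    public class AlignSketch {
      static long align(long num) {
        return ((num + 7) >> 3) << 3;   // equals ((num + 7) / 8) * 8 for num >= 0
      }

      public static void main(String[] args) {
        for (long n : new long[] { 0, 1, 7, 8, 9, 13, 24, 25 }) {
          System.out.println(n + " -> " + align(n));
        }
        // 0->0, 1->8, 7->8, 8->8, 9->16, 13->16, 24->24, 25->32
      }
    }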
@@ -1,5 +1,7 @@
 package org.apache.hadoop.hbase.io;

+import java.io.IOException;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.KeyValue;
@@ -24,47 +26,46 @@ public class TestHeapSize extends TestCase {
    * Testing the classes that implements HeapSize and are a part of 0.20.
    * Some are not tested here for example BlockIndex which is tested in
    * TestHFile since it is a non public class
+   * @throws IOException
    */
-  public void testSizes() {
-    ClassSize cs = null;
+  @SuppressWarnings("unchecked")
+  public void testSizes() throws IOException {
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    try {
-      cs = new ClassSize();
-    } catch(Exception e) {}

     //KeyValue
     cl = KeyValue.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);

     KeyValue kv = new KeyValue();
     actual = kv.heapSize();
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }

     //LruBlockCache
     cl = LruBlockCache.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     LruBlockCache c = new LruBlockCache(1,1,200);
     //Since minimum size for the for a LruBlockCache is 1
     //we need to remove one reference from the heapsize
-    actual = c.heapSize() - HeapSize.REFERENCE;
+    actual = c.heapSize();// - ClassSize.REFERENCE_SIZE;
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }

     //Put
     cl = Put.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     //The actual TreeMap is not included in the above calculation
-    expected += HeapSize.TREEMAP_SIZE;
+    expected += ClassSize.TREEMAP;
     Put put = new Put(Bytes.toBytes(""));
     actual = put.heapSize();
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
   }
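[Review note] The tests now follow one pattern throughout: compare the reflection-based estimate from ClassSize.estimateBase against the hand-maintained heapSize(), and re-run the estimate with debug logging only on mismatch. The pattern extracted as a tiny JUnit 3 test (the class name is made up; KeyValue, ClassSize and TestCase are the ones the patch itself uses):

    import junit.framework.TestCase;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.util.ClassSize;

    public class HeapSizePatternSketch extends TestCase {
      public void testKeyValueAgainstEstimate() {
        long expected = ClassSize.estimateBase(KeyValue.class, false);
        long actual = new KeyValue().heapSize();
        if (expected != actual) {
          // Re-run with debug=true so the coefficient breakdown is logged.
          ClassSize.estimateBase(KeyValue.class, true);
          assertEquals(expected, actual);
        }
      }
    }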
@@ -29,7 +29,6 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestCase;
-import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.hfile.HFile.BlockIndex;
 import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
 import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
@@ -251,25 +250,22 @@ public class TestHFile extends HBaseTestCase {
   /**
    * Checks if the HeapSize calculator is within reason
    */
-  public void testHeapSizeForBlockIndex() {
-    ClassSize cs = null;
+  @SuppressWarnings("unchecked")
+  public void testHeapSizeForBlockIndex() throws IOException{
     Class cl = null;
     long expected = 0L;
     long actual = 0L;
-    try {
-      cs = new ClassSize();
-    } catch(Exception e) {}

-    //KeyValue
     cl = BlockIndex.class;
-    expected = cs.estimateBase(cl, false);
+    expected = ClassSize.estimateBase(cl, false);
     BlockIndex bi = new BlockIndex(Bytes.BYTES_RAWCOMPARATOR);
     actual = bi.heapSize();
-    //Since we have a [[]] in BlockIndex and the checker only sees the [] we
-    // miss a MULTI_ARRAY which is 4*Reference = 32 B
-    actual -= 32;
+    //Since the arrays in BlockIndex(byte [][] blockKeys, long [] blockOffsets,
+    //int [] blockDataSizes) are all null they are not going to show up in the
+    //HeapSize calculation, so need to remove those array costs from ecpected.
+    expected -= ClassSize.align(3 * ClassSize.ARRAY);
     if(expected != actual) {
-      cs.estimateBase(cl, true);
+      ClassSize.estimateBase(cl, true);
       assertEquals(expected, actual);
     }
   }