HBASE-24659 Calculate FIXED_OVERHEAD automatically (#2018)

Co-authored-by: niuyulin <niuyulin@xiaomi.com>

Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: stack <stack@apache.org>
This commit is contained in:
niuyulin 2020-08-07 05:50:32 +08:00 committed by stack
parent c372ff19cf
commit e099aa5e20
7 changed files with 43 additions and 46 deletions

View File

@ -37,13 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
*/ */
@InterfaceAudience.Private @InterfaceAudience.Private
public class HFileContext implements HeapSize, Cloneable { public class HFileContext implements HeapSize, Cloneable {
public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileContext.class, false);
// Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
// usesHBaseChecksum, includesMvcc, includesTags and compressTags
4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
//byte[] headers for column family and table name
2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);
private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024; private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;

View File

@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.io.hfile;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
/** /**
@ -42,7 +41,8 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable {
this(hfileName, offset, true, BlockType.DATA); this(hfileName, offset, true, BlockType.DATA);
} }
public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica, BlockType blockType) { public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica,
BlockType blockType) {
this.isPrimaryReplicaBlock = isPrimaryReplica; this.isPrimaryReplicaBlock = isPrimaryReplica;
this.hfileName = hfileName; this.hfileName = hfileName;
this.offset = offset; this.offset = offset;
@ -71,12 +71,7 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable {
return this.hfileName + '_' + this.offset; return this.hfileName + '_' + this.offset;
} }
public static final long FIXED_OVERHEAD = ClassSize.align( public static final long FIXED_OVERHEAD = ClassSize.estimateBase(BlockCacheKey.class, false);
ClassSize.OBJECT +
Bytes.SIZEOF_BOOLEAN +
ClassSize.REFERENCE + // this.hfileName
ClassSize.REFERENCE + // this.blockType
Bytes.SIZEOF_LONG); // this.offset
/** /**
* Strings have two bytes per character due to default Java Unicode encoding * Strings have two bytes per character due to default Java Unicode encoding

View File

@ -113,14 +113,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@InterfaceAudience.Private @InterfaceAudience.Private
public class HFileBlock implements Cacheable { public class HFileBlock implements Cacheable {
private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class); private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT + public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileBlock.class, false);
// BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
5 * ClassSize.REFERENCE +
// On-disk size, uncompressed size, and next block's on-disk size
// bytePerChecksum and onDiskDataSize
4 * Bytes.SIZEOF_INT +
// This and previous block offset
2 * Bytes.SIZEOF_LONG);
// Block Header fields. // Block Header fields.

View File

@ -37,7 +37,6 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
@ -583,10 +582,11 @@ public class LruBlockCache implements FirstLevelBlockCache {
int numEvicted = 0; int numEvicted = 0;
for (BlockCacheKey key : map.keySet()) { for (BlockCacheKey key : map.keySet()) {
if (key.getHfileName().equals(hfileName)) { if (key.getHfileName().equals(hfileName)) {
if (evictBlock(key)) if (evictBlock(key)) {
++numEvicted; ++numEvicted;
} }
} }
}
if (victimHandler != null) { if (victimHandler != null) {
numEvicted += victimHandler.evictBlocksByHfileName(hfileName); numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
} }
@ -657,7 +657,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
void evict() { void evict() {
// Ensure only one eviction at a time // Ensure only one eviction at a time
if(!evictionLock.tryLock()) return; if (!evictionLock.tryLock()) {
return;
}
try { try {
evictionInProgress = true; evictionInProgress = true;
@ -670,7 +672,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
StringUtils.byteDesc(currentSize)); StringUtils.byteDesc(currentSize));
} }
if (bytesToFree <= 0) return; if (bytesToFree <= 0) {
return;
}
// Instantiate priority buckets // Instantiate priority buckets
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize()); BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
@ -945,7 +949,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
} }
} }
LruBlockCache cache = this.cache.get(); LruBlockCache cache = this.cache.get();
if (cache == null) break; if (cache == null) {
break;
}
cache.evict(); cache.evict();
} }
} }
@ -1022,10 +1028,8 @@ public class LruBlockCache implements FirstLevelBlockCache {
return this.stats; return this.stats;
} }
public final static long CACHE_FIXED_OVERHEAD = ClassSize.align( public final static long CACHE_FIXED_OVERHEAD =
(4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) + ClassSize.estimateBase(LruBlockCache.class, false);
(6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
+ ClassSize.OBJECT);
@Override @Override
public long heapSize() { public long heapSize() {
@ -1093,9 +1097,13 @@ public class LruBlockCache implements FirstLevelBlockCache {
@Override @Override
public int compareTo(CachedBlock other) { public int compareTo(CachedBlock other) {
int diff = this.getFilename().compareTo(other.getFilename()); int diff = this.getFilename().compareTo(other.getFilename());
if (diff != 0) return diff; if (diff != 0) {
return diff;
}
diff = Long.compare(this.getOffset(), other.getOffset()); diff = Long.compare(this.getOffset(), other.getOffset());
if (diff != 0) return diff; if (diff != 0) {
return diff;
}
if (other.getCachedTime() < 0 || this.getCachedTime() < 0) { if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime()); throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
} }

View File

@ -8379,12 +8379,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
} }
} }
public static final long FIXED_OVERHEAD = ClassSize.align( public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HRegion.class, false);
ClassSize.OBJECT +
55 * ClassSize.REFERENCE +
3 * Bytes.SIZEOF_INT +
14 * Bytes.SIZEOF_LONG +
2 * Bytes.SIZEOF_BOOLEAN);
// woefully out of date - currently missing: // woefully out of date - currently missing:
// 1 x HashMap - coprocessorServiceHandlers // 1 x HashMap - coprocessorServiceHandlers

View File

@ -2580,9 +2580,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation,
return this.cacheConf; return this.cacheConf;
} }
public static final long FIXED_OVERHEAD = public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HStore.class, false);
ClassSize.align(ClassSize.OBJECT + (29 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
+ (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD
+ ClassSize.OBJECT + ClassSize.REENTRANT_LOCK + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK

View File

@ -602,5 +602,19 @@ public class TestHeapSize {
assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8); assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
} }
} }
}
@Test
public void testAutoCalcFixedOverHead() {
Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
HFileBlock.class, HStore.class, LruBlockCache.class };
for (Class cl : classList) {
// do estimate in advance to ensure class is loaded
ClassSize.estimateBase(cl, false);
long startTime = System.currentTimeMillis();
ClassSize.estimateBase(cl, false);
long endTime = System.currentTimeMillis();
assertTrue(endTime - startTime < 5);
}
}
}