HBASE-24659 Calculate FIXED_OVERHEAD automatically (#2018)

Co-authored-by: niuyulin <niuyulin@xiaomi.com>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: stack <stack@apache.org>

parent c372ff19cf
commit e099aa5e20
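Every file in this commit applies the same pattern: a hand-summed FIXED_OVERHEAD constant is replaced by a single call to ClassSize.estimateBase(SomeClass.class, false), which derives the shallow size from the class's declared fields instead of arithmetic that has to be kept in sync by hand whenever a field is added or removed. A minimal sketch of the resulting shape, using a hypothetical FooBlock class rather than any class touched here:

```java
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.yetus.audience.InterfaceAudience;

@InterfaceAudience.Private
public class FooBlock implements HeapSize {

  // Before: a hand-summed constant that had to track every field, e.g.
  //   ClassSize.align(ClassSize.OBJECT + 2 * ClassSize.REFERENCE + Bytes.SIZEOF_LONG);
  // After: let ClassSize derive the shallow size from the declared fields.
  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(FooBlock.class, false);

  private Object payload;   // counted as one reference by the estimate
  private Object context;   // counted as one reference by the estimate
  private long offset;      // counted as one long by the estimate

  @Override
  public long heapSize() {
    // Fixed shallow size only; variable-size parts would be added on top here.
    return FIXED_OVERHEAD;
  }
}
```

All call sites in this commit pass false for the second argument of estimateBase.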
HFileContext.java

@@ -37,13 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  */
 @InterfaceAudience.Private
 public class HFileContext implements HeapSize, Cloneable {
-  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
-      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
-      // usesHBaseChecksum, includesMvcc, includesTags and compressTags
-      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
-      //byte[] headers for column family and table name
-      2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileContext.class, false);
 
   private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
 
BlockCacheKey.java

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 
 /**
@@ -42,7 +41,8 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable {
     this(hfileName, offset, true, BlockType.DATA);
   }
 
-  public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica, BlockType blockType) {
+  public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica,
+      BlockType blockType) {
     this.isPrimaryReplicaBlock = isPrimaryReplica;
     this.hfileName = hfileName;
     this.offset = offset;
@@ -71,12 +71,7 @@ public class BlockCacheKey implements HeapSize, java.io.Serializable {
     return this.hfileName + '_' + this.offset;
   }
 
-  public static final long FIXED_OVERHEAD = ClassSize.align(
-      ClassSize.OBJECT +
-      Bytes.SIZEOF_BOOLEAN +
-      ClassSize.REFERENCE + // this.hfileName
-      ClassSize.REFERENCE + // this.blockType
-      Bytes.SIZEOF_LONG); // this.offset
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(BlockCacheKey.class, false);
 
   /**
    * Strings have two bytes per character due to default Java Unicode encoding
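BlockCacheKey's fixed part is now estimated automatically, but its heap size still has a variable component: the hfileName String, which scales with the name length per the "two bytes per character" comment kept above. A sketch of how a heapSize() can layer that on top of the auto-calculated constant, using a hypothetical NamedKey class and assuming ClassSize.STRING (the fixed cost of an empty String) alongside ClassSize.align; this illustrates the idea rather than reproducing BlockCacheKey's exact method body:

```java
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.util.ClassSize;

// Hypothetical cache-key-like class, used only to show how a variable-length
// String is added on top of the auto-estimated fixed overhead.
public class NamedKey implements HeapSize {

  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(NamedKey.class, false);

  private final String hfileName;
  private final long offset; // contributes to the estimated fixed overhead

  public NamedKey(String hfileName, long offset) {
    this.hfileName = hfileName;
    this.offset = offset;
  }

  @Override
  public long heapSize() {
    // Fixed shallow size, plus the String header, plus two bytes per character,
    // mirroring the "two bytes per character" note in BlockCacheKey.
    return ClassSize.align(FIXED_OVERHEAD + ClassSize.STRING + 2 * hfileName.length());
  }
}
```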
HFileBlock.java

@@ -113,14 +113,7 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 @InterfaceAudience.Private
 public class HFileBlock implements Cacheable {
   private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
-  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
-      5 * ClassSize.REFERENCE +
-      // On-disk size, uncompressed size, and next block's on-disk size
-      // bytePerChecksum and onDiskDataSize
-      4 * Bytes.SIZEOF_INT +
-      // This and previous block offset
-      2 * Bytes.SIZEOF_LONG);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileBlock.class, false);
 
   // Block Header fields.
 
LruBlockCache.java

@@ -37,7 +37,6 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -583,10 +582,11 @@ public class LruBlockCache implements FirstLevelBlockCache {
     int numEvicted = 0;
     for (BlockCacheKey key : map.keySet()) {
       if (key.getHfileName().equals(hfileName)) {
-        if (evictBlock(key))
+        if (evictBlock(key)) {
           ++numEvicted;
+        }
       }
     }
     if (victimHandler != null) {
       numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
     }
@@ -657,7 +657,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
   void evict() {
 
     // Ensure only one eviction at a time
-    if(!evictionLock.tryLock()) return;
+    if (!evictionLock.tryLock()) {
+      return;
+    }
 
     try {
       evictionInProgress = true;
@@ -670,7 +672,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
           StringUtils.byteDesc(currentSize));
     }
 
-    if (bytesToFree <= 0) return;
+    if (bytesToFree <= 0) {
+      return;
+    }
 
     // Instantiate priority buckets
     BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
@@ -945,7 +949,9 @@ public class LruBlockCache implements FirstLevelBlockCache {
         }
       }
       LruBlockCache cache = this.cache.get();
-      if (cache == null) break;
+      if (cache == null) {
+        break;
+      }
       cache.evict();
     }
   }
@@ -1022,10 +1028,8 @@ public class LruBlockCache implements FirstLevelBlockCache {
     return this.stats;
   }
 
-  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
-      (4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) +
-      (6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
-      + ClassSize.OBJECT);
+  public final static long CACHE_FIXED_OVERHEAD =
+      ClassSize.estimateBase(LruBlockCache.class, false);
 
   @Override
   public long heapSize() {
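CACHE_FIXED_OVERHEAD gets the same treatment. The general idea behind such an automatic estimate, walking the declared instance fields via reflection, adding a per-type size for each, and aligning the total to an 8-byte boundary, can be sketched in plain Java. This is a simplified illustration with assumed header and reference sizes, not HBase's actual ClassSize implementation:

```java
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

// Simplified illustration of a reflection-based shallow-size estimate.
// Real estimators account for JVM details such as compressed oops and field
// packing; this sketch uses fixed, assumed sizes.
public final class ShallowSizeSketch {

  private static final int OBJECT_HEADER = 16; // assumed object header size
  private static final int REFERENCE = 8;      // assumed reference size

  public static long estimate(Class<?> clazz) {
    long size = OBJECT_HEADER;
    for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
      for (Field f : c.getDeclaredFields()) {
        if (Modifier.isStatic(f.getModifiers())) {
          continue; // statics live on the class, not on each instance
        }
        size += sizeOf(f.getType());
      }
    }
    return align(size);
  }

  private static int sizeOf(Class<?> type) {
    if (!type.isPrimitive()) {
      return REFERENCE;
    }
    if (type == long.class || type == double.class) {
      return 8;
    }
    if (type == int.class || type == float.class) {
      return 4;
    }
    if (type == short.class || type == char.class) {
      return 2;
    }
    return 1; // boolean, byte
  }

  private static long align(long size) {
    return (size + 7) & ~7L; // round up to an 8-byte boundary
  }
}
```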
@@ -1093,9 +1097,13 @@ public class LruBlockCache implements FirstLevelBlockCache {
     @Override
     public int compareTo(CachedBlock other) {
       int diff = this.getFilename().compareTo(other.getFilename());
-      if (diff != 0) return diff;
+      if (diff != 0) {
+        return diff;
+      }
       diff = Long.compare(this.getOffset(), other.getOffset());
-      if (diff != 0) return diff;
+      if (diff != 0) {
+        return diff;
+      }
       if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
         throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
       }
HRegion.java

@@ -8379,12 +8379,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     }
   }
 
-  public static final long FIXED_OVERHEAD = ClassSize.align(
-      ClassSize.OBJECT +
-      55 * ClassSize.REFERENCE +
-      3 * Bytes.SIZEOF_INT +
-      14 * Bytes.SIZEOF_LONG +
-      2 * Bytes.SIZEOF_BOOLEAN);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HRegion.class, false);
 
   // woefully out of date - currently missing:
   // 1 x HashMap - coprocessorServiceHandlers
HStore.java

@@ -2580,9 +2580,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation,
     return this.cacheConf;
   }
 
-  public static final long FIXED_OVERHEAD =
-      ClassSize.align(ClassSize.OBJECT + (29 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
-          + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HStore.class, false);
 
   public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD
       + ClassSize.OBJECT + ClassSize.REENTRANT_LOCK
TestHeapSize.java

@@ -602,5 +602,19 @@ public class TestHeapSize {
       assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
     }
   }
 
+  @Test
+  public void testAutoCalcFixedOverHead() {
+    Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
+        HFileBlock.class, HStore.class, LruBlockCache.class };
+    for (Class cl : classList) {
+      // do estimate in advance to ensure class is loaded
+      ClassSize.estimateBase(cl, false);
+
+      long startTime = System.currentTimeMillis();
+      ClassSize.estimateBase(cl, false);
+      long endTime = System.currentTimeMillis();
+      assertTrue(endTime - startTime < 5);
+    }
+  }
 }
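The new test warms each class up with one estimateBase call, then asserts that a second call finishes in under 5 ms, so the automatic constants stay cheap once the class is loaded. A hypothetical stand-alone usage along the same lines (Tiny and EstimateBaseDemo are made up for illustration and are not part of this commit):

```java
import org.apache.hadoop.hbase.util.ClassSize;

// Hypothetical demo: the first call pays the class-loading/inspection cost,
// later calls for the same class are expected to be cheap, which is what the
// new testAutoCalcFixedOverHead asserts with its 5 ms bound.
public final class EstimateBaseDemo {

  private static final class Tiny {
    @SuppressWarnings("unused")
    private Object ref;
    @SuppressWarnings("unused")
    private long value;
  }

  public static void main(String[] args) {
    long first = ClassSize.estimateBase(Tiny.class, false);   // loads and inspects the class
    long second = ClassSize.estimateBase(Tiny.class, false);  // fast, deterministic repeat
    System.out.println("estimate=" + first + " stable=" + (first == second));
  }
}
```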