HBASE-27437 TestHeapSize is flaky (#4841)

Signed-off-by: GeorryHuang <huangzhuoyue@apache.org>
(cherry picked from commit dad9a7da92)
This commit is contained in:
Duo Zhang 2022-10-24 10:27:33 +08:00
parent 2716a03230
commit 7b0d705a1a
1 changed file with 29 additions and 13 deletions

View File

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.io;
 
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.lessThan;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
@@ -25,13 +27,16 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentSkipListMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CopyOnWriteArraySet;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -69,7 +74,6 @@ import org.apache.hadoop.hbase.regionserver.throttle.StoreHotnessProtector;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
@@ -602,18 +606,30 @@ public class TestHeapSize {
     }
   }
 
-  @Test
-  public void testAutoCalcFixedOverHead() {
-    Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
-      HFileBlock.class, HStore.class, LruBlockCache.class, StoreContext.class };
-    for (Class cl : classList) {
-      // do estimate in advance to ensure class is loaded
-      ClassSize.estimateBase(cl, false);
-      long startTime = EnvironmentEdgeManager.currentTime();
-      ClassSize.estimateBase(cl, false);
-      long endTime = EnvironmentEdgeManager.currentTime();
-      assertTrue(endTime - startTime < 5);
-    }
+  private long calcFixedOverhead(List<Class<?>> classList) {
+    long overhead = 0;
+    for (Class<?> clazz : classList) {
+      overhead += ClassSize.estimateBase(clazz, false);
+    }
+    return overhead;
+  }
+
+  @Test
+  public void testAutoCalcFixedOverhead() throws InterruptedException {
+    List<Class<?>> classList = Arrays.asList(HFileContext.class, HRegion.class, BlockCacheKey.class,
+      HFileBlock.class, HStore.class, LruBlockCache.class, StoreContext.class);
+    for (int i = 0; i < 10; i++) {
+      // warm up
+      calcFixedOverhead(classList);
+    }
+    long startNs = System.nanoTime();
+    long overhead = 0;
+    for (int i = 0; i < 100; i++) {
+      overhead += calcFixedOverhead(classList);
+    }
+    long costNs = System.nanoTime() - startNs;
+    LOG.info("overhead = {}, cost {} ns", overhead, costNs);
+    // the single computation cost should be less than 5ms
+    assertThat(costNs, lessThan(TimeUnit.MILLISECONDS.toNanos(5) * classList.size() * 100));
   }
 }