Checks the circuit breaker before allocating bytes for a new big array (#25010)

Previously, when allocating bytes for a BigArray, the array was created
(or an attempt was made to create it) and only afterwards was its RAM
usage checked against the circuit breaker to see whether the breaker
should trip.

This is problematic for very large arrays: because the allocation
happens before the circuit breaker is consulted, creating or resizing
the array can run into an OOM error before the breaker ever gets a
chance to trip.

This commit ensures that the circuit breaker is checked before every
big array allocation (note: this does not affect allocations smaller
than 16KB, which use the [Type]ArrayWrapper classes found in
BigArrays.java). If an allocation or resize would cause the circuit
breaker to trip, the breaker now trips before the allocation is
attempted, rather than potentially running into an OOM error from the
JVM.
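In rough outline, the allocation path now asks the breaker to reserve the
estimated bytes first and only allocates once that reservation succeeds. A
minimal standalone sketch of that ordering (SketchBreaker and its byte
accounting are illustrative only, not the Elasticsearch classes):

    import java.util.concurrent.atomic.AtomicLong;

    // Illustrative stand-in for a circuit breaker: reserve first, allocate second.
    final class SketchBreaker {
        private final long limitBytes;
        private final AtomicLong used = new AtomicLong();

        SketchBreaker(long limitBytes) {
            this.limitBytes = limitBytes;
        }

        // Reserve bytes, or fail before anything has been allocated.
        void addEstimateBytesAndMaybeBreak(long bytes) {
            final long newUsed = used.addAndGet(bytes);
            if (newUsed > limitBytes) {
                used.addAndGet(-bytes); // roll back the reservation; nothing was allocated yet
                throw new IllegalStateException("breaker tripped: " + newUsed + " > " + limitBytes);
            }
        }

        byte[] newByteArray(int size) {
            addEstimateBytesAndMaybeBreak(size); // check capacity before allocating
            return new byte[size];               // only reached if the reservation succeeded
        }
    }

With the old ordering the "new byte[size]" step came first, so an oversized
request could hit the JVM's OOM before the breaker ever saw the bytes.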

Closes #24790
Ali Beyad, 2017-06-02 15:16:22 -04:00, committed by GitHub
parent 3cb307462d
commit e024c67561
11 changed files with 180 additions and 58 deletions

AbstractArray.java

@@ -41,7 +41,7 @@ abstract class AbstractArray implements BigArray {
     public final void close() {
         if (closed.compareAndSet(false, true)) {
             try {
-                bigArrays.adjustBreaker(-ramBytesUsed());
+                bigArrays.adjustBreaker(-ramBytesUsed(), true);
             } finally {
                 doClose();
             }

AbstractBigArray.java

@@ -87,6 +87,11 @@ abstract class AbstractBigArray extends AbstractArray {
     @Override
     public final long ramBytesUsed() {
+        return ramBytesEstimated(size);
+    }
+
+    /** Given the size of the array, estimate the number of bytes it will use. */
+    public final long ramBytesEstimated(final long size) {
         // rough approximate, we only take into account the size of the values, not the overhead of the array objects
         return ((long) pageIndex(size - 1) + 1) * pageSize() * numBytesPerElement();
     }

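The estimate above rounds the requested size up to whole pages rather than
counting exact elements. As a worked example, assuming the default 16KB byte
page mentioned in the commit message (16384 bytes) and one byte per element,
a request for 20,000 elements spans two pages, so the estimate is
2 * 16384 = 32,768 bytes. A small sketch of the same arithmetic; the page
size and bytes-per-element are passed in for illustration only:

    // Page-rounded size estimate, mirroring ramBytesEstimated above (for size >= 1).
    static long estimateRamBytes(long size, long pageSize, long bytesPerElement) {
        final long pages = (size - 1) / pageSize + 1; // pageIndex(size - 1) + 1
        return pages * pageSize * bytesPerElement;
    }
    // estimateRamBytes(20_000, 16_384, 1) == 32_768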
BigArrays.java

@@ -25,7 +25,6 @@ import org.apache.lucene.util.RamUsageEstimator;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.breaker.CircuitBreakingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.recycler.Recycler;
@@ -91,7 +90,7 @@ public class BigArrays implements Releasable {
 
     private abstract static class AbstractArrayWrapper extends AbstractArray implements BigArray {
 
-        protected static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ByteArrayWrapper.class);
+        static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ByteArrayWrapper.class);
 
         private final Releasable releasable;
         private final long size;
@@ -377,6 +376,7 @@ public class BigArrays implements Releasable {
         // Checking the breaker is disabled if not specified
         this(new PageCacheRecycler(settings), breakerService, false);
     }
 
     // public for tests
     public BigArrays(PageCacheRecycler recycler, @Nullable final CircuitBreakerService breakerService, boolean checkBreaker) {
         this.checkBreaker = checkBreaker;
@ -392,9 +392,12 @@ public class BigArrays implements Releasable {
/** /**
* Adjust the circuit breaker with the given delta, if the delta is * Adjust the circuit breaker with the given delta, if the delta is
* negative, or checkBreaker is false, the breaker will be adjusted * negative, or checkBreaker is false, the breaker will be adjusted
* without tripping * without tripping. If the data was already created before calling
* this method, and the breaker trips, we add the delta without breaking
* to account for the created data. If the data has not been created yet,
* we do not add the delta to the breaker if it trips.
*/ */
void adjustBreaker(long delta) { void adjustBreaker(final long delta, final boolean isDataAlreadyCreated) {
if (this.breakerService != null) { if (this.breakerService != null) {
CircuitBreaker breaker = this.breakerService.getBreaker(CircuitBreaker.REQUEST); CircuitBreaker breaker = this.breakerService.getBreaker(CircuitBreaker.REQUEST);
if (this.checkBreaker) { if (this.checkBreaker) {
@@ -404,9 +407,11 @@ public class BigArrays implements Releasable {
                 try {
                     breaker.addEstimateBytesAndMaybeBreak(delta, "<reused_arrays>");
                 } catch (CircuitBreakingException e) {
-                    // since we've already created the data, we need to
-                    // add it so closing the stream re-adjusts properly
-                    breaker.addWithoutBreaking(delta);
+                    if (isDataAlreadyCreated) {
+                        // since we've already created the data, we need to
+                        // add it so closing the stream re-adjusts properly
+                        breaker.addWithoutBreaking(delta);
+                    }
                     // re-throw the original exception
                     throw e;
                 }
@@ -435,15 +440,21 @@ public class BigArrays implements Releasable {
 
     private <T extends AbstractBigArray> T resizeInPlace(T array, long newSize) {
         final long oldMemSize = array.ramBytesUsed();
+        assert oldMemSize == array.ramBytesEstimated(array.size) :
+            "ram bytes used should equal that which was previously estimated: ramBytesUsed=" +
+                oldMemSize + ", ramBytesEstimated=" + array.ramBytesEstimated(array.size);
+        final long estimatedIncreaseInBytes = array.ramBytesEstimated(newSize) - oldMemSize;
+        assert estimatedIncreaseInBytes >= 0 :
+            "estimated increase in bytes for resizing should not be negative: " + estimatedIncreaseInBytes;
+        adjustBreaker(estimatedIncreaseInBytes, false);
         array.resize(newSize);
-        adjustBreaker(array.ramBytesUsed() - oldMemSize);
         return array;
     }
 
     private <T extends BigArray> T validate(T array) {
         boolean success = false;
         try {
-            adjustBreaker(array.ramBytesUsed());
+            adjustBreaker(array.ramBytesUsed(), true);
             success = true;
         } finally {
             if (!success) {
@@ -459,16 +470,17 @@ public class BigArrays implements Releasable {
      * @param clearOnResize whether values should be set to 0 on initialization and resize
      */
     public ByteArray newByteArray(long size, boolean clearOnResize) {
-        final ByteArray array;
         if (size > BYTE_PAGE_SIZE) {
-            array = new BigByteArray(size, this, clearOnResize);
+            // when allocating big arrays, we want to first ensure we have the capacity by
+            // checking with the circuit breaker before attempting to allocate
+            adjustBreaker(BigByteArray.estimateRamBytes(size), false);
+            return new BigByteArray(size, this, clearOnResize);
         } else if (size >= BYTE_PAGE_SIZE / 2 && recycler != null) {
             final Recycler.V<byte[]> page = recycler.bytePage(clearOnResize);
-            array = new ByteArrayWrapper(this, page.v(), size, page, clearOnResize);
+            return validate(new ByteArrayWrapper(this, page.v(), size, page, clearOnResize));
         } else {
-            array = new ByteArrayWrapper(this, new byte[(int) size], size, null, clearOnResize);
+            return validate(new ByteArrayWrapper(this, new byte[(int) size], size, null, clearOnResize));
         }
-        return validate(array);
     }
 
     /**
@@ -541,16 +553,17 @@ public class BigArrays implements Releasable {
      * @param clearOnResize whether values should be set to 0 on initialization and resize
      */
     public IntArray newIntArray(long size, boolean clearOnResize) {
-        final IntArray array;
         if (size > INT_PAGE_SIZE) {
-            array = new BigIntArray(size, this, clearOnResize);
+            // when allocating big arrays, we want to first ensure we have the capacity by
+            // checking with the circuit breaker before attempting to allocate
+            adjustBreaker(BigIntArray.estimateRamBytes(size), false);
+            return new BigIntArray(size, this, clearOnResize);
         } else if (size >= INT_PAGE_SIZE / 2 && recycler != null) {
             final Recycler.V<int[]> page = recycler.intPage(clearOnResize);
-            array = new IntArrayWrapper(this, page.v(), size, page, clearOnResize);
+            return validate(new IntArrayWrapper(this, page.v(), size, page, clearOnResize));
         } else {
-            array = new IntArrayWrapper(this, new int[(int) size], size, null, clearOnResize);
+            return validate(new IntArrayWrapper(this, new int[(int) size], size, null, clearOnResize));
         }
-        return validate(array);
     }
 
     /**
@@ -591,16 +604,17 @@ public class BigArrays implements Releasable {
      * @param clearOnResize whether values should be set to 0 on initialization and resize
      */
     public LongArray newLongArray(long size, boolean clearOnResize) {
-        final LongArray array;
         if (size > LONG_PAGE_SIZE) {
-            array = new BigLongArray(size, this, clearOnResize);
+            // when allocating big arrays, we want to first ensure we have the capacity by
+            // checking with the circuit breaker before attempting to allocate
+            adjustBreaker(BigLongArray.estimateRamBytes(size), false);
+            return new BigLongArray(size, this, clearOnResize);
         } else if (size >= LONG_PAGE_SIZE / 2 && recycler != null) {
            final Recycler.V<long[]> page = recycler.longPage(clearOnResize);
-            array = new LongArrayWrapper(this, page.v(), size, page, clearOnResize);
+            return validate(new LongArrayWrapper(this, page.v(), size, page, clearOnResize));
         } else {
-            array = new LongArrayWrapper(this, new long[(int) size], size, null, clearOnResize);
+            return validate(new LongArrayWrapper(this, new long[(int) size], size, null, clearOnResize));
         }
-        return validate(array);
     }
 
     /**
@@ -641,16 +655,17 @@ public class BigArrays implements Releasable {
      * @param clearOnResize whether values should be set to 0 on initialization and resize
      */
     public DoubleArray newDoubleArray(long size, boolean clearOnResize) {
-        final DoubleArray arr;
         if (size > LONG_PAGE_SIZE) {
-            arr = new BigDoubleArray(size, this, clearOnResize);
+            // when allocating big arrays, we want to first ensure we have the capacity by
+            // checking with the circuit breaker before attempting to allocate
+            adjustBreaker(BigDoubleArray.estimateRamBytes(size), false);
+            return new BigDoubleArray(size, this, clearOnResize);
         } else if (size >= LONG_PAGE_SIZE / 2 && recycler != null) {
             final Recycler.V<long[]> page = recycler.longPage(clearOnResize);
-            arr = new DoubleArrayWrapper(this, page.v(), size, page, clearOnResize);
+            return validate(new DoubleArrayWrapper(this, page.v(), size, page, clearOnResize));
         } else {
-            arr = new DoubleArrayWrapper(this, new long[(int) size], size, null, clearOnResize);
+            return validate(new DoubleArrayWrapper(this, new long[(int) size], size, null, clearOnResize));
         }
-        return validate(arr);
     }
 
     /** Allocate a new {@link DoubleArray} of the given capacity. */
@@ -688,16 +703,17 @@ public class BigArrays implements Releasable {
      * @param clearOnResize whether values should be set to 0 on initialization and resize
      */
     public FloatArray newFloatArray(long size, boolean clearOnResize) {
-        final FloatArray array;
         if (size > INT_PAGE_SIZE) {
-            array = new BigFloatArray(size, this, clearOnResize);
+            // when allocating big arrays, we want to first ensure we have the capacity by
+            // checking with the circuit breaker before attempting to allocate
+            adjustBreaker(BigFloatArray.estimateRamBytes(size), false);
+            return new BigFloatArray(size, this, clearOnResize);
         } else if (size >= INT_PAGE_SIZE / 2 && recycler != null) {
             final Recycler.V<int[]> page = recycler.intPage(clearOnResize);
-            array = new FloatArrayWrapper(this, page.v(), size, page, clearOnResize);
+            return validate(new FloatArrayWrapper(this, page.v(), size, page, clearOnResize));
         } else {
-            array = new FloatArrayWrapper(this, new int[(int) size], size, null, clearOnResize);
+            return validate(new FloatArrayWrapper(this, new int[(int) size], size, null, clearOnResize));
         }
-        return validate(array);
     }
 
     /** Allocate a new {@link FloatArray} of the given capacity. */
@@ -736,14 +752,16 @@ public class BigArrays implements Releasable {
     public <T> ObjectArray<T> newObjectArray(long size) {
         final ObjectArray<T> array;
         if (size > OBJECT_PAGE_SIZE) {
-            array = new BigObjectArray<>(size, this);
+            // when allocating big arrays, we want to first ensure we have the capacity by
+            // checking with the circuit breaker before attempting to allocate
+            adjustBreaker(BigObjectArray.estimateRamBytes(size), false);
+            return new BigObjectArray<>(size, this);
         } else if (size >= OBJECT_PAGE_SIZE / 2 && recycler != null) {
             final Recycler.V<Object[]> page = recycler.objectPage();
-            array = new ObjectArrayWrapper<>(this, page.v(), size, page);
+            return validate(new ObjectArrayWrapper<>(this, page.v(), size, page));
         } else {
-            array = new ObjectArrayWrapper<>(this, new Object[(int) size], size, null);
+            return validate(new ObjectArrayWrapper<>(this, new Object[(int) size], size, null));
         }
-        return validate(array);
     }
 
     /** Resize the array to the exact provided size. */

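From a caller's perspective the effect of the changes above is where the
failure surfaces: an oversized request now fails with a
CircuitBreakingException raised by the breaker check, before any pages are
allocated, instead of risking an OutOfMemoryError during allocation. A
sketch along the lines of the test setup in this commit (the class name,
limit, and requested size are chosen for illustration):

    import org.elasticsearch.common.breaker.CircuitBreakingException;
    import org.elasticsearch.common.settings.ClusterSettings;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.unit.ByteSizeUnit;
    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.common.util.ByteArray;
    import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;

    public class BigArrayBreakerExample {
        public static void main(String[] args) {
            // request breaker limited to 1MB
            final long limit = 1024 * 1024;
            HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService(
                Settings.builder()
                    .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(),
                        limit, ByteSizeUnit.BYTES)
                    .build(),
                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
            BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking();

            try (ByteArray array = bigArrays.newByteArray(8 * limit)) { // 8MB, well over the limit
                // never reached for an oversized request
            } catch (CircuitBreakingException e) {
                // with this change the exception is thrown before any page is allocated
            }
        }
    }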
BigByteArray.java

@@ -33,6 +33,8 @@ import static org.elasticsearch.common.util.BigArrays.BYTE_PAGE_SIZE;
  */
 final class BigByteArray extends AbstractBigArray implements ByteArray {
 
+    private static final BigByteArray ESTIMATOR = new BigByteArray(0, BigArrays.NON_RECYCLING_INSTANCE, false);
+
     private byte[][] pages;
 
     /** Constructor. */
@@ -44,7 +46,7 @@ final class BigByteArray extends AbstractBigArray implements ByteArray {
             pages[i] = newBytePage(i);
         }
     }
 
     @Override
     public byte get(long index) {
         final int pageIndex = pageIndex(index);
@@ -147,4 +149,9 @@ final class BigByteArray extends AbstractBigArray implements ByteArray {
         this.size = newSize;
     }
 
+    /** Estimates the number of bytes that would be consumed by an array of the given size. */
+    public static long estimateRamBytes(final long size) {
+        return ESTIMATOR.ramBytesEstimated(size);
+    }
+
 }

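Each Big[Type]Array in this commit (the remaining files below repeat the
same pattern) keeps a private static zero-size ESTIMATOR instance whose only
job is to answer ramBytesEstimated for a hypothetical size, so BigArrays can
price an allocation before any real array exists. A generic sketch of the
idiom; PagedThing and its page size are illustrative, not Elasticsearch
classes:

    // A zero-size "estimator" instance lets callers price an allocation
    // without performing it.
    final class PagedThing {
        private static final long PAGE_SIZE = 16_384; // assumed page size, for illustration
        private static final PagedThing ESTIMATOR = new PagedThing(0); // allocates nothing

        private final long size;

        PagedThing(long size) {
            this.size = size;
            // a real implementation would allocate size / PAGE_SIZE pages here
        }

        // Page-rounded estimate; depends only on the argument, not on this instance's size.
        long ramBytesEstimated(long forSize) {
            final long pages = forSize == 0 ? 0 : (forSize - 1) / PAGE_SIZE + 1;
            return pages * PAGE_SIZE;
        }

        static long estimateRamBytes(long size) {
            return ESTIMATOR.ramBytesEstimated(size);
        }
    }

This is presumably also why the ESTIMATOR fields are built against
BigArrays.NON_RECYCLING_INSTANCE: constructing the zero-size instance should
not itself go through a breaker or recycler.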
BigDoubleArray.java

@@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.LONG_PAGE_SIZE;
  */
 final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
 
+    private static final BigDoubleArray ESTIMATOR = new BigDoubleArray(0, BigArrays.NON_RECYCLING_INSTANCE, false);
+
     private long[][] pages;
 
     /** Constructor. */
@@ -110,4 +112,9 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
         }
     }
 
+    /** Estimates the number of bytes that would be consumed by an array of the given size. */
+    public static long estimateRamBytes(final long size) {
+        return ESTIMATOR.ramBytesEstimated(size);
+    }
+
 }

BigFloatArray.java

@@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.INT_PAGE_SIZE;
  */
 final class BigFloatArray extends AbstractBigArray implements FloatArray {
 
+    private static final BigFloatArray ESTIMATOR = new BigFloatArray(0, BigArrays.NON_RECYCLING_INSTANCE, false);
+
     private int[][] pages;
 
     /** Constructor. */
@@ -110,4 +112,9 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
         }
     }
 
+    /** Estimates the number of bytes that would be consumed by an array of the given size. */
+    public static long estimateRamBytes(final long size) {
+        return ESTIMATOR.ramBytesEstimated(size);
+    }
+
 }

BigIntArray.java

@@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.INT_PAGE_SIZE;
  */
 final class BigIntArray extends AbstractBigArray implements IntArray {
 
+    private static final BigIntArray ESTIMATOR = new BigIntArray(0, BigArrays.NON_RECYCLING_INSTANCE, false);
+
     private int[][] pages;
 
     /** Constructor. */
@@ -108,4 +110,9 @@ final class BigIntArray extends AbstractBigArray implements IntArray {
         this.size = newSize;
     }
 
+    /** Estimates the number of bytes that would be consumed by an array of the given size. */
+    public static long estimateRamBytes(final long size) {
+        return ESTIMATOR.ramBytesEstimated(size);
+    }
+
 }

BigLongArray.java

@@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.LONG_PAGE_SIZE;
  */
 final class BigLongArray extends AbstractBigArray implements LongArray {
 
+    private static final BigLongArray ESTIMATOR = new BigLongArray(0, BigArrays.NON_RECYCLING_INSTANCE, false);
+
     private long[][] pages;
 
     /** Constructor. */
@@ -111,4 +113,9 @@ final class BigLongArray extends AbstractBigArray implements LongArray {
         }
     }
 
+    /** Estimates the number of bytes that would be consumed by an array of the given size. */
+    public static long estimateRamBytes(final long size) {
+        return ESTIMATOR.ramBytesEstimated(size);
+    }
+
 }

BigObjectArray.java

@@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.OBJECT_PAGE_SIZE;
  */
 final class BigObjectArray<T> extends AbstractBigArray implements ObjectArray<T> {
 
+    private static final BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE);
+
     private Object[][] pages;
 
     /** Constructor. */
@@ -85,4 +87,9 @@ final class BigObjectArray<T> extends AbstractBigArray implements ObjectArray<T> {
         this.size = newSize;
     }
 
+    /** Estimates the number of bytes that would be consumed by an array of the given size. */
+    public static long estimateRamBytes(final long size) {
+        return ESTIMATOR.ramBytesEstimated(size);
+    }
+
 }

BigArraysTests.java

@@ -33,6 +33,11 @@ import org.junit.Before;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.Arrays;
+import java.util.List;
+import java.util.function.Function;
+
+import static org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 
 public class BigArraysTests extends ESTestCase {
@@ -330,22 +335,17 @@ public class BigArraysTests extends ESTestCase {
     }
 
     public void testMaxSizeExceededOnNew() throws Exception {
-        final int size = scaledRandomIntBetween(5, 1 << 22);
-        for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) {
-            HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService(
-                Settings.builder()
-                    .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), size - 1, ByteSizeUnit.BYTES)
-                    .build(),
-                new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
-            BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking();
-            Method create = BigArrays.class.getMethod("new" + type + "Array", long.class);
+        final long size = scaledRandomIntBetween(5, 1 << 22);
+        final long maxSize = size - 1;
+        for (BigArraysHelper bigArraysHelper : bigArrayCreators(maxSize, true)) {
             try {
-                create.invoke(bigArrays, size);
-                fail("expected an exception on " + create);
-            } catch (InvocationTargetException e) {
-                assertTrue(e.getCause() instanceof CircuitBreakingException);
+                bigArraysHelper.arrayAllocator.apply(size);
+                fail("circuit breaker should trip");
+            } catch (CircuitBreakingException e) {
+                assertEquals(maxSize, e.getByteLimit());
+                assertThat(e.getBytesWanted(), greaterThanOrEqualTo(size));
             }
-            assertEquals(0, hcbs.getBreaker(CircuitBreaker.REQUEST).getUsed());
+            assertEquals(0, bigArraysHelper.bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed());
         }
     }
@@ -354,7 +354,7 @@ public class BigArraysTests extends ESTestCase {
         final long maxSize = randomIntBetween(1 << 10, 1 << 22);
         HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService(
             Settings.builder()
-                .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES)
+                .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES)
                 .build(),
             new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
         BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking();
@@ -377,4 +377,63 @@ public class BigArraysTests extends ESTestCase {
         }
     }
 
+    public void testEstimatedBytesSameAsActualBytes() throws Exception {
+        final int maxSize = 1 << scaledRandomIntBetween(15, 22);
+        final long size = randomIntBetween((1 << 14) + 1, maxSize);
+        for (final BigArraysHelper bigArraysHelper : bigArrayCreators(maxSize, false)) {
+            final BigArray bigArray = bigArraysHelper.arrayAllocator.apply(size);
+            assertEquals(bigArraysHelper.ramEstimator.apply(size).longValue(), bigArray.ramBytesUsed());
+        }
+    }
+
+    private List<BigArraysHelper> bigArrayCreators(final long maxSize, final boolean withBreaking) {
+        final BigArrays byteBigArrays = newBigArraysInstance(maxSize, withBreaking);
+        BigArraysHelper byteHelper = new BigArraysHelper(byteBigArrays,
+            (Long size) -> byteBigArrays.newByteArray(size),
+            (Long size) -> BigByteArray.estimateRamBytes(size));
+        final BigArrays intBigArrays = newBigArraysInstance(maxSize, withBreaking);
+        BigArraysHelper intHelper = new BigArraysHelper(intBigArrays,
+            (Long size) -> intBigArrays.newIntArray(size),
+            (Long size) -> BigIntArray.estimateRamBytes(size));
+        final BigArrays longBigArrays = newBigArraysInstance(maxSize, withBreaking);
+        BigArraysHelper longHelper = new BigArraysHelper(longBigArrays,
+            (Long size) -> longBigArrays.newLongArray(size),
+            (Long size) -> BigLongArray.estimateRamBytes(size));
+        final BigArrays floatBigArrays = newBigArraysInstance(maxSize, withBreaking);
+        BigArraysHelper floatHelper = new BigArraysHelper(floatBigArrays,
+            (Long size) -> floatBigArrays.newFloatArray(size),
+            (Long size) -> BigFloatArray.estimateRamBytes(size));
+        final BigArrays doubleBigArrays = newBigArraysInstance(maxSize, withBreaking);
+        BigArraysHelper doubleHelper = new BigArraysHelper(doubleBigArrays,
+            (Long size) -> doubleBigArrays.newDoubleArray(size),
+            (Long size) -> BigDoubleArray.estimateRamBytes(size));
+        final BigArrays objectBigArrays = newBigArraysInstance(maxSize, withBreaking);
+        BigArraysHelper objectHelper = new BigArraysHelper(objectBigArrays,
+            (Long size) -> objectBigArrays.newObjectArray(size),
+            (Long size) -> BigObjectArray.estimateRamBytes(size));
+        return Arrays.asList(byteHelper, intHelper, longHelper, floatHelper, doubleHelper, objectHelper);
+    }
+
+    private BigArrays newBigArraysInstance(final long maxSize, final boolean withBreaking) {
+        HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService(
+            Settings.builder()
+                .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES)
+                .build(),
+            new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
+        BigArrays bigArrays = new BigArrays(null, hcbs, false);
+        return (withBreaking ? bigArrays.withCircuitBreaking() : bigArrays);
+    }
+
+    private static class BigArraysHelper {
+        final BigArrays bigArrays;
+        final Function<Long, BigArray> arrayAllocator;
+        final Function<Long, Long> ramEstimator;
+
+        BigArraysHelper(BigArrays bigArrays, Function<Long, BigArray> arrayAllocator, Function<Long, Long> ramEstimator) {
+            this.bigArrays = bigArrays;
+            this.arrayAllocator = arrayAllocator;
+            this.ramEstimator = ramEstimator;
+        }
+    }
 }

(additional changed file)

@@ -34,8 +34,6 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
-import java.util.Set;
-import java.util.WeakHashMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicReference;