Internal: clean up memory reuse a bit.

- don't allow soft references in the recycler anymore
- remove some abusive thread locals
- don't recycle float/double and int/long pages independently; they are the
  same and just interpret bits differently.

Close #9272
Adrien Grand, 2015-01-13 11:43:13 +01:00
commit a56520d26d (parent d9165dfe73)
11 changed files with 64 additions and 226 deletions
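
For context on the last bullet: Double.doubleToRawLongBits/Double.longBitsToDouble and Float.floatToRawIntBits/Float.intBitsToFloat are exact inverses, so a long[] page can carry double values and an int[] page can carry float values with no loss. A minimal standalone illustration (mine, not part of the commit):

    public class BitReinterpretDemo {
        public static void main(String[] args) {
            double d = -1.25e-3;
            long bits = Double.doubleToRawLongBits(d); // reinterpret the 64 bits, no rounding
            assert Double.longBitsToDouble(bits) == d; // exact round-trip

            float f = 3.5f;
            int ibits = Float.floatToRawIntBits(f);    // same trick for 32-bit floats
            assert Float.intBitsToFloat(ibits) == f;

            System.out.println(bits + " / " + ibits);
        }
    }

Run with java -ea so the assertions execute.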

PageCacheRecycler.java

@ -46,16 +46,12 @@ public class PageCacheRecycler extends AbstractComponent {
private final Recycler<byte[]> bytePage;
private final Recycler<int[]> intPage;
private final Recycler<long[]> longPage;
private final Recycler<float[]> floatPage;
private final Recycler<double[]> doublePage;
private final Recycler<Object[]> objectPage;
public void close() {
bytePage.close();
intPage.close();
longPage.close();
floatPage.close();
doublePage.close();
objectPage.close();
}
@ -104,12 +100,10 @@ public class PageCacheRecycler extends AbstractComponent {
final double bytesWeight = componentSettings.getAsDouble(WEIGHT + ".bytes", 1d);
final double intsWeight = componentSettings.getAsDouble(WEIGHT + ".ints", 1d);
final double longsWeight = componentSettings.getAsDouble(WEIGHT + ".longs", 1d);
final double floatsWeight = componentSettings.getAsDouble(WEIGHT + ".floats", 1d);
final double doublesWeight = componentSettings.getAsDouble(WEIGHT + ".doubles", 1d);
// object pages are less useful to us so we give them a lower weight by default
final double objectsWeight = componentSettings.getAsDouble(WEIGHT + ".objects", 0.1d);
final double totalWeight = bytesWeight + intsWeight + longsWeight + doublesWeight + objectsWeight;
final double totalWeight = bytesWeight + intsWeight + longsWeight + objectsWeight;
bytePage = build(type, maxCount(limit, BigArrays.BYTE_PAGE_SIZE, bytesWeight, totalWeight), searchThreadPoolSize, availableProcessors, new AbstractRecyclerC<byte[]>() {
@Override
@ -141,26 +135,6 @@ public class PageCacheRecycler extends AbstractComponent {
// nothing to do
}
});
floatPage = build(type, maxCount(limit, BigArrays.FLOAT_PAGE_SIZE, floatsWeight, totalWeight), searchThreadPoolSize, availableProcessors, new AbstractRecyclerC<float[]>() {
@Override
public float[] newInstance(int sizing) {
return new float[BigArrays.FLOAT_PAGE_SIZE];
}
@Override
public void recycle(float[] value) {
// nothing to do
}
});
doublePage = build(type, maxCount(limit, BigArrays.DOUBLE_PAGE_SIZE, doublesWeight, totalWeight), searchThreadPoolSize, availableProcessors, new AbstractRecyclerC<double[]>() {
@Override
public double[] newInstance(int sizing) {
return new double[BigArrays.DOUBLE_PAGE_SIZE];
}
@Override
public void recycle(double[] value) {
// nothing to do
}
});
objectPage = build(type, maxCount(limit, BigArrays.OBJECT_PAGE_SIZE, objectsWeight, totalWeight), searchThreadPoolSize, availableProcessors, new AbstractRecyclerC<Object[]>() {
@Override
public Object[] newInstance(int sizing) {
@ -197,22 +171,6 @@ public class PageCacheRecycler extends AbstractComponent {
return v;
}
public Recycler.V<float[]> floatPage(boolean clear) {
final Recycler.V<float[]> v = floatPage.obtain();
if (v.isRecycled() && clear) {
Arrays.fill(v.v(), 0f);
}
return v;
}
public Recycler.V<double[]> doublePage(boolean clear) {
final Recycler.V<double[]> v = doublePage.obtain();
if (v.isRecycled() && clear) {
Arrays.fill(v.v(), 0d);
}
return v;
}
public Recycler.V<Object[]> objectPage() {
// object pages are cleared on release anyway
return objectPage.obtain();
@ -235,12 +193,6 @@ public class PageCacheRecycler extends AbstractComponent {
return concurrentDeque(c, limit);
}
},
SOFT_CONCURRENT {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) {
return concurrent(softFactory(dequeFactory(c, limit / availableProcessors)), availableProcessors);
}
},
CONCURRENT {
@Override
<T> Recycler<T> build(Recycler.C<T> c, int limit, int estimatedThreadPoolSize, int availableProcessors) {

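With dedicated float and double pages gone, a caller that needs a page of doubles takes a long page and converts at the boundary. A hypothetical usage sketch: the PageCacheRecycler import path is assumed, while longPage(boolean) and V.close() are inferred from call sites visible elsewhere in this diff:

    import org.elasticsearch.cache.recycler.PageCacheRecycler; // assumed package
    import org.elasticsearch.common.recycler.Recycler;

    public class PageUsageSketch {
        // Store one double in a recycled long page, then hand the page back.
        static void useLongPage(PageCacheRecycler recycler) {
            final Recycler.V<long[]> page = recycler.longPage(true); // true: zero the page if it was recycled
            try {
                page.v()[0] = Double.doubleToRawLongBits(42.0); // doubles now live in long pages
            } finally {
                page.close(); // return the page to the recycler for reuse
            }
        }
    }
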
Recyclers.java

@ -91,45 +91,6 @@ public enum Recyclers {
};
}
/**
* Create a recycler that is wrapped inside a soft reference, so that it cannot cause {@link OutOfMemoryError}s.
*/
public static <T> Recycler<T> soft(final Recycler.Factory<T> factory) {
return new FilterRecycler<T>() {
SoftReference<Recycler<T>> ref;
{
ref = new SoftReference<>(null);
}
@Override
protected Recycler<T> getDelegate() {
Recycler<T> recycler = ref.get();
if (recycler == null) {
recycler = factory.build();
ref = new SoftReference<>(recycler);
}
return recycler;
}
};
}
/**
* Create a recycler that wraps data in a SoftReference.
*
* @see #soft(org.elasticsearch.common.recycler.Recycler.Factory)
*/
public static <T> Recycler.Factory<T> softFactory(final Recycler.Factory<T> factory) {
return new Recycler.Factory<T>() {
@Override
public Recycler<T> build() {
return soft(factory);
}
};
}
/**
* Wrap the provided recycler so that calls to {@link Recycler#obtain()} and {@link Recycler.V#close()} are protected by
* a lock.
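
soft() cached its delegate recycler behind a SoftReference and rebuilt it whenever the GC cleared the reference; it was removed because softly reachable pools can be reclaimed at arbitrary times, which makes recycling behavior unpredictable. A generic sketch of the idiom (mine, not the removed Elasticsearch code; unlike the removed version it synchronizes the rebuild):

    import java.lang.ref.SoftReference;
    import java.util.function.Supplier;

    final class SoftCache<T> {
        private final Supplier<T> factory;
        private SoftReference<T> ref = new SoftReference<>(null);

        SoftCache(Supplier<T> factory) { this.factory = factory; }

        synchronized T get() {
            T value = ref.get();
            if (value == null) {          // cleared under memory pressure, or never built
                value = factory.get();
                ref = new SoftReference<>(value);
            }
            return value;
        }
    }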

AbstractBigArray.java

@ -32,8 +32,6 @@ import java.util.Arrays;
/** Common implementation for array lists that slice data into fixed-size blocks. */
abstract class AbstractBigArray extends AbstractArray {
private static final long EMPTY_SIZE = RamUsageEstimator.shallowSizeOfInstance(AbstractBigArray.class) + RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
private final PageCacheRecycler recycler;
private Recycler.V<?>[] cache;
@ -130,24 +128,6 @@ abstract class AbstractBigArray extends AbstractArray {
}
}
protected final float[] newFloatPage(int page) {
if (recycler != null) {
final Recycler.V<float[]> v = recycler.floatPage(clearOnResize);
return registerNewPage(v, page, BigArrays.FLOAT_PAGE_SIZE);
} else {
return new float[BigArrays.FLOAT_PAGE_SIZE];
}
}
protected final double[] newDoublePage(int page) {
if (recycler != null) {
final Recycler.V<double[]> v = recycler.doublePage(clearOnResize);
return registerNewPage(v, page, BigArrays.DOUBLE_PAGE_SIZE);
} else {
return new double[BigArrays.DOUBLE_PAGE_SIZE];
}
}
protected final Object[] newObjectPage(int page) {
if (recycler != null) {
final Recycler.V<Object[]> v = recycler.objectPage();

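BigDoubleArray and BigFloatArray now allocate through the surviving int/long page allocators. newLongPage is not shown in this diff, but by symmetry with the removed methods it presumably has this shape:

    // Presumed shape of AbstractBigArray#newLongPage, mirroring the removed
    // newFloatPage/newDoublePage above.
    protected final long[] newLongPage(int page) {
        if (recycler != null) {
            final Recycler.V<long[]> v = recycler.longPage(clearOnResize);
            return registerNewPage(v, page, BigArrays.LONG_PAGE_SIZE);
        } else {
            return new long[BigArrays.LONG_PAGE_SIZE];
        }
    }
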
BigArrays.java

@ -37,7 +37,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicLong;
/** Utility class to work with arrays. */
public class BigArrays extends AbstractComponent {
@ -48,9 +47,7 @@ public class BigArrays extends AbstractComponent {
public static final int PAGE_SIZE_IN_BYTES = 1 << 14;
public static final int BYTE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_BYTE;
public static final int INT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_INT;
public static final int FLOAT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_FLOAT;
public static final int LONG_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_LONG;
public static final int DOUBLE_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_DOUBLE;
public static final int OBJECT_PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES / RamUsageEstimator.NUM_BYTES_OBJECT_REF;
/** Returns the next size to grow when working with parallel arrays that may have different page sizes or number of bytes per element. */
@ -247,9 +244,9 @@ public class BigArrays extends AbstractComponent {
private static class DoubleArrayWrapper extends AbstractArrayWrapper implements DoubleArray {
private final double[] array;
private final long[] array;
DoubleArrayWrapper(BigArrays bigArrays, double[] array, long size, Recycler.V<double[]> releasable, boolean clearOnResize) {
DoubleArrayWrapper(BigArrays bigArrays, long[] array, long size, Recycler.V<long[]> releasable, boolean clearOnResize) {
super(bigArrays, size, releasable, clearOnResize);
this.array = array;
}
@ -262,37 +259,37 @@ public class BigArrays extends AbstractComponent {
@Override
public double get(long index) {
assert indexIsInt(index);
return array[(int) index];
return Double.longBitsToDouble(array[(int) index]);
}
@Override
public double set(long index, double value) {
assert indexIsInt(index);
double ret = array[(int) index];
array[(int) index] = value;
double ret = Double.longBitsToDouble(array[(int) index]);
array[(int) index] = Double.doubleToRawLongBits(value);
return ret;
}
@Override
public double increment(long index, double inc) {
assert indexIsInt(index);
return array[(int) index] += inc;
final double newVal = Double.longBitsToDouble(array[(int) index]) + inc;
array[(int) index] = Double.doubleToRawLongBits(newVal);
return newVal;
}
@Override
public void fill(long fromIndex, long toIndex, double value) {
assert indexIsInt(fromIndex);
assert indexIsInt(toIndex);
Arrays.fill(array, (int) fromIndex, (int) toIndex, value);
Arrays.fill(array, (int) fromIndex, (int) toIndex, Double.doubleToRawLongBits(value));
}
}
private static class FloatArrayWrapper extends AbstractArrayWrapper implements FloatArray {
private final float[] array;
private final int[] array;
FloatArrayWrapper(BigArrays bigArrays, float[] array, long size, Recycler.V<float[]> releasable, boolean clearOnResize) {
FloatArrayWrapper(BigArrays bigArrays, int[] array, long size, Recycler.V<int[]> releasable, boolean clearOnResize) {
super(bigArrays, size, releasable, clearOnResize);
this.array = array;
}
@ -305,28 +302,28 @@ public class BigArrays extends AbstractComponent {
@Override
public float get(long index) {
assert indexIsInt(index);
return array[(int) index];
return Float.intBitsToFloat(array[(int) index]);
}
@Override
public float set(long index, float value) {
assert indexIsInt(index);
float ret = array[(int) index];
array[(int) index] = value;
float ret = Float.intBitsToFloat(array[(int) index]);
array[(int) index] = Float.floatToRawIntBits(value);
return ret;
}
@Override
public float increment(long index, float inc) {
assert indexIsInt(index);
return array[(int) index] += inc;
final float newVal = Float.intBitsToFloat(array[(int) index]) + inc;
array[(int) index] = Float.floatToRawIntBits(newVal);
return newVal;
}
@Override
public void fill(long fromIndex, long toIndex, float value) {
assert indexIsInt(fromIndex);
assert indexIsInt(toIndex);
Arrays.fill(array, (int) fromIndex, (int) toIndex, value);
Arrays.fill(array, (int) fromIndex, (int) toIndex, Float.floatToRawIntBits(value));
}
}
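
Both wrappers now store raw IEEE 754 bit patterns and convert on every access. Using the raw conversions (doubleToRawLongBits rather than doubleToLongBits) preserves NaN payloads, and the round-trip is lossless for every value, including those where == comparison misbehaves. A quick standalone check (mine):

    public class BitStorageCheck {
        public static void main(String[] args) {
            double[] samples = { 0.0, -0.0, 1.5, Double.NaN, Double.POSITIVE_INFINITY };
            long[] storage = new long[samples.length]; // what DoubleArrayWrapper now holds
            for (int i = 0; i < samples.length; i++) {
                storage[i] = Double.doubleToRawLongBits(samples[i]);
                // Double.compare treats NaN as equal to itself, so this checks bit fidelity
                assert Double.compare(Double.longBitsToDouble(storage[i]), samples[i]) == 0;
            }
            System.out.println("lossless round-trip for " + samples.length + " values");
        }
    }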
@ -629,13 +626,13 @@ public class BigArrays extends AbstractComponent {
*/
public DoubleArray newDoubleArray(long size, boolean clearOnResize) {
final DoubleArray arr;
if (size > DOUBLE_PAGE_SIZE) {
if (size > LONG_PAGE_SIZE) {
arr = new BigDoubleArray(size, this, clearOnResize);
} else if (size >= DOUBLE_PAGE_SIZE / 2 && recycler != null) {
final Recycler.V<double[]> page = recycler.doublePage(clearOnResize);
} else if (size >= LONG_PAGE_SIZE / 2 && recycler != null) {
final Recycler.V<long[]> page = recycler.longPage(clearOnResize);
arr = new DoubleArrayWrapper(this, page.v(), size, page, clearOnResize);
} else {
arr = new DoubleArrayWrapper(this, new double[(int) size], size, null, clearOnResize);
arr = new DoubleArrayWrapper(this, new long[(int) size], size, null, clearOnResize);
}
return validate(arr);
}
@ -665,7 +662,7 @@ public class BigArrays extends AbstractComponent {
if (minSize <= array.size()) {
return array;
}
final long newSize = overSize(minSize, DOUBLE_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_DOUBLE);
final long newSize = overSize(minSize, LONG_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_LONG);
return resize(array, newSize);
}
@ -676,13 +673,13 @@ public class BigArrays extends AbstractComponent {
*/
public FloatArray newFloatArray(long size, boolean clearOnResize) {
final FloatArray array;
if (size > FLOAT_PAGE_SIZE) {
if (size > INT_PAGE_SIZE) {
array = new BigFloatArray(size, this, clearOnResize);
} else if (size >= FLOAT_PAGE_SIZE / 2 && recycler != null) {
final Recycler.V<float[]> page = recycler.floatPage(clearOnResize);
} else if (size >= INT_PAGE_SIZE / 2 && recycler != null) {
final Recycler.V<int[]> page = recycler.intPage(clearOnResize);
array = new FloatArrayWrapper(this, page.v(), size, page, clearOnResize);
} else {
array = new FloatArrayWrapper(this, new float[(int) size], size, null, clearOnResize);
array = new FloatArrayWrapper(this, new int[(int) size], size, null, clearOnResize);
}
return validate(array);
}
@ -712,7 +709,7 @@ public class BigArrays extends AbstractComponent {
if (minSize <= array.size()) {
return array;
}
final long newSize = overSize(minSize, FLOAT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_FLOAT);
final long newSize = overSize(minSize, INT_PAGE_SIZE, RamUsageEstimator.NUM_BYTES_FLOAT);
return resize(array, newSize);
}
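
The size thresholds above follow from PAGE_SIZE_IN_BYTES = 1 << 14: every page is 16 KiB, so a long page holds 2048 doubles and an int page holds 4096 floats. newDoubleArray therefore goes paged above 2048 elements, borrows a single recycled long page from 1024 through 2048 elements, and falls back to a plain allocation below that; newFloatArray does the same at 4096 and 2048. The arithmetic as a standalone snippet:

    public class PageSizeMath {
        public static void main(String[] args) {
            final int pageSizeInBytes = 1 << 14;                     // 16 KiB, as in BigArrays
            final int longPageSize = pageSizeInBytes / Long.BYTES;   // 2048 doubles per long page
            final int intPageSize = pageSizeInBytes / Integer.BYTES; // 4096 floats per int page
            System.out.println("LONG_PAGE_SIZE=" + longPageSize + ", INT_PAGE_SIZE=" + intPageSize);
        }
    }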

BigDoubleArray.java

@ -20,12 +20,13 @@
package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
import java.util.Arrays;
import static org.elasticsearch.common.util.BigArrays.DOUBLE_PAGE_SIZE;
import static org.elasticsearch.common.util.BigArrays.LONG_PAGE_SIZE;
/**
* Double array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
@ -33,15 +34,15 @@ import static org.elasticsearch.common.util.BigArrays.DOUBLE_PAGE_SIZE;
*/
final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
private double[][] pages;
private long[][] pages;
/** Constructor. */
public BigDoubleArray(long size, BigArrays bigArrays, boolean clearOnResize) {
super(DOUBLE_PAGE_SIZE, bigArrays, clearOnResize);
super(LONG_PAGE_SIZE, bigArrays, clearOnResize);
this.size = size;
pages = new double[numPages(size)][];
pages = new long[numPages(size)][];
for (int i = 0; i < pages.length; ++i) {
pages[i] = newDoublePage(i);
pages[i] = newLongPage(i);
}
}
@ -49,16 +50,16 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
public double get(long index) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage];
return Double.longBitsToDouble(pages[pageIndex][indexInPage]);
}
@Override
public double set(long index, double value) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
final double[] page = pages[pageIndex];
final double ret = page[indexInPage];
page[indexInPage] = value;
final long[] page = pages[pageIndex];
final double ret = Double.longBitsToDouble(page[indexInPage]);
page[indexInPage] = Double.doubleToRawLongBits(value);
return ret;
}
@ -66,7 +67,8 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
public double increment(long index, double inc) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage] += inc;
final long[] page = pages[pageIndex];
final double newVal = Double.longBitsToDouble(page[indexInPage]) + inc;
page[indexInPage] = Double.doubleToRawLongBits(newVal);
return newVal;
}
@Override
@ -81,7 +83,7 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
pages = Arrays.copyOf(pages, ArrayUtil.oversize(numPages, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
}
for (int i = numPages - 1; i >= 0 && pages[i] == null; --i) {
pages[i] = newDoublePage(i);
pages[i] = newLongPage(i);
}
for (int i = numPages; i < pages.length && pages[i] != null; ++i) {
pages[i] = null;
@ -93,16 +95,17 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
@Override
public void fill(long fromIndex, long toIndex, double value) {
Preconditions.checkArgument(fromIndex <= toIndex);
final long longBits = Double.doubleToRawLongBits(value);
final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1);
if (fromPage == toPage) {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value);
Arrays.fill(pages[fromPage], indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, longBits);
} else {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), pages[fromPage].length, value);
Arrays.fill(pages[fromPage], indexInPage(fromIndex), pages[fromPage].length, longBits);
for (int i = fromPage + 1; i < toPage; ++i) {
Arrays.fill(pages[i], value);
Arrays.fill(pages[i], longBits);
}
Arrays.fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, value);
Arrays.fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, longBits);
}
}
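
pageIndex and indexInPage are inherited from AbstractBigArray and not shown in this diff; with 2048-element pages they presumably reduce to a shift and a mask, along these lines (hypothetical sketch, helper names matching the calls above):

    // index / 2048 and index % 2048, for pages of 2048 = 1 << 11 elements
    static int pageIndex(long index)   { return (int) (index >>> 11); }
    static int indexInPage(long index) { return (int) (index & 2047L); }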

BigFloatArray.java

@ -20,12 +20,13 @@
package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
import java.util.Arrays;
import static org.elasticsearch.common.util.BigArrays.FLOAT_PAGE_SIZE;
import static org.elasticsearch.common.util.BigArrays.INT_PAGE_SIZE;
/**
* Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
@ -33,15 +34,15 @@ import static org.elasticsearch.common.util.BigArrays.FLOAT_PAGE_SIZE;
*/
final class BigFloatArray extends AbstractBigArray implements FloatArray {
private float[][] pages;
private int[][] pages;
/** Constructor. */
public BigFloatArray(long size, BigArrays bigArrays, boolean clearOnResize) {
super(FLOAT_PAGE_SIZE, bigArrays, clearOnResize);
super(INT_PAGE_SIZE, bigArrays, clearOnResize);
this.size = size;
pages = new float[numPages(size)][];
pages = new int[numPages(size)][];
for (int i = 0; i < pages.length; ++i) {
pages[i] = newFloatPage(i);
pages[i] = newIntPage(i);
}
}
@ -49,9 +50,9 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
public float set(long index, float value) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
final float[] page = pages[pageIndex];
final float ret = page[indexInPage];
page[indexInPage] = value;
final int[] page = pages[pageIndex];
final float ret = Float.intBitsToFloat(page[indexInPage]);
page[indexInPage] = Float.floatToRawIntBits(value);
return ret;
}
@ -59,13 +60,14 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
public float increment(long index, float inc) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage] += inc;
final int[] page = pages[pageIndex];
final float newVal = Float.intBitsToFloat(page[indexInPage]) + inc;
page[indexInPage] = Float.floatToRawIntBits(newVal);
return newVal;
}
public float get(long index) {
final int pageIndex = pageIndex(index);
final int indexInPage = indexInPage(index);
return pages[pageIndex][indexInPage];
return Float.intBitsToFloat(pages[pageIndex][indexInPage]);
}
@Override
@ -80,7 +82,7 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
pages = Arrays.copyOf(pages, ArrayUtil.oversize(numPages, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
}
for (int i = numPages - 1; i >= 0 && pages[i] == null; --i) {
pages[i] = newFloatPage(i);
pages[i] = newIntPage(i);
}
for (int i = numPages; i < pages.length && pages[i] != null; ++i) {
pages[i] = null;
@ -92,16 +94,17 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
@Override
public void fill(long fromIndex, long toIndex, float value) {
Preconditions.checkArgument(fromIndex <= toIndex);
final int intBits = Float.floatToRawIntBits(value);
final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1);
if (fromPage == toPage) {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, value);
Arrays.fill(pages[fromPage], indexInPage(fromIndex), indexInPage(toIndex - 1) + 1, intBits);
} else {
Arrays.fill(pages[fromPage], indexInPage(fromIndex), pages[fromPage].length, value);
Arrays.fill(pages[fromPage], indexInPage(fromIndex), pages[fromPage].length, intBits);
for (int i = fromPage + 1; i < toPage; ++i) {
Arrays.fill(pages[i], value);
Arrays.fill(pages[i], intBits);
}
Arrays.fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, value);
Arrays.fill(pages[toPage], 0, indexInPage(toIndex - 1) + 1, intBits);
}
}
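
For callers, the public FloatArray contract is unchanged even though the backing pages are now int[]. A hypothetical round-trip through the API shown in this diff (newFloatArray, set, get), assuming the array is released with close() like the Recycler.V values above:

    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.common.util.FloatArray;

    public class FloatArraySketch {
        static void roundTrip(BigArrays bigArrays) {
            FloatArray arr = bigArrays.newFloatArray(5000, true); // above 4096 elements: paged BigFloatArray
            arr.set(4096, 1.5f);
            assert arr.get(4096) == 1.5f; // bits converted back transparently
            arr.close(); // release pages back to the recycler
        }
    }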

AbstractFieldMapper.java

@ -273,12 +273,6 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
}
private static final ThreadLocal<List<Field>> FIELD_LIST = new ThreadLocal<List<Field>>() {
protected List<Field> initialValue() {
return new ArrayList<>(2);
}
};
protected final Names names;
protected float boost;
protected FieldType fieldType;
@ -419,8 +413,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
@Override
public void parse(ParseContext context) throws IOException {
final List<Field> fields = FIELD_LIST.get();
assert fields.isEmpty();
final List<Field> fields = new ArrayList<>(2);
try {
parseCreateField(context, fields);
for (Field field : fields) {
@ -433,8 +426,6 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
} finally {
fields.clear();
}
multiFields.parse(this, context);
if (copyTo != null) {

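This removal is one of the "abusive thread locals" from the commit message: a per-thread scratch list pins memory for the lifetime of every thread that ever parsed a field, while a fresh ArrayList of capacity 2 is a cheap, short-lived allocation that the GC handles well. The two patterns side by side, as a generic sketch with made-up names:

    import java.util.ArrayList;
    import java.util.List;

    class ScratchListPatterns {
        // Removed pattern: one list per thread, cleared after use but retained forever.
        static final ThreadLocal<List<String>> SCRATCH =
                ThreadLocal.withInitial(() -> new ArrayList<>(2));

        // New pattern: allocate, use, let the GC reclaim.
        static List<String> freshScratch() {
            return new ArrayList<>(2);
        }
    }
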
DfsPhase.java

@ -44,13 +44,6 @@ import java.util.Map;
*/
public class DfsPhase implements SearchPhase {
private static ThreadLocal<ObjectOpenHashSet<Term>> cachedTermsSet = new ThreadLocal<ObjectOpenHashSet<Term>>() {
@Override
protected ObjectOpenHashSet<Term> initialValue() {
return new ObjectOpenHashSet<>();
}
};
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return ImmutableMap.of();
@ -61,15 +54,12 @@ public class DfsPhase implements SearchPhase {
}
public void execute(SearchContext context) {
final ObjectOpenHashSet<Term> termsSet = cachedTermsSet.get();
final ObjectOpenHashSet<Term> termsSet = new ObjectOpenHashSet<>();
try {
if (!context.queryRewritten()) {
context.updateRewriteQuery(context.searcher().rewrite(context.query()));
}
if (!termsSet.isEmpty()) {
termsSet.clear();
}
context.query().extractTerms(new DelegateSet(termsSet));
for (RescoreSearchContext rescoreContext : context.rescore()) {
rescoreContext.rescorer().extractTerms(context, rescoreContext, new DelegateSet(termsSet));

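Same reasoning here, with an extra wrinkle: the cached set kept the capacity of the largest query its thread ever executed, and clear() between requests does not shrink it. Allocating per request also removes the isEmpty()/clear() bookkeeping deleted above. A hypothetical illustration of the retention problem, using the same HPPC set type:

    import com.carrotsearch.hppc.ObjectOpenHashSet;

    class TermsSetRetention {
        static final ThreadLocal<ObjectOpenHashSet<String>> CACHED =
                ThreadLocal.withInitial(ObjectOpenHashSet::new);

        static void execute(int termCount) {
            ObjectOpenHashSet<String> set = CACHED.get();
            for (int i = 0; i < termCount; i++) {
                set.add("term" + i); // the backing buffers grow to fit the largest request...
            }
            set.clear(); // ...and clear() keeps that grown capacity for the thread's lifetime
        }
    }
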
RecyclerBenchmark.java

@ -93,8 +93,7 @@ public class RecyclerBenchmark {
.put("none", none(c))
.put("concurrent-queue", concurrentDeque(c, limit))
.put("locked", locked(deque(c, limit)))
.put("concurrent", concurrent(dequeFactory(c, limit), Runtime.getRuntime().availableProcessors()))
.put("soft-concurrent", concurrent(softFactory(dequeFactory(c, limit)), Runtime.getRuntime().availableProcessors())).build();
.put("concurrent", concurrent(dequeFactory(c, limit), Runtime.getRuntime().availableProcessors())).build();
// warmup
final long start = System.nanoTime();

SoftConcurrentRecyclerTests.java (deleted)

@ -1,29 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.recycler;
public class SoftConcurrentRecyclerTests extends AbstractRecyclerTests {
@Override
protected Recycler<byte[]> newRecycler(int limit) {
return Recyclers.concurrent(Recyclers.softFactory(Recyclers.dequeFactory(RECYCLER_C, limit)), randomIntBetween(1, 5));
}
}

MockPageCacheRecycler.java

@ -144,15 +144,6 @@ public class MockPageCacheRecycler extends PageCacheRecycler {
return wrap(page);
}
@Override
public V<double[]> doublePage(boolean clear) {
final V<double[]> page = super.doublePage(clear);
if (!clear) {
Arrays.fill(page.v(), 0, page.v().length, random.nextDouble() - 0.5);
}
return wrap(page);
}
@Override
public V<Object[]> objectPage() {
return wrap(super.objectPage());
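
The removed doublePage override poisoned non-cleared recycled pages with random values so tests fail fast when a caller wrongly assumes a recycled page arrives zeroed. With doubles now stored in long pages, the longPage override presumably takes over that duty; by symmetry with the methods above it would look like this (sketch, not shown in this diff):

    @Override
    public V<long[]> longPage(boolean clear) {
        final V<long[]> page = super.longPage(clear);
        if (!clear) {
            // poison recycled contents so stale-data assumptions surface in tests
            Arrays.fill(page.v(), 0, page.v().length, random.nextLong());
        }
        return wrap(page);
    }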