[Test] Reduce number of buckets in SearchResponseTests and AggregationsTests (#24964)

This commit reduces the number of buckets that are generated for multi-bucket
aggregations in AggregationsTests and SearchResponseTests.

The number of buckets is now limited to a maximum of 3, whereas previously some
aggregations could generate up to 10 buckets.
Tanguy Leroux 2017-06-02 15:59:25 +02:00 committed by GitHub
parent b8605775df
commit 5f3ed99c71
16 changed files with 110 additions and 55 deletions
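
The core of the change is a configurable bucket cap in InternalMultiBucketAggregationTestCase, which AggregationsTests lowers to 3 before calling setUp() on each test case. Below is a minimal, self-contained sketch of that pattern; the class name and the use of java.util.Random are illustrative stand-ins, not the actual Elasticsearch test infrastructure.

import java.util.Random;

// Simplified stand-in for InternalMultiBucketAggregationTestCase: subclasses ask
// randomNumberOfBuckets() for a bucket count instead of hard-coding their own bounds.
abstract class MultiBucketTestCaseSketch {

    // Default upper bound, mirroring DEFAULT_MAX_NUMBER_OF_BUCKETS = 10 in the diff.
    private static final int DEFAULT_MAX_NUMBER_OF_BUCKETS = 10;

    // AggregationsTests lowers this field to 3 before setUp() so that the combined
    // aggregation trees stay small.
    int maxNumberOfBuckets = DEFAULT_MAX_NUMBER_OF_BUCKETS;

    private final Random random = new Random();

    protected int minNumberOfBuckets() {
        return 0;
    }

    protected int maxNumberOfBuckets() {
        return maxNumberOfBuckets;
    }

    protected int randomNumberOfBuckets() {
        // Uniform pick in [minNumberOfBuckets(), maxNumberOfBuckets()], mirroring
        // randomIntBetween(minNumberOfBuckets(), maxNumberOfBuckets()) in the diff.
        return minNumberOfBuckets() + random.nextInt(maxNumberOfBuckets() - minNumberOfBuckets() + 1);
    }
}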

View File

@@ -95,7 +95,6 @@ public class SearchResponseTests extends ESTestCase {
return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards, tookInMillis, shardSearchFailures);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/24891")
public void testFromXContent() throws IOException {
// the "_shard/total/failures" section makes if impossible to directly compare xContent, so we omit it here
SearchResponse response = createTestItem();

View File

@@ -147,6 +147,11 @@ public class AggregationsTests extends ESTestCase {
@Before
public void init() throws Exception {
for (InternalAggregationTestCase aggsTest : aggsTests) {
if (aggsTest instanceof InternalMultiBucketAggregationTestCase) {
// Lower the number of buckets generated by multi-bucket aggregation tests in
// order to avoid creating too many aggregations.
((InternalMultiBucketAggregationTestCase) aggsTest).maxNumberOfBuckets = 3;
}
aggsTest.setUp();
}
}
@@ -166,7 +171,6 @@ public class AggregationsTests extends ESTestCase {
}
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/24891")
public void testFromXContent() throws IOException {
XContentType xContentType = randomFrom(XContentType.values());
final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));

View File

@@ -36,7 +36,22 @@ import static java.util.Collections.emptyMap;
public abstract class InternalMultiBucketAggregationTestCase<T extends InternalAggregation & MultiBucketsAggregation>
extends InternalAggregationTestCase<T> {
private static final int DEFAULT_MAX_NUMBER_OF_BUCKETS = 10;
Supplier<InternalAggregations> subAggregationsSupplier;
int maxNumberOfBuckets = DEFAULT_MAX_NUMBER_OF_BUCKETS;
protected int randomNumberOfBuckets() {
return randomIntBetween(minNumberOfBuckets(), maxNumberOfBuckets());
}
protected int minNumberOfBuckets() {
return 0;
}
protected int maxNumberOfBuckets() {
return maxNumberOfBuckets;
}
@Override
public void setUp() throws Exception {
@@ -57,7 +72,10 @@ public abstract class InternalMultiBucketAggregationTestCase<T extends InternalA
@Override
protected final T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
return createTestInstance(name, pipelineAggregators, metaData, subAggregationsSupplier.get());
T instance = createTestInstance(name, pipelineAggregators, metaData, subAggregationsSupplier.get());
assert instance.getBuckets().size() <= maxNumberOfBuckets() :
"Maximum number of buckets exceeded for " + instance.getClass().getSimpleName() + " aggregation";
return instance;
}
protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,

View File

@@ -34,10 +34,19 @@ public class InternalAdjacencyMatrixTests extends InternalMultiBucketAggregation
private List<String> keys;
@Override
protected int maxNumberOfBuckets() {
return 10;
}
@Override
public void setUp() throws Exception {
super.setUp();
keys = new ArrayList<>();
// InternalAdjacencyMatrix represents the upper triangular matrix:
// 2 filters (2x2 matrix) generate 3 buckets
// 3 filters generate 6 buckets
// 4 filters generate 10 buckets
int numFilters = randomIntBetween(2, 4);
String[] filters = new String[numFilters];
for (int i = 0; i < numFilters; i++) {

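The bucket counts listed in the InternalAdjacencyMatrixTests comment follow the triangular-number formula: n filters produce n * (n + 1) / 2 buckets (the upper triangular part of the n x n matrix, diagonal included). A small, hypothetical check of those figures; the class and method names are illustrative only and not part of the change.

public class AdjacencyMatrixBucketCount {

    static int bucketCount(int numFilters) {
        // Upper triangular part of an n x n filter matrix, diagonal included.
        return numFilters * (numFilters + 1) / 2;
    }

    public static void main(String[] args) {
        for (int n = 2; n <= 4; n++) {
            System.out.println(n + " filters -> " + bucketCount(n) + " buckets");
        }
        // Prints 3, 6 and 10 buckets, which is why InternalAdjacencyMatrixTests
        // overrides maxNumberOfBuckets() to return 10 rather than relying on the
        // lowered cap of 3.
    }
}
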
View File

@@ -40,7 +40,7 @@ public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase
super.setUp();
keyed = randomBoolean();
keys = new ArrayList<>();
int numBuckets = randomIntBetween(1, 5);
int numBuckets = randomNumberOfBuckets();
for (int i = 0; i < numBuckets; i++) {
if (keyed) {
keys.add(randomAlphaOfLength(5));

View File

@@ -33,12 +33,22 @@ import java.util.Map;
public class InternalGeoHashGridTests extends InternalMultiBucketAggregationTestCase<InternalGeoHashGrid> {
@Override
protected int minNumberOfBuckets() {
return 1;
}
@Override
protected int maxNumberOfBuckets() {
return 3;
}
@Override
protected InternalGeoHashGrid createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
int size = randomIntBetween(1, 3);
int size = randomNumberOfBuckets();
List<InternalGeoHashGrid.Bucket> buckets = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
double latitude = randomDoubleBetween(-90.0, 90.0, false);

View File

@@ -55,7 +55,7 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
int nbBuckets = randomInt(10);
int nbBuckets = randomNumberOfBuckets();
List<InternalDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
long startingDate = System.currentTimeMillis();

View File

@@ -51,7 +51,7 @@ public class InternalHistogramTests extends InternalMultiBucketAggregationTestCa
Map<String, Object> metaData,
InternalAggregations aggregations) {
final int base = randomInt(50) - 30;
final int numBuckets = randomInt(10);
final int numBuckets = randomNumberOfBuckets();
final int interval = randomIntBetween(1, 3);
List<InternalHistogram.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < numBuckets; ++i) {

View File

@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -36,30 +37,36 @@ public class InternalBinaryRangeTests extends InternalRangeTestCase<InternalBina
private List<Tuple<BytesRef, BytesRef>> ranges;
@Override
protected int minNumberOfBuckets() {
return 1;
}
@Override
public void setUp() throws Exception {
super.setUp();
final int numRanges = randomIntBetween(1, 10);
ranges = new ArrayList<>(numRanges);
List<Tuple<BytesRef, BytesRef>> listOfRanges = new ArrayList<>();
if (randomBoolean()) {
listOfRanges.add(Tuple.tuple(null, new BytesRef(randomAlphaOfLength(15))));
}
if (randomBoolean()) {
listOfRanges.add(Tuple.tuple(new BytesRef(randomAlphaOfLength(15)), null));
}
if (randomBoolean()) {
listOfRanges.add(Tuple.tuple(null, null));
}
final int numRanges = Math.max(0, randomNumberOfBuckets() - listOfRanges.size());
for (int i = 0; i < numRanges; i++) {
BytesRef[] values = new BytesRef[2];
values[0] = new BytesRef(randomAlphaOfLength(15));
values[1] = new BytesRef(randomAlphaOfLength(15));
Arrays.sort(values);
ranges.add(Tuple.tuple(values[0], values[1]));
}
if (randomBoolean()) {
ranges.add(Tuple.tuple(null, new BytesRef(randomAlphaOfLength(15))));
}
if (randomBoolean()) {
ranges.add(Tuple.tuple(new BytesRef(randomAlphaOfLength(15)), null));
}
if (randomBoolean()) {
ranges.add(Tuple.tuple(null, null));
listOfRanges.add(Tuple.tuple(values[0], values[1]));
}
Collections.shuffle(listOfRanges, random());
ranges = Collections.unmodifiableList(listOfRanges);
}
@Override

View File

@@ -42,22 +42,7 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
super.setUp();
format = randomNumericDocValueFormat();
final int interval = randomFrom(1, 5, 10, 25, 50, 100);
final int numRanges = randomIntBetween(1, 10);
List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);
for (int i = 0; i < numRanges; i++) {
double from = i * interval;
double to = from + interval;
listOfRanges.add(Tuple.tuple(from, to));
}
if (randomBoolean()) {
// Add some overlapping ranges
double max = (double) numRanges * interval;
listOfRanges.add(Tuple.tuple(0.0, max));
listOfRanges.add(Tuple.tuple(0.0, max / 2));
listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2));
}
List<Tuple<Double, Double>> listOfRanges = new ArrayList<>();
if (rarely()) {
listOfRanges.add(Tuple.tuple(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
}
@@ -67,6 +52,25 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
if (rarely()) {
listOfRanges.add(Tuple.tuple(randomDouble(), Double.POSITIVE_INFINITY));
}
final int interval = randomFrom(1, 5, 10, 25, 50, 100);
final int numRanges = Math.max(0, randomNumberOfBuckets() - listOfRanges.size());
final double max = (double) numRanges * interval;
for (int i = 0; numRanges - listOfRanges.size() > 0; i++) {
double from = i * interval;
double to = from + interval;
Tuple<Double, Double> range;
if (randomBoolean()) {
range = Tuple.tuple(from, to);
} else {
// Add some overlapping range
range = Tuple.tuple(randomFrom(0.0, max / 3), randomFrom(max, max / 2, max / 3 * 2));
}
listOfRanges.add(range);
}
Collections.shuffle(listOfRanges, random());
ranges = Collections.unmodifiableList(listOfRanges);
}
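
The two range tests above share a "bucket budget" pattern: optional special ranges (with unbounded endpoints) are added first, and only the remaining budget is spent on regular ranges, so the total never exceeds randomNumberOfBuckets(). A minimal sketch of that idea follows; the class and method names are illustrative, not part of the change.

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

class RangeBudgetSketch {

    private static final Random RANDOM = new Random();

    // Build at most bucketBudget ranges, counting any special range against the budget.
    static List<double[]> randomRanges(int bucketBudget, int interval) {
        List<double[]> ranges = new ArrayList<>();
        // A special, unbounded range may or may not be added, as in the tests above.
        if (RANDOM.nextBoolean()) {
            ranges.add(new double[] { Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY });
        }
        // Spend only what is left of the budget on regular, interval-sized ranges.
        int numRanges = Math.max(0, bucketBudget - ranges.size());
        for (int i = 0; i < numRanges; i++) {
            double from = i * interval;
            ranges.add(new double[] { from, from + interval });
        }
        return ranges;
    }
}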

View File

@@ -50,7 +50,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
dateTime -> dateTime.plusHours(1), dateTime -> dateTime.plusDays(1), dateTime -> dateTime.plusMonths(1), dateTime ->
dateTime.plusYears(1));
final int numRanges = randomIntBetween(1, 10);
final int numRanges = randomNumberOfBuckets();
final List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);
DateTime date = new DateTime(DateTimeZone.UTC);
@@ -60,17 +60,18 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
double from = date.getMillis();
date = interval.apply(date);
double to = date.getMillis();
listOfRanges.add(Tuple.tuple(from, to));
if (to > end) {
end = to;
}
}
if (randomBoolean()) {
final int randomOverlaps = randomIntBetween(1, 5);
for (int i = 0; i < randomOverlaps; i++) {
if (randomBoolean()) {
listOfRanges.add(Tuple.tuple(from, to));
} else {
// Add some overlapping range
listOfRanges.add(Tuple.tuple(start, randomDoubleBetween(start, end, false)));
}
}
Collections.shuffle(listOfRanges, random());
dateRanges = Collections.unmodifiableList(listOfRanges);
}

View File

@@ -41,21 +41,24 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
super.setUp();
final int interval = randomFrom(1, 5, 10, 25, 50, 100);
final int numRanges = randomIntBetween(1, 10);
final int numRanges = randomNumberOfBuckets();
final double max = (double) numRanges * interval;
List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);
for (int i = 0; i < numRanges; i++) {
double from = i * interval;
double to = from + interval;
listOfRanges.add(Tuple.tuple(from, to));
}
if (randomBoolean()) {
// Add some overlapping ranges
double max = (double) numRanges * interval;
listOfRanges.add(Tuple.tuple(0.0, max));
listOfRanges.add(Tuple.tuple(0.0, max / 2));
listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2));
Tuple<Double, Double> range;
if (randomBoolean()) {
range = Tuple.tuple(from, to);
} else {
// Add some overlapping range
range = Tuple.tuple(randomFrom(0.0, max / 3), randomFrom(max, max / 2, max / 3 * 2));
}
listOfRanges.add(range);
}
Collections.shuffle(listOfRanges, random());
geoDistanceRanges = Collections.unmodifiableList(listOfRanges);
}

View File

@@ -53,7 +53,7 @@ public abstract class InternalSignificantTermsTestCase extends InternalMultiBuck
Map<String, Object> metaData,
InternalAggregations aggregations) {
final int requiredSize = randomIntBetween(1, 5);
final int numBuckets = randomInt(requiredSize + 2);
final int numBuckets = randomNumberOfBuckets();
long subsetSize = 0;
long supersetSize = 0;

View File

@@ -48,7 +48,7 @@ public class DoubleTermsTests extends InternalTermsTestCase {
DocValueFormat format = randomNumericDocValueFormat();
long otherDocCount = 0;
List<DoubleTerms.Bucket> buckets = new ArrayList<>();
final int numBuckets = randomInt(shardSize);
final int numBuckets = randomNumberOfBuckets();
Set<Double> terms = new HashSet<>();
for (int i = 0; i < numBuckets; ++i) {
double term = randomValueOtherThanMany(d -> terms.add(d) == false, random()::nextDouble);

View File

@@ -48,7 +48,7 @@ public class LongTermsTests extends InternalTermsTestCase {
DocValueFormat format = randomNumericDocValueFormat();
long otherDocCount = 0;
List<LongTerms.Bucket> buckets = new ArrayList<>();
final int numBuckets = randomInt(shardSize);
final int numBuckets = randomNumberOfBuckets();
Set<Long> terms = new HashSet<>();
for (int i = 0; i < numBuckets; ++i) {
long term = randomValueOtherThanMany(l -> terms.add(l) == false, random()::nextLong);

View File

@@ -49,7 +49,7 @@ public class StringTermsTests extends InternalTermsTestCase {
DocValueFormat format = DocValueFormat.RAW;
long otherDocCount = 0;
List<StringTerms.Bucket> buckets = new ArrayList<>();
final int numBuckets = randomInt(shardSize);
final int numBuckets = randomNumberOfBuckets();
Set<BytesRef> terms = new HashSet<>();
for (int i = 0; i < numBuckets; ++i) {
BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10)));