[TEST] Adds mutate method to more tests (#26094)

* Adds mutate method to more tests

  Relates to #25929

* fixes tests

This commit is contained in:
parent 746487c3f3
commit 18e0fb5b3f
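The mutateInstance overrides added in the hunks below plug into the shared serialization test base classes: the framework builds an instance, checks that an equal copy stays equal, and then asks the test for a mutated instance that must compare as not equal. A minimal sketch of that contract, with hypothetical names standing in for the real test harness (this is an illustration, not the framework code):

import java.util.function.UnaryOperator;

// Hypothetical, minimal version of the check that the new mutateInstance
// overrides support: an equal copy must stay equal, a mutated copy must not.
final class EqualsMutationCheck {

    static <T> void check(T original, UnaryOperator<T> copy, UnaryOperator<T> mutate) {
        T sameCopy = copy.apply(original);
        if (!original.equals(sameCopy) || original.hashCode() != sameCopy.hashCode()) {
            throw new AssertionError("an equal copy must be equal to the original");
        }
        T mutated = mutate.apply(original);
        if (original.equals(mutated)) {
            throw new AssertionError("a mutated instance must not be equal to the original");
        }
    }

    public static void main(String[] args) {
        // Strings stand in for the aggregation objects mutated in the hunks below.
        check("bucket", s -> new String(s), s -> s + "x");
    }
}

Each override added by this commit follows the same pattern: copy the fields of the instance, randomly change exactly one of them, and rebuild the object.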
@@ -189,8 +189,8 @@ public final class InternalBinaryRange
        }
    }

    private final DocValueFormat format;
    private final boolean keyed;
    protected final DocValueFormat format;
    protected final boolean keyed;
    private final List<Bucket> buckets;

    public InternalBinaryRange(String name, DocValueFormat format, boolean keyed, List<Bucket> buckets,
@@ -123,7 +123,8 @@ public abstract class InternalMappedTerms<A extends InternalTerms<A, B>, B exten
                && Objects.equals(format, that.format)
                && Objects.equals(otherDocCount, that.otherDocCount)
                && Objects.equals(showTermDocCountError, that.showTermDocCountError)
                && Objects.equals(shardSize, that.shardSize);
                && Objects.equals(shardSize, that.shardSize)
                && Objects.equals(docCountError, that.docCountError);
    }

    @Override
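The equals change in this hunk adds docCountError to the comparison; a mutation-based equals test can only catch a field that equals (and, per the contract, hashCode) actually looks at. A small self-contained sketch of the idea, using a hypothetical simplified value class rather than the real InternalMappedTerms:

import java.util.Objects;

// Hypothetical, simplified stand-in for an aggregation result: if docCountError
// were left out of equals/hashCode, mutating it would go undetected by an
// equals-based mutation test.
final class TermsResult {
    final long shardSize;
    final long docCountError;

    TermsResult(long shardSize, long docCountError) {
        this.shardSize = shardSize;
        this.docCountError = docCountError;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        TermsResult that = (TermsResult) obj;
        return shardSize == that.shardSize
                && docCountError == that.docCountError; // the field covered by this hunk
    }

    @Override
    public int hashCode() {
        return Objects.hash(shardSize, docCountError);
    }

    public static void main(String[] args) {
        TermsResult original = new TermsResult(10, 0);
        TermsResult mutated = new TermsResult(10, 5); // only docCountError differs
        System.out.println(original.equals(mutated)); // false only because equals checks docCountError
    }
}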
@@ -30,6 +30,8 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;
@@ -222,4 +224,27 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
            throw new UncheckedIOException(ioe);
        }
    }

    @Override
    protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws IOException {
        String source = instance.getSource();
        String lang = instance.getLang();
        Map<String, String> options = instance.getOptions();

        switch (between(0, 2)) {
        case 0:
            source = randomAlphaOfLength(randomIntBetween(4, 16383));
            break;
        case 1:
            lang = randomAlphaOfLengthBetween(1, 20);
            break;
        case 2:
            options = new HashMap<>(options);
            options.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20));
            break;
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
        return new StoredScriptSource(lang, source, options);
    }
}
@@ -155,4 +155,21 @@ public class InternalOrderTests extends AbstractSerializingTestCase<BucketOrder>
        assertNotEquals(o1.hashCode(), o4.hashCode());
    }

    @Override
    protected BucketOrder mutateInstance(BucketOrder instance) throws IOException {
        if (instance == InternalOrder.KEY_ASC) {
            return InternalOrder.COUNT_ASC;
        } else if (instance == InternalOrder.KEY_DESC) {
            return InternalOrder.KEY_ASC;
        } else if (instance == InternalOrder.COUNT_ASC) {
            return BucketOrder.aggregation(randomAlphaOfLengthBetween(1, 20), randomBoolean());
        } else if (instance == InternalOrder.COUNT_DESC) {
            return BucketOrder.compound(getRandomOrder());
        } else if (instance instanceof InternalOrder.Aggregation) {
            return InternalOrder.COUNT_DESC;
        } else {
            return InternalOrder.KEY_DESC;
        }
    }

}
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
@@ -102,4 +103,33 @@ public class InternalAdjacencyMatrixTests extends InternalMultiBucketAggregation
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedAdjacencyMatrix.class;
    }

    @Override
    protected InternalAdjacencyMatrix mutateInstance(InternalAdjacencyMatrix instance) {
        String name = instance.getName();
        List<InternalAdjacencyMatrix.InternalBucket> buckets = instance.getBuckets();
        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
        Map<String, Object> metaData = instance.getMetaData();
        switch (between(0, 2)) {
        case 0:
            name += randomAlphaOfLength(5);
            break;
        case 1:
            buckets = new ArrayList<>(buckets);
            buckets.add(new InternalAdjacencyMatrix.InternalBucket(randomAlphaOfLength(10), randomNonNegativeLong(),
                    InternalAggregations.EMPTY));
            break;
        case 2:
            if (metaData == null) {
                metaData = new HashMap<>(1);
            } else {
                metaData = new HashMap<>(instance.getMetaData());
            }
            metaData.put(randomAlphaOfLength(15), randomInt());
            break;
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
        return new InternalAdjacencyMatrix(name, buckets, pipelineAggregators, metaData);
    }
}
@@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -122,4 +123,38 @@ public class InternalBinaryRangeTests extends InternalRangeTestCase<InternalBina
    protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
        return ParsedBinaryRange.ParsedBucket.class;
    }

    @Override
    protected InternalBinaryRange mutateInstance(InternalBinaryRange instance) {
        String name = instance.getName();
        DocValueFormat format = instance.format;
        boolean keyed = instance.keyed;
        List<InternalBinaryRange.Bucket> buckets = instance.getBuckets();
        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
        Map<String, Object> metaData = instance.getMetaData();
        switch (between(0, 3)) {
        case 0:
            name += randomAlphaOfLength(5);
            break;
        case 1:
            keyed = keyed == false;
            break;
        case 2:
            buckets = new ArrayList<>(buckets);
            buckets.add(new InternalBinaryRange.Bucket(format, keyed, "range_a", new BytesRef(randomAlphaOfLengthBetween(1, 20)),
                    new BytesRef(randomAlphaOfLengthBetween(1, 20)), randomNonNegativeLong(), InternalAggregations.EMPTY));
            break;
        case 3:
            if (metaData == null) {
                metaData = new HashMap<>(1);
            } else {
                metaData = new HashMap<>(instance.getMetaData());
            }
            metaData.put(randomAlphaOfLength(15), randomInt());
            break;
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
        return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators, metaData);
    }
}
@@ -25,14 +25,13 @@ import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.InternalDateRange;
import org.elasticsearch.search.aggregations.bucket.range.ParsedDateRange;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@@ -112,4 +111,39 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
    protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
        return ParsedDateRange.ParsedBucket.class;
    }

    @Override
    protected InternalDateRange mutateInstance(InternalDateRange instance) {
        String name = instance.getName();
        DocValueFormat format = instance.format;
        boolean keyed = instance.keyed;
        List<InternalDateRange.Bucket> buckets = instance.getBuckets();
        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
        Map<String, Object> metaData = instance.getMetaData();
        switch (between(0, 3)) {
        case 0:
            name += randomAlphaOfLength(5);
            break;
        case 1:
            keyed = keyed == false;
            break;
        case 2:
            buckets = new ArrayList<>(buckets);
            double from = randomDouble();
            buckets.add(new InternalDateRange.Bucket("range_a", from, from + randomDouble(), randomNonNegativeLong(),
                    InternalAggregations.EMPTY, false, format));
            break;
        case 3:
            if (metaData == null) {
                metaData = new HashMap<>(1);
            } else {
                metaData = new HashMap<>(instance.getMetaData());
            }
            metaData.put(randomAlphaOfLength(15), randomInt());
            break;
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
        return new InternalDateRange(name, buckets, format, keyed, pipelineAggregators, metaData);
    }
}
@@ -24,12 +24,11 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.InternalGeoDistance;
import org.elasticsearch.search.aggregations.bucket.range.ParsedGeoDistance;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -99,4 +98,38 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
    protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
        return ParsedGeoDistance.ParsedBucket.class;
    }

    @Override
    protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) {
        String name = instance.getName();
        boolean keyed = instance.keyed;
        List<InternalGeoDistance.Bucket> buckets = instance.getBuckets();
        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
        Map<String, Object> metaData = instance.getMetaData();
        switch (between(0, 3)) {
        case 0:
            name += randomAlphaOfLength(5);
            break;
        case 1:
            keyed = keyed == false;
            break;
        case 2:
            buckets = new ArrayList<>(buckets);
            double from = randomDouble();
            buckets.add(new InternalGeoDistance.Bucket("range_a", from, from + randomDouble(), randomNonNegativeLong(),
                    InternalAggregations.EMPTY, false));
            break;
        case 3:
            if (metaData == null) {
                metaData = new HashMap<>(1);
            } else {
                metaData = new HashMap<>(instance.getMetaData());
            }
            metaData.put(randomAlphaOfLength(15), randomInt());
            break;
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
        return new InternalGeoDistance(name, buckets, keyed, pipelineAggregators, metaData);
    }
}
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -110,4 +111,39 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
    protected Class<? extends ParsedMultiBucketAggregation.ParsedBucket> parsedRangeBucketClass() {
        return ParsedRange.ParsedBucket.class;
    }

    @Override
    protected InternalRange mutateInstance(InternalRange instance) {
        String name = instance.getName();
        DocValueFormat format = instance.format;
        boolean keyed = instance.keyed;
        List<InternalRange.Bucket> buckets = instance.getBuckets();
        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
        Map<String, Object> metaData = instance.getMetaData();
        switch (between(0, 3)) {
        case 0:
            name += randomAlphaOfLength(5);
            break;
        case 1:
            keyed = keyed == false;
            break;
        case 2:
            buckets = new ArrayList<>(buckets);
            double from = randomDouble();
            buckets.add(new InternalRange.Bucket("range_a", from, from + randomDouble(), randomNonNegativeLong(),
                    InternalAggregations.EMPTY, false, format));
            break;
        case 3:
            if (metaData == null) {
                metaData = new HashMap<>(1);
            } else {
                metaData = new HashMap<>(instance.getMetaData());
            }
            metaData.put(randomAlphaOfLength(15), randomInt());
            break;
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
        return new InternalRange<>(name, buckets, format, keyed, pipelineAggregators, metaData);
    }
}
@@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -71,4 +72,83 @@ public class SignificantLongTermsTests extends InternalSignificantTermsTestCase
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedSignificantLongTerms.class;
    }

    @Override
    protected InternalSignificantTerms<?, ?> mutateInstance(InternalSignificantTerms<?, ?> instance) {
        if (instance instanceof SignificantLongTerms) {
            SignificantLongTerms longTerms = (SignificantLongTerms) instance;
            String name = longTerms.getName();
            int requiredSize = longTerms.requiredSize;
            long minDocCount = longTerms.minDocCount;
            DocValueFormat format = longTerms.format;
            long subsetSize = longTerms.getSubsetSize();
            long supersetSize = longTerms.getSupersetSize();
            List<SignificantLongTerms.Bucket> buckets = longTerms.getBuckets();
            SignificanceHeuristic significanceHeuristic = longTerms.significanceHeuristic;
            List<PipelineAggregator> pipelineAggregators = longTerms.pipelineAggregators();
            Map<String, Object> metaData = longTerms.getMetaData();
            switch (between(0, 5)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                subsetSize += between(1, 100);
                break;
            case 4:
                supersetSize += between(1, 100);
                break;
            case 5:
                buckets = new ArrayList<>(buckets);
                buckets.add(new SignificantLongTerms.Bucket(randomLong(), randomNonNegativeLong(), randomNonNegativeLong(),
                        randomNonNegativeLong(), randomNonNegativeLong(), InternalAggregations.EMPTY, format));
                break;
            case 8:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new SignificantLongTerms(name, requiredSize, minDocCount, pipelineAggregators, metaData, format, subsetSize,
                    supersetSize, significanceHeuristic, buckets);
        } else {
            String name = instance.getName();
            int requiredSize = instance.requiredSize;
            long minDocCount = instance.minDocCount;
            List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
            Map<String, Object> metaData = instance.getMetaData();
            switch (between(0, 3)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedSignificantTerms(name, requiredSize, minDocCount, pipelineAggregators, metaData);
        }
    }
}
@@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -64,4 +65,84 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedSignificantStringTerms.class;
    }

    @Override
    protected InternalSignificantTerms<?, ?> mutateInstance(InternalSignificantTerms<?, ?> instance) {
        if (instance instanceof SignificantStringTerms) {
            SignificantStringTerms stringTerms = (SignificantStringTerms) instance;
            String name = stringTerms.getName();
            int requiredSize = stringTerms.requiredSize;
            long minDocCount = stringTerms.minDocCount;
            DocValueFormat format = stringTerms.format;
            long subsetSize = stringTerms.getSubsetSize();
            long supersetSize = stringTerms.getSupersetSize();
            List<SignificantStringTerms.Bucket> buckets = stringTerms.getBuckets();
            SignificanceHeuristic significanceHeuristic = stringTerms.significanceHeuristic;
            List<PipelineAggregator> pipelineAggregators = stringTerms.pipelineAggregators();
            Map<String, Object> metaData = stringTerms.getMetaData();
            switch (between(0, 5)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                subsetSize += between(1, 100);
                break;
            case 4:
                supersetSize += between(1, 100);
                break;
            case 5:
                buckets = new ArrayList<>(buckets);
                buckets.add(new SignificantStringTerms.Bucket(new BytesRef(randomAlphaOfLengthBetween(1, 10)),
                        randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(),
                        InternalAggregations.EMPTY, format));
                break;
            case 8:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new SignificantStringTerms(name, requiredSize, minDocCount, pipelineAggregators, metaData, format, subsetSize,
                    supersetSize, significanceHeuristic, buckets);
        } else {
            String name = instance.getName();
            int requiredSize = instance.requiredSize;
            long minDocCount = instance.minDocCount;
            List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
            Map<String, Object> metaData = instance.getMetaData();
            switch (between(0, 3)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedSignificantTerms(name, requiredSize, minDocCount, pipelineAggregators, metaData);
        }
    }
}
@@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -69,4 +70,92 @@ public class DoubleTermsTests extends InternalTermsTestCase {
        return ParsedDoubleTerms.class;
    }

    @Override
    protected InternalTerms<?, ?> mutateInstance(InternalTerms<?, ?> instance) {
        if (instance instanceof DoubleTerms) {
            DoubleTerms doubleTerms = (DoubleTerms) instance;
            String name = doubleTerms.getName();
            BucketOrder order = doubleTerms.order;
            int requiredSize = doubleTerms.requiredSize;
            long minDocCount = doubleTerms.minDocCount;
            DocValueFormat format = doubleTerms.format;
            int shardSize = doubleTerms.getShardSize();
            boolean showTermDocCountError = doubleTerms.showTermDocCountError;
            long otherDocCount = doubleTerms.getSumOfOtherDocCounts();
            List<DoubleTerms.Bucket> buckets = doubleTerms.getBuckets();
            long docCountError = doubleTerms.getDocCountError();
            List<PipelineAggregator> pipelineAggregators = doubleTerms.pipelineAggregators();
            Map<String, Object> metaData = doubleTerms.getMetaData();
            switch (between(0, 8)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                shardSize += between(1, 100);
                break;
            case 4:
                showTermDocCountError = showTermDocCountError == false;
                break;
            case 5:
                otherDocCount += between(1, 100);
                break;
            case 6:
                docCountError += between(1, 100);
                break;
            case 7:
                buckets = new ArrayList<>(buckets);
                buckets.add(new DoubleTerms.Bucket(randomDouble(), randomNonNegativeLong(), InternalAggregations.EMPTY,
                        showTermDocCountError, docCountError, format));
                break;
            case 8:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new DoubleTerms(name, order, requiredSize, minDocCount, pipelineAggregators, metaData, format, shardSize,
                    showTermDocCountError, otherDocCount, buckets, docCountError);
        } else {
            String name = instance.getName();
            BucketOrder order = instance.order;
            int requiredSize = instance.requiredSize;
            long minDocCount = instance.minDocCount;
            List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
            Map<String, Object> metaData = instance.getMetaData();
            switch (between(0, 3)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedTerms(name, order, requiredSize, minDocCount, pipelineAggregators, metaData);
        }
    }

}
@@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -68,4 +69,92 @@ public class LongTermsTests extends InternalTermsTestCase {
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedLongTerms.class;
    }

    @Override
    protected InternalTerms<?, ?> mutateInstance(InternalTerms<?, ?> instance) {
        if (instance instanceof LongTerms) {
            LongTerms longTerms = (LongTerms) instance;
            String name = longTerms.getName();
            BucketOrder order = longTerms.order;
            int requiredSize = longTerms.requiredSize;
            long minDocCount = longTerms.minDocCount;
            DocValueFormat format = longTerms.format;
            int shardSize = longTerms.getShardSize();
            boolean showTermDocCountError = longTerms.showTermDocCountError;
            long otherDocCount = longTerms.getSumOfOtherDocCounts();
            List<LongTerms.Bucket> buckets = longTerms.getBuckets();
            long docCountError = longTerms.getDocCountError();
            List<PipelineAggregator> pipelineAggregators = longTerms.pipelineAggregators();
            Map<String, Object> metaData = longTerms.getMetaData();
            switch (between(0, 8)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                shardSize += between(1, 100);
                break;
            case 4:
                showTermDocCountError = showTermDocCountError == false;
                break;
            case 5:
                otherDocCount += between(1, 100);
                break;
            case 6:
                docCountError += between(1, 100);
                break;
            case 7:
                buckets = new ArrayList<>(buckets);
                buckets.add(new LongTerms.Bucket(randomLong(), randomNonNegativeLong(), InternalAggregations.EMPTY, showTermDocCountError,
                        docCountError, format));
                break;
            case 8:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new LongTerms(name, order, requiredSize, minDocCount, pipelineAggregators, metaData, format, shardSize,
                    showTermDocCountError, otherDocCount, buckets, docCountError);
        } else {
            String name = instance.getName();
            BucketOrder order = instance.order;
            int requiredSize = instance.requiredSize;
            long minDocCount = instance.minDocCount;
            List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
            Map<String, Object> metaData = instance.getMetaData();
            switch (between(0, 3)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedTerms(name, order, requiredSize, minDocCount, pipelineAggregators, metaData);
        }
    }
}
@@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -69,4 +70,92 @@ public class StringTermsTests extends InternalTermsTestCase {
    protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
        return ParsedStringTerms.class;
    }

    @Override
    protected InternalTerms<?, ?> mutateInstance(InternalTerms<?, ?> instance) {
        if (instance instanceof StringTerms) {
            StringTerms stringTerms = (StringTerms) instance;
            String name = stringTerms.getName();
            BucketOrder order = stringTerms.order;
            int requiredSize = stringTerms.requiredSize;
            long minDocCount = stringTerms.minDocCount;
            DocValueFormat format = stringTerms.format;
            int shardSize = stringTerms.getShardSize();
            boolean showTermDocCountError = stringTerms.showTermDocCountError;
            long otherDocCount = stringTerms.getSumOfOtherDocCounts();
            List<StringTerms.Bucket> buckets = stringTerms.getBuckets();
            long docCountError = stringTerms.getDocCountError();
            List<PipelineAggregator> pipelineAggregators = stringTerms.pipelineAggregators();
            Map<String, Object> metaData = stringTerms.getMetaData();
            switch (between(0, 8)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                shardSize += between(1, 100);
                break;
            case 4:
                showTermDocCountError = showTermDocCountError == false;
                break;
            case 5:
                otherDocCount += between(1, 100);
                break;
            case 6:
                docCountError += between(1, 100);
                break;
            case 7:
                buckets = new ArrayList<>(buckets);
                buckets.add(new StringTerms.Bucket(new BytesRef(randomAlphaOfLengthBetween(1, 10)), randomNonNegativeLong(),
                        InternalAggregations.EMPTY, showTermDocCountError, docCountError, format));
                break;
            case 8:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new StringTerms(name, order, requiredSize, minDocCount, pipelineAggregators, metaData, format, shardSize,
                    showTermDocCountError, otherDocCount, buckets, docCountError);
        } else {
            String name = instance.getName();
            BucketOrder order = instance.order;
            int requiredSize = instance.requiredSize;
            long minDocCount = instance.minDocCount;
            List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
            Map<String, Object> metaData = instance.getMetaData();
            switch (between(0, 3)) {
            case 0:
                name += randomAlphaOfLength(5);
                break;
            case 1:
                requiredSize += between(1, 100);
                break;
            case 2:
                minDocCount += between(1, 100);
                break;
            case 3:
                if (metaData == null) {
                    metaData = new HashMap<>(1);
                } else {
                    metaData = new HashMap<>(instance.getMetaData());
                }
                metaData.put(randomAlphaOfLength(15), randomInt());
                break;
            default:
                throw new AssertionError("Illegal randomisation branch");
            }
            return new UnmappedTerms(name, order, requiredSize, minDocCount, pipelineAggregators, metaData);
        }
    }
}
@@ -206,7 +206,18 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase<Int
            break;
        case 1:
            Object newValue = randomValue(valueTypes, 0);
            while (newValue.equals(value)) {
            while ((newValue == null && value == null) || newValue.equals(value)) {
                int levels = randomIntBetween(1, 3);
                Supplier[] valueTypes = new Supplier[levels];
                for (int i = 0; i < levels; i++) {
                    if (i < levels - 1) {
                        valueTypes[i] = randomFrom(nestedValueSuppliers);
                    } else {
                        // the last one needs to be a leaf value, not map or
                        // list
                        valueTypes[i] = randomFrom(leafValueSuppliers);
                    }
                }
                newValue = randomValue(valueTypes, 0);
            }
            value = newValue;
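The widened loop condition above handles the case where both the old and the new random value are null, but it still calls newValue.equals(value) when only newValue is null. A null-safe comparison such as Objects.equals covers both situations; a tiny illustration (not part of this commit):

import java.util.Objects;

// Illustration only: Objects.equals handles the null/null and null/non-null
// cases that a hand-written condition has to spell out, and never throws a
// NullPointerException.
public class NullSafeEqualsDemo {
    public static void main(String[] args) {
        Object value = "old";
        Object newValue = null;

        // Hand-written form from the hunk above would throw here, because only
        // newValue is null:
        // boolean same = (newValue == null && value == null) || newValue.equals(value);

        boolean same = Objects.equals(newValue, value); // false, no exception
        System.out.println(same);
    }
}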
|
Loading…
x
Reference in New Issue
Block a user