[TEST] Adds mutate function for all metric aggregation tests (#26056)

* Adds mutate function for all metric aggregation tests

  Relates to #25929

* Fixes tests
* Fixes review comments
* Fixes cardinality equals method
* Fixes scripted metric test
This commit is contained in:
parent 8fda74aee1
commit a4ae8a9156
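Each test class below overrides mutateInstance, which returns a copy of a randomly created aggregation that differs from the original in exactly one equality-relevant property. The shared test infrastructure then asserts that the mutated copy compares unequal, which is what catches broken equals/hashCode implementations such as the cardinality and stats bugs fixed here. A minimal sketch of how such a harness consumes mutateInstance follows; the class and method names are illustrative, not the exact Elasticsearch test-framework API.

import java.util.function.UnaryOperator;

final class EqualsContractChecker {
    // `copy` round-trips through serialization in the real framework;
    // `mutate` is the per-class mutateInstance override added by this commit.
    static <T> void checkEqualsAndHashCode(T original, UnaryOperator<T> copy, UnaryOperator<T> mutate) {
        T duplicate = copy.apply(original);
        if (original.equals(duplicate) == false) {
            throw new AssertionError("copy must equal the original");
        }
        if (original.hashCode() != duplicate.hashCode()) {
            throw new AssertionError("equal instances must share a hash code");
        }
        T mutated = mutate.apply(original);
        if (original.equals(mutated)) {
            throw new AssertionError("a copy that differs in one property must not be equal");
        }
    }
}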
@@ -26,14 +26,14 @@ import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
-import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.InternalOrder;
 import org.elasticsearch.search.aggregations.KeyComparable;
+import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 
@@ -277,6 +277,22 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
         return Collections.unmodifiableList(buckets);
     }
 
+    DocValueFormat getFormatter() {
+        return format;
+    }
+
+    long getMinDocCount() {
+        return minDocCount;
+    }
+
+    long getOffset() {
+        return offset;
+    }
+
+    BucketOrder getOrder() {
+        return order;
+    }
+
     @Override
     public InternalDateHistogram create(List<Bucket> buckets) {
         return new InternalDateHistogram(name, buckets, order, minDocCount, offset, emptyBucketInfo, format,
@@ -25,14 +25,14 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
-import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.InternalOrder;
 import org.elasticsearch.search.aggregations.KeyComparable;
+import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -267,6 +267,14 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<Inte
         return Collections.unmodifiableList(buckets);
     }
 
+    long getMinDocCount() {
+        return minDocCount;
+    }
+
+    BucketOrder getOrder() {
+        return order;
+    }
+
     @Override
     public InternalHistogram create(List<Bucket> buckets) {
         return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, format, keyed, pipelineAggregators(), metaData);
@@ -78,6 +78,10 @@ public class InternalAvg extends InternalNumericMetricsAggregation.SingleValue i
         return count;
     }
 
+    DocValueFormat getFormatter() {
+        return format;
+    }
+
     @Override
     public String getWriteableName() {
         return AvgAggregationBuilder.NAME;
@@ -34,7 +34,9 @@ import org.elasticsearch.common.util.IntArray;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 
@@ -423,20 +425,28 @@ public final class HyperLogLogPlusPlus implements Releasable {
         Releasables.close(runLens, hashSet.sizes);
     }
 
-    private Set<Object> getComparableData(long bucket) {
-        Set<Object> values = new HashSet<>();
+    private Object getComparableData(long bucket) {
         if (algorithm.get(bucket) == LINEAR_COUNTING) {
+            Set<Integer> values = new HashSet<>();
             try (IntArray hashSetValues = hashSet.values(bucket)) {
                 for (long i = 0; i < hashSetValues.size(); i++) {
                     values.add(hashSetValues.get(i));
                 }
             }
+            return values;
         } else {
+            Map<Byte, Integer> values = new HashMap<>();
             for (long i = 0; i < runLens.size(); i++) {
-                values.add(runLens.get((bucket << p) + i));
+                byte runLength = runLens.get((bucket << p) + i);
+                Integer numOccurances = values.get(runLength);
+                if (numOccurances == null) {
+                    values.put(runLength, 1);
+                } else {
+                    values.put(runLength, numOccurances + 1);
+                }
             }
+            return values;
         }
-        return values;
     }
 
     public int hashCode(long bucket) {
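In linear-counting mode the sketch stores the raw hash set, so a Set<Integer> of its values is directly comparable. In HyperLogLog mode the comparable data becomes the multiset of register run lengths, represented as a Map<Byte, Integer> histogram (run length mapped to the number of registers holding it). A hedged sketch of that histogram idea in isolation, using Map.merge in place of the get/put null check from the diff:

import java.util.HashMap;
import java.util.Map;

final class RunLengthHistogram {
    // Builds the run-length histogram that getComparableData returns in HLL mode;
    // merge() is equivalent to the explicit null-check-then-put in the hunk above.
    static Map<Byte, Integer> of(byte[] runLens) {
        Map<Byte, Integer> counts = new HashMap<>();
        for (byte runLen : runLens) {
            counts.merge(runLen, 1, Integer::sum);
        }
        return counts;
    }
}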
@@ -446,7 +456,7 @@ public final class HyperLogLogPlusPlus implements Releasable {
     public boolean equals(long bucket, HyperLogLogPlusPlus other) {
         return Objects.equals(p, other.p) &&
             Objects.equals(algorithm.get(bucket), other.algorithm.get(bucket)) &&
-            Objects.equals(getComparableData(bucket), getComparableData(bucket));
+            Objects.equals(getComparableData(bucket), other.getComparableData(bucket));
     }
 
     /**
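This hunk fixes a self-comparison: the old code passed the receiver's own data to both sides of Objects.equals, so the last clause was a tautology and equals could never detect a difference in sketch contents. A check along these lines would have exposed it; newSketch() stands in for the BigArrays-backed construction and is illustrative only.

// Illustrative only: constructing HyperLogLogPlusPlus requires BigArrays plumbing,
// abbreviated here as a hypothetical newSketch() helper.
HyperLogLogPlusPlus a = newSketch();
HyperLogLogPlusPlus b = newSketch();
a.collect(0, BitMixer.mix64(42));
b.collect(0, BitMixer.mix64(99));
// Before the fix this assertion failed: equals compared
// getComparableData(bucket) with itself and always returned true.
assert a.equals(0, b) == false;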
@@ -254,7 +254,8 @@ public class InternalGeoBounds extends InternalAggregation implements GeoBounds
     @Override
     protected boolean doEquals(Object obj) {
         InternalGeoBounds other = (InternalGeoBounds) obj;
-        return bottom == other.bottom &&
+        return top == other.top &&
+            bottom == other.bottom &&
             posLeft == other.posLeft &&
             posRight == other.posRight &&
             negLeft == other.negLeft &&
@@ -40,7 +40,7 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
 
     protected final double[] keys;
     protected final DoubleHistogram state;
-    private final boolean keyed;
+    protected final boolean keyed;
 
     AbstractInternalHDRPercentiles(String name, double[] keys, DoubleHistogram state, boolean keyed, DocValueFormat format,
             List<PipelineAggregator> pipelineAggregators,
@@ -89,6 +89,10 @@ abstract class AbstractInternalHDRPercentiles extends InternalNumericMetricsAggr
         return value(Double.parseDouble(name));
     }
 
+    DocValueFormat formatter() {
+        return format;
+    }
+
     public abstract double value(double key);
 
     public long getEstimatedMemoryFootprint() {
@@ -37,7 +37,7 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
 
     protected final double[] keys;
     protected final TDigestState state;
-    private final boolean keyed;
+    final boolean keyed;
 
     AbstractInternalTDigestPercentiles(String name, double[] keys, TDigestState state, boolean keyed, DocValueFormat formatter,
             List<PipelineAggregator> pipelineAggregators,
@@ -75,6 +75,10 @@ abstract class AbstractInternalTDigestPercentiles extends InternalNumericMetrics
 
     public abstract double value(double key);
 
+    DocValueFormat formatter() {
+        return format;
+    }
+
     public long getEstimatedMemoryFootprint() {
         return state.byteSize();
     }
@@ -36,7 +36,7 @@ import java.util.Map;
 import java.util.Objects;
 
 public class InternalScriptedMetric extends InternalAggregation implements ScriptedMetric {
-    private final Script reduceScript;
+    final Script reduceScript;
     private final List<Object> aggregation;
 
     public InternalScriptedMetric(String name, Object aggregation, Script reduceScript, List<PipelineAggregator> pipelineAggregators,
@@ -211,8 +211,8 @@ public class InternalStats extends InternalNumericMetricsAggregation.MultiValue
     protected boolean doEquals(Object obj) {
         InternalStats other = (InternalStats) obj;
         return count == other.count &&
-            min == other.min &&
-            max == other.max &&
-            Double.compare(count, other.count) == 0;
+            Double.compare(min, other.min) == 0 &&
+            Double.compare(max, other.max) == 0 &&
+            Double.compare(sum, other.sum) == 0;
     }
 }
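Switching from == to Double.compare matters for the randomized tests: == is not reflexive for NaN, so a stats instance holding NaN would compare unequal to an identical copy, and == also conflates 0.0 with -0.0 while compare distinguishes them. A small demonstration, plain JDK only:

public class DoubleCompareDemo {
    public static void main(String[] args) {
        double nan = Double.NaN;
        System.out.println(nan == nan);                      // false: == is not reflexive for NaN
        System.out.println(Double.compare(nan, nan) == 0);   // true: compare treats NaN as equal to itself
        System.out.println(0.0 == -0.0);                     // true
        System.out.println(Double.compare(0.0, -0.0) == 0);  // false: compare orders -0.0 before 0.0
    }
}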
@@ -89,6 +89,10 @@ public class InternalTopHits extends InternalAggregation implements TopHits {
         return topDocs;
     }
 
+    int getFrom() {
+        return from;
+    }
+
+    int getSize() {
+        return size;
+    }
@@ -53,19 +53,19 @@ import org.elasticsearch.search.aggregations.bucket.terms.LongTermsTests;
 import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests;
 import org.elasticsearch.search.aggregations.metrics.InternalExtendedStatsTests;
 import org.elasticsearch.search.aggregations.metrics.InternalMaxTests;
+import org.elasticsearch.search.aggregations.metrics.InternalMinTests;
 import org.elasticsearch.search.aggregations.metrics.InternalStatsBucketTests;
 import org.elasticsearch.search.aggregations.metrics.InternalStatsTests;
+import org.elasticsearch.search.aggregations.metrics.InternalSumTests;
 import org.elasticsearch.search.aggregations.metrics.avg.InternalAvgTests;
 import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinalityTests;
 import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBoundsTests;
 import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroidTests;
-import org.elasticsearch.search.aggregations.metrics.min.InternalMinTests;
 import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentilesRanksTests;
 import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentilesTests;
 import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentilesRanksTests;
 import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentilesTests;
 import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetricTests;
-import org.elasticsearch.search.aggregations.metrics.sum.InternalSumTests;
 import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHitsTests;
 import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCountTests;
 import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValueTests;
@@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
+import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid.Bucket;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.ArrayList;
@@ -108,4 +109,38 @@ public class InternalGeoHashGridTests extends InternalMultiBucketAggregationTest
     protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
         return ParsedGeoHashGrid.class;
     }
+
+    @Override
+    protected InternalGeoHashGrid mutateInstance(InternalGeoHashGrid instance) {
+        String name = instance.getName();
+        int size = instance.getRequiredSize();
+        List<Bucket> buckets = instance.getBuckets();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 3)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            buckets = new ArrayList<>(buckets);
+            buckets.add(
+                new InternalGeoHashGrid.Bucket(randomNonNegativeLong(), randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY));
+            break;
+        case 2:
+            size = size + between(1, 10);
+            break;
+        case 3:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalGeoHashGrid(name, size, buckets, pipelineAggregators, metaData);
+    }
+
 }
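This is the template every mutateInstance below follows: read each equality-relevant constructor argument, randomly pick exactly one branch, change only that property (copying mutable collections such as buckets and metaData first), and rebuild the instance; the default branch guards against someone adding a field without adding a mutation for it. The same skeleton, reduced to a hypothetical two-field value class:

import java.util.Random;

final class Pair {
    final String name;
    final long value;

    Pair(String name, long value) {
        this.name = name;
        this.value = value;
    }

    // One branch per equality-relevant field; the default branch fails loudly
    // if a new field is ever added without a corresponding mutation.
    static Pair mutate(Pair instance, Random random) {
        switch (random.nextInt(2)) {
        case 0:
            return new Pair(instance.name + "-mutated", instance.value);
        case 1:
            return new Pair(instance.name, instance.value + 1);
        default:
            throw new AssertionError("Illegal randomisation branch");
        }
    }
}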
@@ -21,15 +21,15 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.test.InternalAggregationTestCase;
 import org.joda.time.DateTime;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -97,4 +97,46 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe
     protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
         return ParsedDateHistogram.class;
     }
+
+    @Override
+    protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) {
+        String name = instance.getName();
+        List<InternalDateHistogram.Bucket> buckets = instance.getBuckets();
+        BucketOrder order = instance.getOrder();
+        long minDocCount = instance.getMinDocCount();
+        long offset = instance.getOffset();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 5)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            buckets = new ArrayList<>(buckets);
+            buckets.add(new InternalDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), keyed, format,
+                    InternalAggregations.EMPTY));
+            break;
+        case 2:
+            order = BucketOrder.count(randomBoolean());
+            break;
+        case 3:
+            minDocCount += between(1, 10);
+            break;
+        case 4:
+            offset += between(1, 20);
+            break;
+        case 5:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalDateHistogram(name, buckets, order, minDocCount, offset, null, format, keyed, pipelineAggregators,
+                metaData);
+    }
 }
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -88,4 +89,41 @@ public class InternalHistogramTests extends InternalMultiBucketAggregationTestCa
     protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
         return ParsedHistogram.class;
     }
+
+    @Override
+    protected InternalHistogram mutateInstance(InternalHistogram instance) {
+        String name = instance.getName();
+        List<InternalHistogram.Bucket> buckets = instance.getBuckets();
+        BucketOrder order = instance.getOrder();
+        long minDocCount = instance.getMinDocCount();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 4)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            buckets = new ArrayList<>(buckets);
+            buckets.add(new InternalHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), keyed, format,
+                    InternalAggregations.EMPTY));
+            break;
+        case 2:
+            order = BucketOrder.count(randomBoolean());
+            break;
+        case 3:
+            minDocCount += between(1, 10);
+            break;
+        case 4:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalHistogram(name, buckets, order, minDocCount, null, format, keyed, pipelineAggregators, metaData);
+    }
 }
@@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtend
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -115,4 +116,76 @@ public class InternalExtendedStatsTests extends InternalAggregationTestCase<Inte
     protected Writeable.Reader<InternalExtendedStats> instanceReader() {
         return InternalExtendedStats::new;
     }
+
+    @Override
+    protected InternalExtendedStats mutateInstance(InternalExtendedStats instance) {
+        String name = instance.getName();
+        long count = instance.getCount();
+        double sum = instance.getSum();
+        double min = instance.getMin();
+        double max = instance.getMax();
+        double sumOfSqrs = instance.getSumOfSquares();
+        double sigma = instance.getSigma();
+        DocValueFormat formatter = instance.format;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 7)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(count)) {
+                count += between(1, 100);
+            } else {
+                count = between(1, 100);
+            }
+            break;
+        case 2:
+            if (Double.isFinite(sum)) {
+                sum += between(1, 100);
+            } else {
+                sum = between(1, 100);
+            }
+            break;
+        case 3:
+            if (Double.isFinite(min)) {
+                min += between(1, 100);
+            } else {
+                min = between(1, 100);
+            }
+            break;
+        case 4:
+            if (Double.isFinite(max)) {
+                max += between(1, 100);
+            } else {
+                max = between(1, 100);
+            }
+            break;
+        case 5:
+            if (Double.isFinite(sumOfSqrs)) {
+                sumOfSqrs += between(1, 100);
+            } else {
+                sumOfSqrs = between(1, 100);
+            }
+            break;
+        case 6:
+            if (Double.isFinite(sigma)) {
+                sigma += between(1, 10);
+            } else {
+                sigma = between(1, 10);
+            }
+            break;
+        case 7:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalExtendedStats(name, count, sum, min, max, sumOfSqrs, sigma, formatter, pipelineAggregators, metaData);
+    }
 }
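The Double.isFinite guard that recurs in these mutations is not decoration: randomized test instances can carry infinite values, and adding a finite increment to an infinity is a no-op, so the "mutated" copy would still compare equal and the test would fail spuriously. Resetting to a finite value guarantees a real difference:

public class InfinityNoOpDemo {
    public static void main(String[] args) {
        double max = Double.POSITIVE_INFINITY;
        // Adding to an infinite value changes nothing, so a mutation that only
        // incremented would leave the copy equal to the original.
        System.out.println(max + 100 == max); // true
    }
}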
@@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.metrics.max.ParsedMax;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -61,4 +62,36 @@ public class InternalMaxTests extends InternalAggregationTestCase<InternalMax> {
             assertEquals(parsed.getValue(), Double.NEGATIVE_INFINITY, 0);
         }
     }
+
+    @Override
+    protected InternalMax mutateInstance(InternalMax instance) {
+        String name = instance.getName();
+        double value = instance.getValue();
+        DocValueFormat formatter = instance.format;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 2)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(value)) {
+                value += between(1, 100);
+            } else {
+                value = between(1, 100);
+            }
+            break;
+        case 2:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalMax(name, value, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -17,14 +17,17 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.min;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
+import org.elasticsearch.search.aggregations.metrics.min.ParsedMin;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -58,4 +61,36 @@ public class InternalMinTests extends InternalAggregationTestCase<InternalMin> {
             assertEquals(parsed.getValue(), Double.POSITIVE_INFINITY, 0);
         }
     }
+
+    @Override
+    protected InternalMin mutateInstance(InternalMin instance) {
+        String name = instance.getName();
+        double value = instance.getValue();
+        DocValueFormat formatter = instance.format;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 2)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(value)) {
+                value += between(1, 100);
+            } else {
+                value = between(1, 100);
+            }
+            break;
+        case 2:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalMin(name, value, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -96,5 +97,61 @@ public class InternalStatsTests extends InternalAggregationTestCase<InternalStat
     protected Writeable.Reader<InternalStats> instanceReader() {
         return InternalStats::new;
     }
+
+    @Override
+    protected InternalStats mutateInstance(InternalStats instance) {
+        String name = instance.getName();
+        long count = instance.getCount();
+        double sum = instance.getSum();
+        double min = instance.getMin();
+        double max = instance.getMax();
+        DocValueFormat formatter = instance.format;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 5)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(count)) {
+                count += between(1, 100);
+            } else {
+                count = between(1, 100);
+            }
+            break;
+        case 2:
+            if (Double.isFinite(sum)) {
+                sum += between(1, 100);
+            } else {
+                sum = between(1, 100);
+            }
+            break;
+        case 3:
+            if (Double.isFinite(min)) {
+                min += between(1, 100);
+            } else {
+                min = between(1, 100);
+            }
+            break;
+        case 4:
+            if (Double.isFinite(max)) {
+                max += between(1, 100);
+            } else {
+                max = between(1, 100);
+            }
+            break;
+        case 5:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalStats(name, count, sum, min, max, formatter, pipelineAggregators, metaData);
+    }
 }
 
@@ -16,14 +16,17 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.sum;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
+import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -53,4 +56,36 @@ public class InternalSumTests extends InternalAggregationTestCase<InternalSum> {
         assertEquals(sum.getValue(), parsed.getValue(), Double.MIN_VALUE);
         assertEquals(sum.getValueAsString(), parsed.getValueAsString());
     }
+
+    @Override
+    protected InternalSum mutateInstance(InternalSum instance) {
+        String name = instance.getName();
+        double value = instance.getValue();
+        DocValueFormat formatter = instance.format;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 2)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(value)) {
+                value += between(1, 100);
+            } else {
+                value = between(1, 100);
+            }
+            break;
+        case 2:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalSum(name, value, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.min;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -30,6 +30,9 @@ import org.apache.lucene.store.Directory;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
+import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.min.MinAggregator;
 
 public class MinAggregatorTests extends AggregatorTestCase {
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.sum;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -38,6 +38,9 @@ import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.metrics.sum.Sum;
+import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.sum.SumAggregator;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -34,6 +34,9 @@ import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator;
+import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg;
 
 import java.io.IOException;
 import java.util.Arrays;
 
@@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -64,4 +65,44 @@ public class InternalAvgTests extends InternalAggregationTestCase<InternalAvg> {
             assertEquals(avg.getValueAsString(), parsed.getValueAsString());
         }
     }
+
+    @Override
+    protected InternalAvg mutateInstance(InternalAvg instance) {
+        String name = instance.getName();
+        double sum = instance.getSum();
+        long count = instance.getCount();
+        DocValueFormat formatter = instance.getFormatter();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 3)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(sum)) {
+                sum += between(1, 100);
+            } else {
+                sum = between(1, 100);
+            }
+            break;
+        case 2:
+            if (Double.isFinite(count)) {
+                count += between(1, 100);
+            } else {
+                count = between(1, 100);
+            }
+            break;
+        case 3:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalAvg(name, sum, count, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -19,18 +19,20 @@
 
 package org.elasticsearch.search.aggregations.metrics.cardinality;
 
+import com.carrotsearch.hppc.BitMixer;
+
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.MockBigArrays;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
-import org.elasticsearch.test.InternalAggregationTestCase;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.test.InternalAggregationTestCase;
 import org.junit.After;
 
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -61,7 +63,7 @@ public class InternalCardinalityTests extends InternalAggregationTestCase<Intern
                 new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()), 1);
         algos.add(hllpp);
         for (int i = 0; i < 100; i++) {
-            hllpp.collect(0, randomIntBetween(1, 100));
+            hllpp.collect(0, BitMixer.mix64(randomIntBetween(1, 100)));
        }
         return new InternalCardinality(name, hllpp, pipelineAggregators, metaData);
     }
@@ -92,4 +94,39 @@ public class InternalCardinalityTests extends InternalAggregationTestCase<Intern
         assertEquals(aggregation.getValue(), parsed.getValue(), Double.MIN_VALUE);
         assertEquals(aggregation.getValueAsString(), parsed.getValueAsString());
     }
+
+    @Override
+    protected InternalCardinality mutateInstance(InternalCardinality instance) {
+        String name = instance.getName();
+        HyperLogLogPlusPlus state = instance.getState();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 2)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            HyperLogLogPlusPlus newState = new HyperLogLogPlusPlus(state.precision(),
+                    new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()), 0);
+            newState.merge(0, state, 0);
+            int extraValues = between(10, 100);
+            for (int i = 0; i < extraValues; i++) {
+                newState.collect(0, BitMixer.mix64(randomIntBetween(500, 10000)));
+            }
+            algos.add(newState);
+            state = newState;
+            break;
+        case 2:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalCardinality(name, state, pipelineAggregators, metaData);
+    }
 }
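Mutating a cardinality sketch is more involved than bumping a number: HyperLogLogPlusPlus is released through BigArrays, so the replacement is registered in algos for cleanup, seeded by merging the original's registers, and then pushed apart by collecting extra values. The values go through BitMixer.mix64 first, mirroring the hashing the cardinality aggregator applies at collect time (the same fix made to createTestInstance above). A condensed view of just that mutation step; `state` and `bigArrays` stand in for the surrounding test fixture:

// Condensed from the case 1 branch above; `state` and `bigArrays` are
// stand-ins for the test fixture, not new API.
HyperLogLogPlusPlus newState = new HyperLogLogPlusPlus(state.precision(), bigArrays, 0);
newState.merge(0, state, 0);                       // start from the original registers
for (int i = 0; i < 50; i++) {
    newState.collect(0, BitMixer.mix64(500 + i));  // hash like the real aggregator does
}
// newState now holds strictly more observations than state, so equals must fail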
@@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -102,4 +103,55 @@ public class InternalGeoBoundsTests extends InternalAggregationTestCase<Internal
     protected Writeable.Reader<InternalGeoBounds> instanceReader() {
         return InternalGeoBounds::new;
     }
+
+    @Override
+    protected InternalGeoBounds mutateInstance(InternalGeoBounds instance) {
+        String name = instance.getName();
+        double top = instance.top;
+        double bottom = instance.bottom;
+        double posLeft = instance.posLeft;
+        double posRight = instance.posRight;
+        double negLeft = instance.negLeft;
+        double negRight = instance.negRight;
+        boolean wrapLongitude = instance.wrapLongitude;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 8)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            top += between(1, 20);
+            break;
+        case 2:
+            bottom += between(1, 20);
+            break;
+        case 3:
+            posLeft += between(1, 20);
+            break;
+        case 4:
+            posRight += between(1, 20);
+            break;
+        case 5:
+            negLeft += between(1, 20);
+            break;
+        case 6:
+            negRight += between(1, 20);
+            break;
+        case 7:
+            wrapLongitude = wrapLongitude == false;
+            break;
+        case 8:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, pipelineAggregators, metaData);
+    }
 }
@@ -27,6 +27,7 @@ import org.elasticsearch.test.InternalAggregationTestCase;
 import org.elasticsearch.test.geo.RandomGeoGenerator;
 
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -80,4 +81,41 @@ public class InternalGeoCentroidTests extends InternalAggregationTestCase<Intern
         assertEquals(aggregation.centroid(), parsed.centroid());
         assertEquals(aggregation.count(), parsed.count());
     }
+
+    @Override
+    protected InternalGeoCentroid mutateInstance(InternalGeoCentroid instance) {
+        String name = instance.getName();
+        GeoPoint centroid = instance.centroid();
+        long count = instance.count();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 3)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            count += between(1, 100);
+            break;
+        case 2:
+            GeoPoint newCentroid = new GeoPoint(centroid);
+            if (randomBoolean()) {
+                newCentroid.resetLat(centroid.getLat() / 2.0);
+            } else {
+                newCentroid.resetLon(centroid.getLon() / 2.0);
+            }
+            centroid = newCentroid;
+            break;
+        case 3:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalGeoCentroid(name, centroid, count, pipelineAggregators, metaData);
+    }
 }
@@ -27,6 +27,7 @@ import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentil
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -61,4 +62,45 @@ public class InternalHDRPercentilesRanksTests extends InternalPercentilesRanksTe
     protected Class<? extends ParsedPercentiles> implementationClass() {
         return ParsedHDRPercentileRanks.class;
     }
+
+    @Override
+    protected InternalHDRPercentileRanks mutateInstance(InternalHDRPercentileRanks instance) {
+        String name = instance.getName();
+        double[] percents = instance.keys;
+        DoubleHistogram state = instance.state;
+        boolean keyed = instance.keyed;
+        DocValueFormat formatter = instance.formatter();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 4)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            percents = Arrays.copyOf(percents, percents.length + 1);
+            percents[percents.length - 1] = randomDouble() * 100;
+            Arrays.sort(percents);
+            break;
+        case 2:
+            state = new DoubleHistogram(state);
+            for (int i = 0; i < between(10, 100); i++) {
+                state.recordValue(randomDouble());
+            }
+            break;
+        case 3:
+            keyed = keyed == false;
+            break;
+        case 4:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalHDRPercentileRanks(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -88,4 +89,45 @@ public class InternalHDRPercentilesTests extends InternalPercentilesTestCase<Int
             assertEquals(aggregation.percentile(percent), percentile.getValue(), 0.0d);
         }
     }
+
+    @Override
+    protected InternalHDRPercentiles mutateInstance(InternalHDRPercentiles instance) {
+        String name = instance.getName();
+        double[] percents = instance.keys;
+        DoubleHistogram state = instance.state;
+        boolean keyed = instance.keyed;
+        DocValueFormat formatter = instance.formatter();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 4)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            percents = Arrays.copyOf(percents, percents.length + 1);
+            percents[percents.length - 1] = randomDouble() * 100;
+            Arrays.sort(percents);
+            break;
+        case 2:
+            state = new DoubleHistogram(state);
+            for (int i = 0; i < between(10, 100); i++) {
+                state.recordValue(randomDouble());
+            }
+            break;
+        case 3:
+            keyed = keyed == false;
+            break;
+        case 4:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalHDRPercentiles(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentil
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -74,4 +75,47 @@ public class InternalTDigestPercentilesRanksTests extends InternalPercentilesRan
     protected Class<? extends ParsedPercentiles> implementationClass() {
         return ParsedTDigestPercentileRanks.class;
     }
+
+    @Override
+    protected InternalTDigestPercentileRanks mutateInstance(InternalTDigestPercentileRanks instance) {
+        String name = instance.getName();
+        double[] percents = instance.keys;
+        TDigestState state = instance.state;
+        boolean keyed = instance.keyed;
+        DocValueFormat formatter = instance.formatter();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 4)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            percents = Arrays.copyOf(percents, percents.length + 1);
+            percents[percents.length - 1] = randomDouble() * 100;
+            Arrays.sort(percents);
+            break;
+        case 2:
+            TDigestState newState = new TDigestState(state.compression());
+            newState.add(state);
+            for (int i = 0; i < between(10, 100); i++) {
+                newState.add(randomDouble());
+            }
+            state = newState;
+            break;
+        case 3:
+            keyed = keyed == false;
+            break;
+        case 4:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalTDigestPercentileRanks(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentil
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -70,4 +71,47 @@ public class InternalTDigestPercentilesTests extends InternalPercentilesTestCase
     protected Class<? extends ParsedPercentiles> implementationClass() {
         return ParsedTDigestPercentiles.class;
     }
+
+    @Override
+    protected InternalTDigestPercentiles mutateInstance(InternalTDigestPercentiles instance) {
+        String name = instance.getName();
+        double[] percents = instance.keys;
+        TDigestState state = instance.state;
+        boolean keyed = instance.keyed;
+        DocValueFormat formatter = instance.formatter();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 4)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            percents = Arrays.copyOf(percents, percents.length + 1);
+            percents[percents.length - 1] = randomDouble() * 100;
+            Arrays.sort(percents);
+            break;
+        case 2:
+            TDigestState newState = new TDigestState(state.compression());
+            newState.add(state);
+            for (int i = 0; i < between(10, 100); i++) {
+                newState.add(randomDouble());
+            }
+            state = newState;
+            break;
+        case 3:
+            keyed = keyed == false;
+            break;
+        case 4:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalTDigestPercentiles(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
+    }
 }
@@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
@@ -191,4 +192,39 @@ public class InternalScriptedMetricTests extends InternalAggregationTestCase<Int
     protected Predicate<String> excludePathsFromXContentInsertion() {
         return path -> path.contains(CommonFields.VALUE.getPreferredName());
     }
+
+    @Override
+    protected InternalScriptedMetric mutateInstance(InternalScriptedMetric instance) throws IOException {
+        String name = instance.getName();
+        Object value = instance.aggregation();
+        Script reduceScript = instance.reduceScript;
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 3)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            Object newValue = randomValue(valueTypes, 0);
+            while (newValue.equals(value)) {
+                newValue = randomValue(valueTypes, 0);
+            }
+            value = newValue;
+            break;
+        case 2:
+            reduceScript = new Script(ScriptType.INLINE, MockScriptEngine.NAME, REDUCE_SCRIPT_NAME + "-mutated", Collections.emptyMap());
+            break;
+        case 3:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalScriptedMetric(name, value, reduceScript, pipelineAggregators, metaData);
+    }
 }
@@ -247,4 +247,44 @@ public class InternalTopHitsTests extends InternalAggregationTestCase<InternalTo
             return comparator.reversed();
         }
     }
+
+    @Override
+    protected InternalTopHits mutateInstance(InternalTopHits instance) {
+        String name = instance.getName();
+        int from = instance.getFrom();
+        int size = instance.getSize();
+        TopDocs topDocs = instance.getTopDocs();
+        SearchHits searchHits = instance.getHits();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 5)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            from += between(1, 100);
+            break;
+        case 2:
+            size += between(1, 100);
+            break;
+        case 3:
+            topDocs = new TopDocs(topDocs.totalHits + between(1, 100), topDocs.scoreDocs, topDocs.getMaxScore() + randomFloat());
+            break;
+        case 4:
+            searchHits = new SearchHits(searchHits.getHits(), searchHits.totalHits + between(1, 100),
+                    searchHits.getMaxScore() + randomFloat());
+            break;
+        case 5:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalTopHits(name, from, size, topDocs, searchHits, pipelineAggregators, metaData);
+    }
 }
@@ -24,6 +24,7 @@ import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -50,4 +51,35 @@ public class InternalValueCountTests extends InternalAggregationTestCase<Interna
         assertEquals(valueCount.getValue(), ((ParsedValueCount) parsedAggregation).getValue(), 0);
         assertEquals(valueCount.getValueAsString(), ((ParsedValueCount) parsedAggregation).getValueAsString());
     }
+
+    @Override
+    protected InternalValueCount mutateInstance(InternalValueCount instance) {
+        String name = instance.getName();
+        long value = instance.getValue();
+        List<PipelineAggregator> pipelineAggregators = instance.pipelineAggregators();
+        Map<String, Object> metaData = instance.getMetaData();
+        switch (between(0, 2)) {
+        case 0:
+            name += randomAlphaOfLength(5);
+            break;
+        case 1:
+            if (Double.isFinite(value)) {
+                value += between(1, 100);
+            } else {
+                value = between(1, 100);
+            }
+            break;
+        case 2:
+            if (metaData == null) {
+                metaData = new HashMap<>(1);
+            } else {
+                metaData = new HashMap<>(instance.getMetaData());
+            }
+            metaData.put(randomAlphaOfLength(15), randomInt());
+            break;
+        default:
+            throw new AssertionError("Illegal randomisation branch");
+        }
+        return new InternalValueCount(name, value, pipelineAggregators, metaData);
+    }
 }