Add parsing methods to Range aggregations (#24583)

This commit is contained in:
Tanguy Leroux 2017-05-12 16:52:47 +02:00 committed by GitHub
parent 29a5694bb7
commit b9d2ecc3ea
11 changed files with 407 additions and 11 deletions

View File

@ -107,6 +107,10 @@ public abstract class ParsedMultiBucketAggregation<B extends ParsedMultiBucketAg
this.keyed = keyed; this.keyed = keyed;
} }
// Whether the response was rendered in "keyed" format, i.e. buckets serialized as an
// object keyed by bucket key rather than as an array. Set from the parsed response.
protected boolean isKeyed() {
return keyed;
}
protected void setAggregations(Aggregations aggregations) { protected void setAggregations(Aggregations aggregations) {
this.aggregations = aggregations; this.aggregations = aggregations;
} }

View File

@ -0,0 +1,193 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Client-side representation of a {@code range} aggregation, rebuilt by parsing the
 * XContent (JSON) form of an {@code InternalRange}. Also serves as the base class for
 * the date-range and geo-distance variants, which reuse its bucket parsing.
 */
public class ParsedRange extends ParsedMultiBucketAggregation<ParsedRange.ParsedBucket> implements Range {

    @Override
    public String getType() {
        return RangeAggregationBuilder.NAME;
    }

    @Override
    public List<? extends Range.Bucket> getBuckets() {
        return buckets;
    }

    /**
     * Declares the standard multi-bucket fields on {@code objectParser}, delegating
     * bucket construction to the given parsers so subclasses can plug in their own
     * {@link ParsedBucket} implementations for keyed and non-keyed responses.
     */
    protected static void declareParsedRangeFields(final ObjectParser<? extends ParsedRange, Void> objectParser,
                                                   final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser,
                                                   final CheckedFunction<XContentParser, ParsedBucket, IOException> keyedBucketParser) {
        declareMultiBucketAggregationFields(objectParser, bucketParser::apply, keyedBucketParser::apply);
    }

    // Fully configured in the static initializer and never reassigned: declare it final.
    private static final ObjectParser<ParsedRange, Void> PARSER =
            new ObjectParser<>(ParsedRange.class.getSimpleName(), true, ParsedRange::new);
    static {
        declareParsedRangeFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    /**
     * Parses a {@code range} aggregation from {@code parser} and assigns it the given name.
     */
    public static ParsedRange fromXContent(XContentParser parser, String name) throws IOException {
        ParsedRange aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    /** A single parsed range bucket: a key plus optional numeric {@code from}/{@code to} bounds. */
    public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Range.Bucket {

        protected String key;
        // Unset bounds default to infinities, mirroring how InternalRange models unbounded edges.
        protected double from = Double.NEGATIVE_INFINITY;
        protected String fromAsString;
        protected double to = Double.POSITIVE_INFINITY;
        protected String toAsString;

        @Override
        public String getKey() {
            return getKeyAsString();
        }

        @Override
        public String getKeyAsString() {
            // Prefer an explicit "key_as_string" from the response over the raw key.
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            return key;
        }

        @Override
        public Object getFrom() {
            return from;
        }

        @Override
        public String getFromAsString() {
            if (fromAsString != null) {
                return fromAsString;
            }
            return doubleAsString(from);
        }

        @Override
        public Object getTo() {
            return to;
        }

        @Override
        public String getToAsString() {
            if (toAsString != null) {
                return toAsString;
            }
            return doubleAsString(to);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            if (isKeyed()) {
                // Keyed format: the bucket key is the enclosing object's field name.
                builder.startObject(key);
            } else {
                builder.startObject();
                builder.field(CommonFields.KEY.getPreferredName(), key);
            }
            // Infinite (i.e. unbounded) edges are omitted from the output.
            if (Double.isInfinite(from) == false) {
                builder.field(CommonFields.FROM.getPreferredName(), from);
                if (fromAsString != null) {
                    builder.field(CommonFields.FROM_AS_STRING.getPreferredName(), fromAsString);
                }
            }
            if (Double.isInfinite(to) == false) {
                builder.field(CommonFields.TO.getPreferredName(), to);
                if (toAsString != null) {
                    builder.field(CommonFields.TO_AS_STRING.getPreferredName(), toAsString);
                }
            }
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();
            return builder;
        }

        // Returns null for infinite bounds so "unbounded" is distinguishable from a real value.
        private static String doubleAsString(double d) {
            return Double.isInfinite(d) ? null : Double.toString(d);
        }

        /**
         * Parses one bucket from {@code parser}, creating it via {@code bucketSupplier}.
         * In keyed mode the current token must be the field name carrying the bucket key,
         * followed by the bucket object; otherwise parsing starts inside the bucket object.
         */
        protected static <B extends ParsedBucket> B parseRangeBucketXContent(final XContentParser parser,
                                                                             final Supplier<B> bucketSupplier,
                                                                             final boolean keyed) throws IOException {
            final B bucket = bucketSupplier.get();
            bucket.setKeyed(keyed);

            XContentParser.Token token = parser.currentToken();
            String currentFieldName = parser.currentName();
            if (keyed) {
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                bucket.key = currentFieldName;
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }

            List<Aggregation> aggregations = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        bucket.key = parser.text();
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    } else if (CommonFields.FROM.getPreferredName().equals(currentFieldName)) {
                        bucket.from = parser.doubleValue();
                    } else if (CommonFields.FROM_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.fromAsString = parser.text();
                    } else if (CommonFields.TO.getPreferredName().equals(currentFieldName)) {
                        bucket.to = parser.doubleValue();
                    } else if (CommonFields.TO_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.toAsString = parser.text();
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    // Nested objects are sub-aggregations serialized with typed keys ("type#name").
                    aggregations.add(XContentParserUtils.parseTypedKeysObject(parser, Aggregation.TYPED_KEYS_DELIMITER, Aggregation.class));
                }
            }
            bucket.setAggregations(new Aggregations(aggregations));
            return bucket;
        }

        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
        }
    }
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.io.IOException;
/**
 * Client-side representation of a {@code date_range} aggregation. Reuses the bucket
 * parsing from {@link ParsedRange} but exposes the bucket bounds as {@link DateTime}
 * values instead of raw doubles.
 */
public class ParsedDateRange extends ParsedRange {

    @Override
    public String getType() {
        return DateRangeAggregationBuilder.NAME;
    }

    // Fully configured in the static initializer and never reassigned: declare it final.
    private static final ObjectParser<ParsedDateRange, Void> PARSER =
            new ObjectParser<>(ParsedDateRange.class.getSimpleName(), true, ParsedDateRange::new);
    static {
        declareParsedRangeFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    /**
     * Parses a {@code date_range} aggregation from {@code parser} and assigns it the given name.
     */
    public static ParsedDateRange fromXContent(XContentParser parser, String name) throws IOException {
        ParsedDateRange aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    /** A parsed date-range bucket whose bounds are epoch milliseconds rendered as UTC dates. */
    public static class ParsedBucket extends ParsedRange.ParsedBucket {

        @Override
        public Object getFrom() {
            return doubleAsDateTime(from);
        }

        @Override
        public Object getTo() {
            return doubleAsDateTime(to);
        }

        // Converts an epoch-millis value to a UTC DateTime; infinite (unbounded) edges map to null.
        private static DateTime doubleAsDateTime(Double d) {
            if (d == null || Double.isInfinite(d)) {
                return null;
            }
            return new DateTime(d.longValue(), DateTimeZone.UTC);
        }

        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
        }
    }
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
import java.io.IOException;
/**
 * Client-side representation of a {@code geo_distance} aggregation. Its buckets are
 * plain numeric ranges, so it reuses {@link ParsedRange}'s bucket parsing unchanged.
 */
public class ParsedGeoDistance extends ParsedRange {

    @Override
    public String getType() {
        return GeoDistanceAggregationBuilder.NAME;
    }

    // Fully configured in the static initializer and never reassigned: declare it final.
    private static final ObjectParser<ParsedGeoDistance, Void> PARSER =
            new ObjectParser<>(ParsedGeoDistance.class.getSimpleName(), true, ParsedGeoDistance::new);
    static {
        declareParsedRangeFields(PARSER,
                parser -> ParsedBucket.fromXContent(parser, false),
                parser -> ParsedBucket.fromXContent(parser, true));
    }

    /**
     * Parses a {@code geo_distance} aggregation from {@code parser} and assigns it the given name.
     */
    public static ParsedGeoDistance fromXContent(XContentParser parser, String name) throws IOException {
        ParsedGeoDistance aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    /** A parsed geo-distance bucket; identical in shape to a plain range bucket. */
    public static class ParsedBucket extends ParsedRange.ParsedBucket {
        static ParsedBucket fromXContent(final XContentParser parser, final boolean keyed) throws IOException {
            return parseRangeBucketXContent(parser, ParsedBucket::new, keyed);
        }
    }
}

View File

@ -37,6 +37,9 @@ import org.elasticsearch.search.aggregations.bucket.missing.InternalMissingTests
import org.elasticsearch.search.aggregations.bucket.nested.InternalNestedTests; import org.elasticsearch.search.aggregations.bucket.nested.InternalNestedTests;
import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNestedTests; import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNestedTests;
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSamplerTests; import org.elasticsearch.search.aggregations.bucket.sampler.InternalSamplerTests;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRangeTests;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistanceTests;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTermsTests; import org.elasticsearch.search.aggregations.bucket.terms.DoubleTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.LongTermsTests; import org.elasticsearch.search.aggregations.bucket.terms.LongTermsTests;
import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests; import org.elasticsearch.search.aggregations.bucket.terms.StringTermsTests;
@ -119,6 +122,9 @@ public class AggregationsTests extends ESTestCase {
aggsTests.add(new InternalFilterTests()); aggsTests.add(new InternalFilterTests());
aggsTests.add(new InternalSamplerTests()); aggsTests.add(new InternalSamplerTests());
aggsTests.add(new InternalGeoHashGridTests()); aggsTests.add(new InternalGeoHashGridTests());
aggsTests.add(new InternalRangeTests());
aggsTests.add(new InternalDateRangeTests());
aggsTests.add(new InternalGeoDistanceTests());
return Collections.unmodifiableList(aggsTests); return Collections.unmodifiableList(aggsTests);
} }

View File

@ -117,7 +117,7 @@ public abstract class InternalMultiBucketAggregationTestCase<T extends InternalA
} }
} }
private void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) { protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) {
assertTrue(expected instanceof InternalMultiBucketAggregation.InternalBucket); assertTrue(expected instanceof InternalMultiBucketAggregation.InternalBucket);
assertTrue(actual instanceof ParsedMultiBucketAggregation.ParsedBucket); assertTrue(actual instanceof ParsedMultiBucketAggregation.ParsedBucket);

View File

@ -20,15 +20,17 @@
package org.elasticsearch.search.aggregations.bucket.range; package org.elasticsearch.search.aggregations.bucket.range;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.Before; import org.junit.Before;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
public abstract class InternalRangeTestCase<T extends InternalAggregation & Range> extends InternalAggregationTestCase<T> { public abstract class InternalRangeTestCase<T extends InternalAggregation & Range> extends InternalMultiBucketAggregationTestCase<T> {
private boolean keyed; private boolean keyed;
@ -40,13 +42,17 @@ public abstract class InternalRangeTestCase<T extends InternalAggregation & Rang
} }
@Override @Override
protected T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) { protected T createTestInstance(String name,
return createTestInstance(name, pipelineAggregators, metaData, keyed); List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations) {
return createTestInstance(name, pipelineAggregators, metaData, aggregations, keyed);
} }
protected abstract T createTestInstance(String name, protected abstract T createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData, Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed); boolean keyed);
@Override @Override
protected void assertReduced(T reduced, List<T> inputs) { protected void assertReduced(T reduced, List<T> inputs) {
@ -65,4 +71,20 @@ public abstract class InternalRangeTestCase<T extends InternalAggregation & Rang
} }
assertEquals(expectedCounts, actualCounts); assertEquals(expectedCounts, actualCounts);
} }
@Override
protected void assertBucket(MultiBucketsAggregation.Bucket expected, MultiBucketsAggregation.Bucket actual, boolean checkOrder) {
super.assertBucket(expected, actual, checkOrder);
assertTrue(expected instanceof InternalRange.Bucket);
assertTrue(actual instanceof ParsedRange.ParsedBucket);
Range.Bucket expectedRange = (Range.Bucket) expected;
Range.Bucket actualRange = (Range.Bucket) actual;
assertEquals(expectedRange.getFrom(), actualRange.getFrom());
assertEquals(expectedRange.getFromAsString(), actualRange.getFromAsString());
assertEquals(expectedRange.getTo(), actualRange.getTo());
assertEquals(expectedRange.getToAsString(), actualRange.getToAsString());
}
} }

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before; import org.junit.Before;
@ -43,7 +44,7 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
format = randomNumericDocValueFormat(); format = randomNumericDocValueFormat();
final int interval = randomFrom(1, 5, 10, 25, 50, 100); final int interval = randomFrom(1, 5, 10, 25, 50, 100);
final int numRanges = 1;//randomIntBetween(1, 10); final int numRanges = randomIntBetween(1, 10);
List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges); List<Tuple<Double, Double>> listOfRanges = new ArrayList<>(numRanges);
for (int i = 0; i < numRanges; i++) { for (int i = 0; i < numRanges; i++) {
@ -58,11 +59,23 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
listOfRanges.add(Tuple.tuple(0.0, max / 2)); listOfRanges.add(Tuple.tuple(0.0, max / 2));
listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2)); listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2));
} }
if (rarely()) {
listOfRanges.add(Tuple.tuple(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
}
if (rarely()) {
listOfRanges.add(Tuple.tuple(Double.NEGATIVE_INFINITY, randomDouble()));
}
if (rarely()) {
listOfRanges.add(Tuple.tuple(randomDouble(), Double.POSITIVE_INFINITY));
}
ranges = Collections.unmodifiableList(listOfRanges); ranges = Collections.unmodifiableList(listOfRanges);
} }
@Override @Override
protected InternalRange createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData, protected InternalRange createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) { boolean keyed) {
final List<InternalRange.Bucket> buckets = new ArrayList<>(); final List<InternalRange.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < ranges.size(); ++i) { for (int i = 0; i < ranges.size(); ++i) {
@ -70,13 +83,18 @@ public class InternalRangeTests extends InternalRangeTestCase<InternalRange> {
int docCount = randomIntBetween(0, 1000); int docCount = randomIntBetween(0, 1000);
double from = range.v1(); double from = range.v1();
double to = range.v2(); double to = range.v2();
buckets.add( new InternalRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format)); buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format));
} }
return new InternalRange<>(name, buckets, format, keyed, pipelineAggregators, Collections.emptyMap()); return new InternalRange<>(name, buckets, format, keyed, pipelineAggregators, metaData);
} }
@Override @Override
protected Writeable.Reader<InternalRange> instanceReader() { protected Writeable.Reader<InternalRange> instanceReader() {
return InternalRange::new; return InternalRange::new;
} }
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedRange.class;
}
} }

View File

@ -23,6 +23,7 @@ import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase; import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.joda.time.DateTime; import org.joda.time.DateTime;
@ -78,6 +79,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
protected InternalDateRange createTestInstance(String name, protected InternalDateRange createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData, Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) { boolean keyed) {
final List<InternalDateRange.Bucket> buckets = new ArrayList<>(); final List<InternalDateRange.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < dateRanges.size(); ++i) { for (int i = 0; i < dateRanges.size(); ++i) {
@ -85,7 +87,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
int docCount = randomIntBetween(0, 1000); int docCount = randomIntBetween(0, 1000);
double from = range.v1(); double from = range.v1();
double to = range.v2(); double to = range.v2();
buckets.add( new InternalDateRange.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed, format)); buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format));
} }
return new InternalDateRange(name, buckets, format, keyed, pipelineAggregators, metaData); return new InternalDateRange(name, buckets, format, keyed, pipelineAggregators, metaData);
} }
@ -94,4 +96,9 @@ public class InternalDateRangeTests extends InternalRangeTestCase<InternalDateRa
protected Writeable.Reader<InternalDateRange> instanceReader() { protected Writeable.Reader<InternalDateRange> instanceReader() {
return InternalDateRange::new; return InternalDateRange::new;
} }
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedDateRange.class;
}
} }

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase; import org.elasticsearch.search.aggregations.bucket.range.InternalRangeTestCase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.Before; import org.junit.Before;
@ -58,6 +59,7 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
} }
geoDistanceRanges = Collections.unmodifiableList(listOfRanges); geoDistanceRanges = Collections.unmodifiableList(listOfRanges);
} }
@Override @Override
protected Writeable.Reader<InternalGeoDistance> instanceReader() { protected Writeable.Reader<InternalGeoDistance> instanceReader() {
return InternalGeoDistance::new; return InternalGeoDistance::new;
@ -67,6 +69,7 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
protected InternalGeoDistance createTestInstance(String name, protected InternalGeoDistance createTestInstance(String name,
List<PipelineAggregator> pipelineAggregators, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData, Map<String, Object> metaData,
InternalAggregations aggregations,
boolean keyed) { boolean keyed) {
final List<InternalGeoDistance.Bucket> buckets = new ArrayList<>(); final List<InternalGeoDistance.Bucket> buckets = new ArrayList<>();
for (int i = 0; i < geoDistanceRanges.size(); ++i) { for (int i = 0; i < geoDistanceRanges.size(); ++i) {
@ -74,8 +77,13 @@ public class InternalGeoDistanceTests extends InternalRangeTestCase<InternalGeoD
int docCount = randomIntBetween(0, 1000); int docCount = randomIntBetween(0, 1000);
double from = range.v1(); double from = range.v1();
double to = range.v2(); double to = range.v2();
buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, InternalAggregations.EMPTY, keyed)); buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations, keyed));
} }
return new InternalGeoDistance(name, buckets, keyed, pipelineAggregators, metaData); return new InternalGeoDistance(name, buckets, keyed, pipelineAggregators, metaData);
} }
@Override
protected Class<? extends ParsedMultiBucketAggregation> implementationClass() {
return ParsedGeoDistance.class;
}
} }

View File

@ -59,6 +59,12 @@ import org.elasticsearch.search.aggregations.bucket.nested.ParsedReverseNested;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler; import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler; import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler;
import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.ParsedDateRange;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.ParsedGeoDistance;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms; import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
@ -164,6 +170,9 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
namedXContents.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c)); namedXContents.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c));
namedXContents.put(InternalSampler.NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c)); namedXContents.put(InternalSampler.NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c));
namedXContents.put(GeoGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c)); namedXContents.put(GeoGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c));
namedXContents.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c));
namedXContents.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c));
namedXContents.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
return namedXContents.entrySet().stream() return namedXContents.entrySet().stream()
.map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))