Aggregations: Clean up response API for Aggregations

This change makes the response API object for Histogram Aggregations the same for all types of Histogram, and does the same for all types of Ranges.
The change removes getBucketByKey() from all aggregations except filters and terms. It also reduces the methods on the Bucket class to just getKey() and getKeyAsString().
The getKey() method returns Object and the actual type it returns will be appropriate for the type of aggregation being run, e.g. date_histogram will return a DateTime for this method and histogram will return a Number.
This commit is contained in:
Colin Goodheart-Smithe 2015-01-09 15:20:05 +00:00
parent 78c52d559d
commit 285ef0f06d
72 changed files with 1861 additions and 1958 deletions

View File

@ -11,7 +11,7 @@ if a requested index does not exist. This change brings the defaults for this AP
line with the other Indices APIs. The <<multi-index>> options can be used on a request
to change this behavior
`GetIndexRequest.features()` now returns an array of Feature Enums instrad of an array of String values.
`GetIndexRequest.features()` now returns an array of Feature Enums instead of an array of String values.
The following deprecated methods have been removed:
* `GetIndexRequest.addFeatures(String[])` - Please use `GetIndexRequest.addFeatures(Feature[])` instead
* `GetIndexRequest.features(String[])` - Please use `GetIndexRequest.features(Feature[])` instead
@ -107,6 +107,20 @@ Some query builders have been removed or renamed:
* `filtered(...)` removed. Use `filteredQuery(...)` instead.
* `inQuery(...)` removed.
==== Aggregations
The `date_histogram` aggregation now returns a `Histogram` object in the response, and the `DateHistogram` class has been removed. Similarly
the `date_range`, `ipv4_range`, and `geo_distance` aggregations all return a `Range` object in the response, and the `IPV4Range`, `DateRange`,
and `GeoDistance` classes have been removed. The motivation for this is to have a single response API for the Range and Histogram aggregations
regardless of the type of data being queried. To support this some changes were made in the `MultiBucketAggregation` interface which applies
to all bucket aggregations:
* The `getKey()` method now returns `Object` instead of `String`. The actual object type returned depends on the type of aggregation requested
(e.g. the `date_histogram` will return a `DateTime` object for this method whereas a `histogram` will return a `Number`).
* A `getKeyAsString()` method has been added to return the String representation of the key.
* All other `getKeyAsX()` methods have been removed.
* The `getBucketByKey(String)` methods have been removed on all aggregations except the `filters` and `terms` aggregations.
=== Terms filter lookup caching
The terms filter lookup mechanism does not support the `cache` option anymore
@ -132,3 +146,4 @@ primary shards.
=== Mappings
The setting `index.mapping.allow_type_wrapper` has been removed. Documents should always be sent without the type as the root element.

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.search.aggregations.bucket.children.Children;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
@ -28,7 +29,6 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridBuilder;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.global.GlobalBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramBuilder;
@ -40,11 +40,8 @@ import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedBuilder;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRange;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeBuilder;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistance;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceBuilder;
import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4Range;
import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.Comparators;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.aggregations.Aggregation;
@ -28,7 +27,6 @@ import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.HasAggregations;
import org.elasticsearch.search.aggregations.support.AggregationPath;
import java.util.Collection;
import java.util.List;
/**
@ -44,14 +42,14 @@ public interface MultiBucketsAggregation extends Aggregation {
public interface Bucket extends HasAggregations, ToXContent, Streamable {
/**
* @return The key associated with the bucket as a string
* @return The key associated with the bucket
*/
String getKey();
Object getKey();
/**
* @return The key associated with the bucket as text (ideal for further streaming this instance)
* @return The key associated with the bucket as a string
*/
Text getKeyAsText();
String getKeyAsString();
/**
* @return The number of documents that fall within this bucket
@ -96,12 +94,4 @@ public interface MultiBucketsAggregation extends Aggregation {
* @return The buckets of this aggregation.
*/
List<? extends Bucket> getBuckets();
/**
* The bucket that is associated with the given key.
*
* @param key The key of the requested bucket.
* @return The bucket
*/
<B extends Bucket> B getBucketByKey(String key);
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.filters;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import java.util.Collection;
import java.util.List;
/**
@ -41,7 +40,6 @@ public interface Filters extends MultiBucketsAggregation {
*/
List<? extends Bucket> getBuckets();
@Override
Bucket getBucketByKey(String key);
}

View File

@ -20,12 +20,15 @@
package org.elasticsearch.search.aggregations.bucket.filters;
import com.google.common.collect.Lists;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
@ -91,13 +94,14 @@ public class InternalFilters extends InternalMultiBucketAggregation implements F
this.keyed = keyed;
}
@Override
public String getKey() {
return key;
}
@Override
public Text getKeyAsText() {
return new StringText(getKey());
public String getKeyAsString() {
return key;
}
@Override

View File

@ -18,11 +18,8 @@
*/
package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import java.util.Collection;
import java.util.List;
/**
@ -36,16 +33,6 @@ public interface GeoHashGrid extends MultiBucketsAggregation {
*/
public static interface Bucket extends MultiBucketsAggregation.Bucket {
/**
* @return The geohash of the cell as a geo point
*/
GeoPoint getKeyAsGeoPoint();
/**
* @return A numeric representation of the geohash of the cell
*/
Number getKeyAsNumber();
}
/**
@ -54,11 +41,4 @@ public interface GeoHashGrid extends MultiBucketsAggregation {
@Override
List<Bucket> getBuckets();
@Override
Bucket getBucketByKey(String key);
Bucket getBucketByKey(Number key);
Bucket getBucketByKey(GeoPoint key);
}

View File

@ -23,16 +23,22 @@ import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.LongObjectPagedHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* Represents a grid of cells where each cell's location is determined by a geohash.
@ -90,16 +96,13 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation implemen
this.geohashAsLong = geohashAsLong;
}
public String getKey() {
@Override
public String getKeyAsString() {
return GeoHashUtils.toString(geohashAsLong);
}
@Override
public Text getKeyAsText() {
return new StringText(getKey());
}
public GeoPoint getKeyAsGeoPoint() {
public GeoPoint getKey() {
return GeoHashUtils.decode(geohashAsLong);
}
@ -135,11 +138,6 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation implemen
return new Bucket(geohashAsLong, docCount, aggs);
}
@Override
public Number getKeyAsNumber() {
return geohashAsLong;
}
@Override
public void readFrom(StreamInput in) throws IOException {
geohashAsLong = in.readLong();
@ -157,7 +155,7 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation implemen
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CommonFields.KEY, getKeyAsText());
builder.field(CommonFields.KEY, getKeyAsString());
builder.field(CommonFields.DOC_COUNT, docCount);
aggregations.toXContentInternal(builder, params);
builder.endObject();
@ -189,27 +187,6 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation implemen
return (List<GeoHashGrid.Bucket>) o;
}
@Override
public GeoHashGrid.Bucket getBucketByKey(String geohash) {
if (bucketMap == null) {
bucketMap = new HashMap<>(buckets.size());
for (Bucket bucket : buckets) {
bucketMap.put(bucket.getKey(), bucket);
}
}
return bucketMap.get(geohash);
}
@Override
public GeoHashGrid.Bucket getBucketByKey(Number key) {
return getBucketByKey(GeoHashUtils.toString(key.longValue()));
}
@Override
public GeoHashGrid.Bucket getBucketByKey(GeoPoint key) {
return getBucketByKey(key.geohash());
}
@Override
public InternalGeoHashGrid reduce(ReduceContext reduceContext) {
List<InternalAggregation> aggregations = reduceContext.aggregations();

View File

@ -1,95 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.joda.time.DateTime;
import java.util.List;
/**
 * A {@code date_histogram} aggregation: a histogram over date/time values whose
 * bucket width is expressed as a calendar-aware {@link Interval}.
 */
public interface DateHistogram extends Histogram {

    /**
     * A bucket of a date histogram. Extends {@link Histogram.Bucket} with
     * date-typed access to the bucket key.
     */
    static interface Bucket extends Histogram.Bucket {

        /**
         * @return the key as a date construct (in UTC timezone).
         */
        DateTime getKeyAsDate();
    }

    /**
     * @return the buckets of this histogram, typed as date-histogram buckets.
     */
    @Override
    List<? extends DateHistogram.Bucket> getBuckets();

    /**
     * @return the bucket associated with the given key (its string form).
     */
    @Override
    Bucket getBucketByKey(String key);

    /**
     * @return the bucket associated with the given numeric (millis) key.
     */
    @Override
    Bucket getBucketByKey(Number key);

    /**
     * @return the bucket associated with the given date key.
     */
    Bucket getBucketByKey(DateTime key);

    /**
     * The interval the date histogram is based on. The interval is carried as an
     * opaque date-math expression string (e.g. {@code "1M"}) which
     * {@link #toString()} returns verbatim.
     */
    static class Interval {

        // Common calendar intervals, expressed in date-math syntax.
        public static final Interval SECOND = new Interval("1s");
        public static final Interval MINUTE = new Interval("1m");
        public static final Interval HOUR = new Interval("1h");
        public static final Interval DAY = new Interval("1d");
        public static final Interval WEEK = new Interval("1w");
        public static final Interval MONTH = new Interval("1M");
        public static final Interval QUARTER = new Interval("1q");
        public static final Interval YEAR = new Interval("1y");

        /** @return an interval of {@code sec} seconds. */
        public static Interval seconds(int sec) {
            return new Interval(sec + "s");
        }

        /** @return an interval of {@code min} minutes. */
        public static Interval minutes(int min) {
            return new Interval(min + "m");
        }

        /** @return an interval of {@code hours} hours. */
        public static Interval hours(int hours) {
            return new Interval(hours + "h");
        }

        /** @return an interval of {@code days} days. */
        public static Interval days(int days) {
            return new Interval(days + "d");
        }

        /** @return an interval of {@code weeks} weeks. */
        public static Interval weeks(int weeks) {
            return new Interval(weeks + "w");
        }

        // The raw date-math expression; never parsed here, only echoed back.
        private final String expression;

        public Interval(String expression) {
            this.expression = expression;
        }

        @Override
        public String toString() {
            return expression;
        }
    }
}

View File

@ -63,7 +63,7 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
/**
* Set the interval.
*/
public DateHistogramBuilder interval(DateHistogram.Interval interval) {
public DateHistogramBuilder interval(DateHistogramInterval interval) {
this.interval = interval;
return this;
}
@ -71,7 +71,7 @@ public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHis
/**
* Set the order by which the buckets will be returned.
*/
public DateHistogramBuilder order(DateHistogram.Order order) {
public DateHistogramBuilder order(Histogram.Order order) {
this.order = order;
return this;
}

View File

@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.histogram;
/**
 * A calendar-aware interval specification for the {@code date_histogram}
 * aggregation. The interval is carried as an opaque date-math expression
 * string (for example {@code "1M"} for one month), which {@link #toString()}
 * returns verbatim.
 */
public class DateHistogramInterval {

    // The raw date-math expression; never parsed here, only echoed back.
    private final String expression;

    /**
     * Creates an interval from a raw date-math expression such as {@code "90m"}.
     */
    public DateHistogramInterval(String expression) {
        this.expression = expression;
    }

    // Common single-unit calendar intervals.
    public static final DateHistogramInterval SECOND = seconds(1);
    public static final DateHistogramInterval MINUTE = minutes(1);
    public static final DateHistogramInterval HOUR = hours(1);
    public static final DateHistogramInterval DAY = days(1);
    public static final DateHistogramInterval WEEK = weeks(1);
    public static final DateHistogramInterval MONTH = new DateHistogramInterval("1M");
    public static final DateHistogramInterval QUARTER = new DateHistogramInterval("1q");
    public static final DateHistogramInterval YEAR = new DateHistogramInterval("1y");

    /** @return an interval spanning the given number of seconds. */
    public static DateHistogramInterval seconds(int sec) {
        return new DateHistogramInterval(sec + "s");
    }

    /** @return an interval spanning the given number of minutes. */
    public static DateHistogramInterval minutes(int min) {
        return new DateHistogramInterval(min + "m");
    }

    /** @return an interval spanning the given number of hours. */
    public static DateHistogramInterval hours(int hours) {
        return new DateHistogramInterval(hours + "h");
    }

    /** @return an interval spanning the given number of days. */
    public static DateHistogramInterval days(int days) {
        return new DateHistogramInterval(days + "d");
    }

    /** @return an interval spanning the given number of weeks. */
    public static DateHistogramInterval weeks(int weeks) {
        return new DateHistogramInterval(weeks + "w");
    }

    @Override
    public String toString() {
        return expression;
    }
}

View File

@ -34,11 +34,6 @@ public interface Histogram extends MultiBucketsAggregation {
*/
static interface Bucket extends MultiBucketsAggregation.Bucket {
/**
* @return The key associated with the bucket (all documents that fall in this bucket were rounded to this key)
*/
Number getKeyAsNumber();
}
/**
@ -46,22 +41,6 @@ public interface Histogram extends MultiBucketsAggregation {
*/
List<? extends Bucket> getBuckets();
/**
* Returns a bucket by the key associated with it.
*
* @param key The key of the bucket.
* @return The bucket that is associated with the given key.
*/
Bucket getBucketByKey(String key);
/**
* Returns a bucket by the key associated with it.
*
* @param key The key of the bucket.
* @return The bucket that is associated with the given key.
*/
Bucket getBucketByKey(Number key);
/**
* A strategy defining the order in which the buckets in this histogram are ordered.

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.AggregationStreams;
@ -36,7 +37,7 @@ import java.util.Map;
/**
*
*/
public class InternalDateHistogram extends InternalHistogram<InternalDateHistogram.Bucket> implements DateHistogram {
public class InternalDateHistogram extends InternalHistogram<InternalDateHistogram.Bucket> {
final static Type TYPE = new Type("date_histogram", "dhisto");
final static Factory FACTORY = new Factory();
@ -71,7 +72,7 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
BucketStreams.registerStream(BUCKET_STREAM, TYPE.stream());
}
static class Bucket extends InternalHistogram.Bucket implements DateHistogram.Bucket {
static class Bucket extends InternalHistogram.Bucket {
Bucket(boolean keyed, @Nullable ValueFormatter formatter) {
super(keyed, formatter);
@ -87,18 +88,18 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
}
@Override
public String getKey() {
public String getKeyAsString() {
return formatter != null ? formatter.format(key) : ValueFormatter.DateTime.DEFAULT.format(key);
}
@Override
public DateTime getKeyAsDate() {
public DateTime getKey() {
return new DateTime(key, DateTimeZone.UTC);
}
@Override
public String toString() {
return getKey();
return getKeyAsString();
}
}
@ -143,28 +144,6 @@ public class InternalDateHistogram extends InternalHistogram<InternalDateHistogr
return FACTORY;
}
@Override
public Bucket getBucketByKey(String key) {
try {
long time = Long.parseLong(key);
return super.getBucketByKey(time);
} catch (NumberFormatException nfe) {
// it's not a number, so lets try to parse it as a date using the formatter.
}
if (bucketsMap == null) {
bucketsMap = new ObjectObjectOpenHashMap<>();
for (InternalDateHistogram.Bucket bucket : buckets) {
bucketsMap.put(bucket.getKey(), bucket);
}
}
return bucketsMap.get(key);
}
@Override
public DateHistogram.Bucket getBucketByKey(DateTime key) {
return getBucketByKey(key.getMillis());
}
@Override
protected InternalDateHistogram.Bucket createBucket(long key, long docCount, InternalAggregations aggregations, boolean keyed, ValueFormatter formatter) {
return new Bucket(key, docCount, aggregations, keyed, formatter);

View File

@ -23,16 +23,18 @@ import com.google.common.collect.Lists;
import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.LongObjectPagedHashMap;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.*;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
@ -110,17 +112,12 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
}
@Override
public String getKey() {
public String getKeyAsString() {
return formatter != null ? formatter.format(key) : ValueFormatter.RAW.format(key);
}
@Override
public Text getKeyAsText() {
return new StringText(getKey());
}
@Override
public Number getKeyAsNumber() {
public Object getKey() {
return key;
}
@ -157,7 +154,7 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
builder.field(CommonFields.KEY_AS_STRING, keyTxt);
} else {
if (keyed) {
builder.startObject(String.valueOf(getKeyAsNumber()));
builder.startObject(String.valueOf(getKey()));
} else {
builder.startObject();
}
@ -272,22 +269,6 @@ public class InternalHistogram<B extends InternalHistogram.Bucket> extends Inter
return buckets;
}
@Override
public B getBucketByKey(String key) {
return getBucketByKey(Long.valueOf(key));
}
@Override
public B getBucketByKey(Number key) {
if (bucketsMap == null) {
bucketsMap = new LongObjectOpenHashMap<>(buckets.size());
for (B bucket : buckets) {
bucketsMap.put(bucket.key, bucket);
}
}
return bucketsMap.get(key.longValue());
}
protected Factory<B> getFactory() {
return FACTORY;
}

View File

@ -23,8 +23,6 @@ import com.google.common.collect.Lists;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.Aggregations;
@ -38,7 +36,6 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatterStream
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -86,8 +83,8 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalMulti
protected transient final boolean keyed;
protected transient final ValueFormatter formatter;
private double from;
private double to;
protected double from;
protected double to;
private long docCount;
InternalAggregations aggregations;
private String key;
@ -107,21 +104,20 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalMulti
}
public String getKey() {
return getKeyAsString();
}
public String getKeyAsString() {
return key;
}
@Override
public Text getKeyAsText() {
return new StringText(getKey());
}
@Override
public Number getFrom() {
public Object getFrom() {
return from;
}
@Override
public Number getTo() {
public Object getTo() {
return to;
}
@ -257,17 +253,6 @@ public class InternalRange<B extends InternalRange.Bucket> extends InternalMulti
return ranges;
}
@Override
public B getBucketByKey(String key) {
if (rangeMap == null) {
rangeMap = new HashMap<>(ranges.size());
for (Range.Bucket bucket : ranges) {
rangeMap.put(bucket.getKey(), (B) bucket);
}
}
return rangeMap.get(key);
}
protected Factory<B, ?> getFactory() {
return FACTORY;
}

View File

@ -36,7 +36,7 @@ public interface Range extends MultiBucketsAggregation {
/**
* @return The lower bound of the range
*/
Number getFrom();
Object getFrom();
/**
* @return The string value for the lower bound of the range
@ -46,7 +46,7 @@ public interface Range extends MultiBucketsAggregation {
/**
* @return The upper bound of the range (excluding)
*/
Number getTo();
Object getTo();
/**
* @return The string value for the upper bound of the range (excluding)
@ -59,7 +59,4 @@ public interface Range extends MultiBucketsAggregation {
*/
List<? extends Bucket> getBuckets();
@Override
Bucket getBucketByKey(String key);
}

View File

@ -1,45 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.joda.time.DateTime;
import java.util.Collection;
import java.util.List;
/**
 * A range aggregation on date values.
 */
public interface DateRange extends Range {

    /**
     * A bucket of a date range aggregation, exposing the range bounds as dates.
     */
    static interface Bucket extends Range.Bucket {

        // Lower bound as a date; implementations may return null for an
        // unbounded (infinite) lower bound.
        DateTime getFromAsDate();

        // Upper bound (exclusive) as a date; implementations may return null
        // for an unbounded (infinite) upper bound.
        DateTime getToAsDate();
    }

    /**
     * @return the buckets of this aggregation, typed as date-range buckets.
     */
    @Override
    List<? extends Bucket> getBuckets();

    /**
     * @return the bucket associated with the given key.
     */
    @Override
    DateRange.Bucket getBucketByKey(String key);
}

View File

@ -37,7 +37,7 @@ import java.util.Map;
/**
*
*/
public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> implements DateRange {
public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> {
public final static Type TYPE = new Type("date_range", "drange");
@ -74,7 +74,7 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> i
public static final Factory FACTORY = new Factory();
public static class Bucket extends InternalRange.Bucket implements DateRange.Bucket {
public static class Bucket extends InternalRange.Bucket {
public Bucket(boolean keyed, @Nullable ValueFormatter formatter) {
super(keyed, formatter);
@ -89,13 +89,13 @@ public class InternalDateRange extends InternalRange<InternalDateRange.Bucket> i
}
@Override
public DateTime getFromAsDate() {
return Double.isInfinite(getFrom().doubleValue()) ? null : new DateTime(getFrom().longValue(), DateTimeZone.UTC);
public Object getFrom() {
return Double.isInfinite(((Number) from).doubleValue()) ? null : new DateTime(((Number) from).longValue(), DateTimeZone.UTC);
}
@Override
public DateTime getToAsDate() {
return Double.isInfinite(getTo().doubleValue()) ? null : new DateTime(getTo().longValue(), DateTimeZone.UTC);
public Object getTo() {
return Double.isInfinite(((Number) to).doubleValue()) ? null : new DateTime(((Number) to).longValue(), DateTimeZone.UTC);
}
@Override

View File

@ -1,42 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.geodistance;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import java.util.Collection;
import java.util.List;
/**
 * An aggregation that computes ranges of geo distances.
 */
public interface GeoDistance extends Range {

    /**
     * A range bucket. Adds nothing beyond {@link Range.Bucket}; exists so the
     * aggregation exposes its own bucket type.
     */
    public static interface Bucket extends Range.Bucket {
    }

    /**
     * @return the buckets of this aggregation, typed as geo-distance buckets.
     */
    @Override
    List<? extends Bucket> getBuckets();

    /**
     * @return the bucket associated with the given key.
     */
    @Override
    GeoDistance.Bucket getBucketByKey(String key);
}

View File

@ -35,7 +35,7 @@ import java.util.Map;
/**
*
*/
public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucket> implements GeoDistance {
public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucket> {
public static final Type TYPE = new Type("geo_distance", "gdist");
@ -72,7 +72,7 @@ public class InternalGeoDistance extends InternalRange<InternalGeoDistance.Bucke
public static final Factory FACTORY = new Factory();
static class Bucket extends InternalRange.Bucket implements GeoDistance.Bucket {
static class Bucket extends InternalRange.Bucket {
Bucket(boolean keyed, @Nullable ValueFormatter formatter) {
super(keyed, formatter);

View File

@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.ipv4;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import java.util.List;
/**
 * A range aggregation on ipv4 values.
 */
public interface IPv4Range extends Range {

    /**
     * A range bucket. Adds nothing beyond {@link Range.Bucket}; exists so the
     * aggregation exposes its own bucket type.
     */
    static interface Bucket extends Range.Bucket {
    }

    /**
     * @return the buckets of this aggregation, typed as ipv4-range buckets.
     */
    @Override
    List<? extends Bucket> getBuckets();

    /**
     * @return the bucket associated with the given key.
     */
    @Override
    IPv4Range.Bucket getBucketByKey(String key);
}

View File

@ -35,7 +35,7 @@ import java.util.Map;
/**
*
*/
public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> implements IPv4Range {
public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> {
public static final long MAX_IP = 4294967296l;
@ -73,7 +73,7 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> i
public static final Factory FACTORY = new Factory();
public static class Bucket extends InternalRange.Bucket implements IPv4Range.Bucket {
public static class Bucket extends InternalRange.Bucket {
public Bucket(boolean keyed) {
super(keyed, ValueFormatter.IPv4);
@ -89,13 +89,13 @@ public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket> i
@Override
public String getFromAsString() {
double from = getFrom().doubleValue();
double from = ((Number) this.from).doubleValue();
return Double.isInfinite(from) ? null : from == 0 ? null : ValueFormatter.IPv4.format(from);
}
@Override
public String getToAsString() {
double to = getTo().doubleValue();
double to = ((Number) this.to).doubleValue();
return Double.isInfinite(to) ? null : MAX_IP == to ? null : ValueFormatter.IPv4.format(to);
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.significant;
import com.google.common.collect.Maps;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.search.aggregations.Aggregations;
@ -27,7 +28,12 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
*
@ -143,7 +149,7 @@ public abstract class InternalSignificantTerms extends InternalMultiBucketAggreg
if (bucketMap == null) {
bucketMap = Maps.newHashMapWithExpectedSize(buckets.size());
for (Bucket bucket : buckets) {
bucketMap.put(bucket.getKey(), bucket);
bucketMap.put(bucket.getKeyAsString(), bucket);
}
}
return bucketMap.get(term);
@ -169,7 +175,7 @@ public abstract class InternalSignificantTerms extends InternalMultiBucketAggreg
List<Bucket> existingBuckets = buckets.get(bucket.getKey());
if (existingBuckets == null) {
existingBuckets = new ArrayList<>(aggregations.size());
buckets.put(bucket.getKey(), existingBuckets);
buckets.put(bucket.getKeyAsString(), existingBuckets);
}
// Adjust the buckets with the global stats representing the
// total size of the pots from which the stats are drawn

View File

@ -18,12 +18,9 @@
*/
package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
@ -100,25 +97,25 @@ public class SignificantLongTerms extends InternalSignificantTerms {
}
@Override
public Text getKeyAsText() {
return new StringText(String.valueOf(term));
}
@Override
public Number getKeyAsNumber() {
public Object getKey() {
return term;
}
@Override
int compareTerm(SignificantTerms.Bucket other) {
return Long.compare(term, other.getKeyAsNumber().longValue());
return Long.compare(term, ((Number) other.getKey()).longValue());
}
@Override
public String getKey() {
public String getKeyAsString() {
return Long.toString(term);
}
@Override
public Number getKeyAsNumber() {
return term;
}
@Override
Bucket newBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, InternalAggregations aggregations) {
return new Bucket(subsetDf, subsetSize, supersetDf, supersetSize, term, aggregations, formatter);

View File

@ -19,12 +19,8 @@
package org.elasticsearch.search.aggregations.bucket.significant;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.BytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -98,11 +94,6 @@ public class SignificantStringTerms extends InternalSignificantTerms {
this.termBytes = term;
}
@Override
public Text getKeyAsText() {
return new BytesText(new BytesArray(termBytes));
}
@Override
public Number getKeyAsNumber() {
// this method is needed for scripted numeric aggregations
@ -115,10 +106,15 @@ public class SignificantStringTerms extends InternalSignificantTerms {
}
@Override
public String getKey() {
public String getKeyAsString() {
return termBytes.utf8ToString();
}
@Override
public String getKey() {
return getKeyAsString();
}
@Override
Bucket newBucket(long subsetDf, long subsetSize, long supersetDf, long supersetSize, InternalAggregations aggregations) {
return new Bucket(termBytes, subsetDf, subsetSize, supersetDf, supersetSize, aggregations);

View File

@ -48,12 +48,12 @@ public interface SignificantTerms extends MultiBucketsAggregation, Iterable<Sign
this.supersetDf = supersetDf;
}
public abstract Number getKeyAsNumber();
abstract int compareTerm(SignificantTerms.Bucket other);
public abstract double getSignificanceScore();
abstract Number getKeyAsNumber();
public long getSubsetDf() {
return subsetDf;
}
@ -75,7 +75,9 @@ public interface SignificantTerms extends MultiBucketsAggregation, Iterable<Sign
@Override
List<Bucket> getBuckets();
@Override
Bucket getBucketByKey(String key);
/**
* Get the bucket for the given term, or null if there is no such bucket.
*/
Bucket getBucketByKey(String term);
}

View File

@ -22,8 +22,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
@ -92,13 +90,13 @@ public class DoubleTerms extends InternalTerms {
}
@Override
public String getKey() {
public String getKeyAsString() {
return String.valueOf(term);
}
@Override
public Text getKeyAsText() {
return new StringText(String.valueOf(term));
public Object getKey() {
return term;
}
@Override
@ -108,12 +106,7 @@ public class DoubleTerms extends InternalTerms {
@Override
int compareTerm(Terms.Bucket other) {
return Double.compare(term, other.getKeyAsNumber().doubleValue());
}
@Override
Object getKeyAsObject() {
return getKeyAsNumber();
return Double.compare(term, ((Number) other.getKey()).doubleValue());
}
@Override

View File

@ -62,7 +62,7 @@ public class DoubleTermsAggregator extends LongTermsAggregator {
}
private static DoubleTerms.Bucket convertToDouble(InternalTerms.Bucket bucket) {
final long term = bucket.getKeyAsNumber().longValue();
final long term = ((Number) bucket.getKey()).longValue();
final double value = NumericUtils.sortableLongToDouble(term);
return new DoubleTerms.Bucket(value, bucket.docCount, bucket.aggregations, bucket.showDocCountError, bucket.docCountError, bucket.formatter);
}

View File

@ -31,7 +31,6 @@ import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -221,17 +220,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
}
@Override
public String getKey() {
public String getKeyAsString() {
throw new UnsupportedOperationException();
}
@Override
public Text getKeyAsText() {
throw new UnsupportedOperationException();
}
@Override
Object getKeyAsObject() {
public Object getKey() {
throw new UnsupportedOperationException();
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.Streamable;
@ -32,7 +33,11 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
*
@ -82,8 +87,6 @@ public abstract class InternalTerms extends InternalMultiBucketAggregation imple
return aggregations;
}
abstract Object getKeyAsObject();
abstract Bucket newBucket(long docCount, InternalAggregations aggs, long docCountError);
public Bucket reduce(List<? extends Bucket> buckets, ReduceContext context) {
@ -141,12 +144,12 @@ public abstract class InternalTerms extends InternalMultiBucketAggregation imple
if (bucketMap == null) {
bucketMap = Maps.newHashMapWithExpectedSize(buckets.size());
for (Bucket bucket : buckets) {
bucketMap.put(bucket.getKey(), bucket);
bucketMap.put(bucket.getKeyAsString(), bucket);
}
}
return bucketMap.get(term);
}
public long getDocCountError() {
return docCountError;
}
@ -184,7 +187,7 @@ public abstract class InternalTerms extends InternalMultiBucketAggregation imple
terms.docCountError = thisAggDocCountError;
for (Bucket bucket : terms.buckets) {
bucket.docCountError = thisAggDocCountError;
buckets.put(bucket.getKeyAsObject(), bucket);
buckets.put(bucket.getKey(), bucket);
}
}

View File

@ -18,12 +18,9 @@
*/
package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregations;
@ -92,13 +89,13 @@ public class LongTerms extends InternalTerms {
}
@Override
public String getKey() {
public String getKeyAsString() {
return String.valueOf(term);
}
@Override
public Text getKeyAsText() {
return new StringText(String.valueOf(term));
public Object getKey() {
return term;
}
@Override
@ -108,12 +105,7 @@ public class LongTerms extends InternalTerms {
@Override
int compareTerm(Terms.Bucket other) {
return Long.compare(term, other.getKeyAsNumber().longValue());
}
@Override
Object getKeyAsObject() {
return getKeyAsNumber();
return Long.compare(term, ((Number) other.getKey()).longValue());
}
@Override

View File

@ -19,12 +19,8 @@
package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.BytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -92,13 +88,8 @@ public class StringTerms extends InternalTerms {
}
@Override
public String getKey() {
return termBytes.utf8ToString();
}
@Override
public Text getKeyAsText() {
return new BytesText(new BytesArray(termBytes));
public Object getKey() {
return getKeyAsString();
}
@Override
@ -108,13 +99,13 @@ public class StringTerms extends InternalTerms {
}
@Override
int compareTerm(Terms.Bucket other) {
return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
public String getKeyAsString() {
return termBytes.utf8ToString();
}
@Override
Object getKeyAsObject() {
return getKeyAsText();
int compareTerm(Terms.Bucket other) {
return BytesRef.getUTF8SortedAsUnicodeComparator().compare(termBytes, ((Bucket) other).termBytes);
}
@Override

View File

@ -67,7 +67,7 @@ public interface Terms extends MultiBucketsAggregation {
public abstract Number getKeyAsNumber();
abstract int compareTerm(Terms.Bucket other);
public abstract long getDocCountError();
}

View File

@ -78,12 +78,12 @@ public class FieldDataFilterIntegrationTests extends ElasticsearchIntegrationTes
Aggregations aggs = searchResponse.getAggregations();
Terms nameAgg = aggs.get("name");
assertThat(nameAgg.getBuckets().size(), Matchers.equalTo(1));
assertThat(nameAgg.getBuckets().iterator().next().getKey(), Matchers.equalTo("bacon"));
assertThat(nameAgg.getBuckets().iterator().next().getKeyAsString(), Matchers.equalTo("bacon"));
Terms notFilteredAgg = aggs.get("not_filtered");
assertThat(notFilteredAgg.getBuckets().size(), Matchers.equalTo(2));
assertThat(notFilteredAgg.getBuckets().get(0).getKey(), Matchers.isOneOf("bacon", "bastards"));
assertThat(notFilteredAgg.getBuckets().get(1).getKey(), Matchers.isOneOf("bacon", "bastards"));
assertThat(notFilteredAgg.getBuckets().get(0).getKeyAsString(), Matchers.isOneOf("bacon", "bastards"));
assertThat(notFilteredAgg.getBuckets().get(1).getKeyAsString(), Matchers.isOneOf("bacon", "bastards"));
}
}

View File

@ -106,7 +106,7 @@ public class PercolatorFacetsAndAggregationsTests extends ElasticsearchIntegrati
assertThat(aggregations.get(0).getName(), equalTo("a"));
List<Terms.Bucket> buckets = new ArrayList<>(((Terms) aggregations.get(0)).getBuckets());
assertThat(buckets.size(), equalTo(1));
assertThat(buckets.get(0).getKeyAsText().string(), equalTo("b"));
assertThat(buckets.get(0).getKeyAsString(), equalTo("b"));
assertThat(buckets.get(0).getDocCount(), equalTo((long) expectedCount[i % values.length]));
}
}

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations;
import com.carrotsearch.hppc.IntIntMap;
import com.carrotsearch.hppc.IntIntOpenHashMap;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
@ -34,7 +35,9 @@ import org.junit.Test;
import java.util.Collection;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
@ -101,7 +104,7 @@ public class CombiTests extends ElasticsearchIntegrationTest {
Collection<Terms.Bucket> buckets = terms.getBuckets();
assertThat(buckets.size(), equalTo(values.size()));
for (Terms.Bucket bucket : buckets) {
values.remove(bucket.getKeyAsNumber().intValue());
values.remove(((Number) bucket.getKey()).intValue());
}
assertTrue(values.isEmpty());
}

View File

@ -21,6 +21,8 @@ package org.elasticsearch.search.aggregations;
import com.carrotsearch.hppc.IntOpenHashSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequestBuilder;
@ -33,17 +35,30 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.bucket.range.RangeBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.util.HashMap;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.min;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;
@ -124,6 +139,12 @@ public class RandomTests extends ElasticsearchIntegrationTest {
SearchResponse resp = reqBuilder.execute().actionGet();
Range range = resp.getAggregations().get("range");
List<? extends Bucket> buckets = range.getBuckets();
HashMap<String, Bucket> bucketMap = Maps.newHashMapWithExpectedSize(buckets.size());
for (Bucket bucket : buckets) {
bucketMap.put(bucket.getKeyAsString(), bucket);
}
for (int i = 0; i < ranges.length; ++i) {
@ -137,8 +158,9 @@ public class RandomTests extends ElasticsearchIntegrationTest {
}
}
final Range.Bucket bucket = range.getBucketByKey(Integer.toString(i));
assertEquals(bucket.getKey(), count, bucket.getDocCount());
final Range.Bucket bucket = bucketMap.get(Integer.toString(i));
assertEquals(bucket.getKeyAsString(), Integer.toString(i), bucket.getKeyAsString());
assertEquals(bucket.getKeyAsString(), count, bucket.getDocCount());
final Filter filter = resp.getAggregations().get("filter" + i);
assertThat(filter.getDocCount(), equalTo(count));
@ -234,10 +256,10 @@ public class RandomTests extends ElasticsearchIntegrationTest {
assertEquals(valuesSet.size(), stringGlobalOrdinalsTerms.getBuckets().size());
assertEquals(valuesSet.size(), stringGlobalOrdinalsDVTerms.getBuckets().size());
for (Terms.Bucket bucket : longTerms.getBuckets()) {
final Terms.Bucket doubleBucket = doubleTerms.getBucketByKey(Double.toString(Long.parseLong(bucket.getKeyAsText().string())));
final Terms.Bucket stringMapBucket = stringMapTerms.getBucketByKey(bucket.getKeyAsText().string());
final Terms.Bucket stringGlobalOrdinalsBucket = stringGlobalOrdinalsTerms.getBucketByKey(bucket.getKeyAsText().string());
final Terms.Bucket stringGlobalOrdinalsDVBucket = stringGlobalOrdinalsDVTerms.getBucketByKey(bucket.getKeyAsText().string());
final Terms.Bucket doubleBucket = doubleTerms.getBucketByKey(Double.toString(Long.parseLong(bucket.getKeyAsString())));
final Terms.Bucket stringMapBucket = stringMapTerms.getBucketByKey(bucket.getKeyAsString());
final Terms.Bucket stringGlobalOrdinalsBucket = stringGlobalOrdinalsTerms.getBucketByKey(bucket.getKeyAsString());
final Terms.Bucket stringGlobalOrdinalsDVBucket = stringGlobalOrdinalsDVTerms.getBucketByKey(bucket.getKeyAsString());
assertNotNull(doubleBucket);
assertNotNull(stringMapBucket);
assertNotNull(stringGlobalOrdinalsBucket);
@ -288,10 +310,10 @@ public class RandomTests extends ElasticsearchIntegrationTest {
Histogram histo = resp.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(terms.getBuckets().size(), equalTo(histo.getBuckets().size()));
for (Terms.Bucket bucket : terms.getBuckets()) {
final long key = bucket.getKeyAsNumber().longValue() * interval;
final Histogram.Bucket histoBucket = histo.getBucketByKey(key);
assertEquals(bucket.getDocCount(), histoBucket.getDocCount());
for (Histogram.Bucket bucket : histo.getBuckets()) {
final double key = ((Number) bucket.getKey()).doubleValue() / interval;
final Terms.Bucket termsBucket = terms.getBucketByKey(String.valueOf(key));
assertEquals(bucket.getDocCount(), termsBucket.getDocCount());
}
}
@ -336,22 +358,23 @@ public class RandomTests extends ElasticsearchIntegrationTest {
Range range = filter.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
assertThat(range.getBuckets().size(), equalTo(2));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(buckets.size(), equalTo(2));
Range.Bucket bucket = range.getBucketByKey("*-6.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("*-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getDocCount(), equalTo(value < 6 ? 1L : 0L));
Sum sum = bucket.getAggregations().get("sum");
assertEquals(value < 6 ? value : 0, sum.getValue(), 0d);
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getDocCount(), equalTo(value >= 6 ? 1L : 0L));
sum = bucket.getAggregations().get("sum");
assertEquals(value >= 6 ? value : 0, sum.getValue(), 0d);

View File

@ -30,7 +30,12 @@ import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
@ -130,7 +135,7 @@ public class ChildrenTests extends ElasticsearchIntegrationTest {
assertThat(categoryTerms.getBuckets().size(), equalTo(categoryToControl.size()));
for (Map.Entry<String, Control> entry1 : categoryToControl.entrySet()) {
Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry1.getKey());
assertThat(categoryBucket.getKey(), equalTo(entry1.getKey()));
assertThat(categoryBucket.getKeyAsString(), equalTo(entry1.getKey()));
assertThat(categoryBucket.getDocCount(), equalTo((long) entry1.getValue().articleIds.size()));
Children childrenBucket = categoryBucket.getAggregations().get("to_comment");
@ -143,7 +148,7 @@ public class ChildrenTests extends ElasticsearchIntegrationTest {
assertThat(commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size()));
for (Map.Entry<String, Set<String>> entry2 : entry1.getValue().commenterToCommentId.entrySet()) {
Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey());
assertThat(commentBucket.getKey(), equalTo(entry2.getKey()));
assertThat(commentBucket.getKeyAsString(), equalTo(entry2.getKey()));
assertThat(commentBucket.getDocCount(), equalTo((long) entry2.getValue().size()));
TopHits topHits = commentBucket.getAggregations().get("top_comments");
@ -179,7 +184,7 @@ public class ChildrenTests extends ElasticsearchIntegrationTest {
}
Terms.Bucket categoryBucket = categoryTerms.getBucketByKey("a");
assertThat(categoryBucket.getKey(), equalTo("a"));
assertThat(categoryBucket.getKeyAsString(), equalTo("a"));
assertThat(categoryBucket.getDocCount(), equalTo(3l));
Children childrenBucket = categoryBucket.getAggregations().get("to_comment");
@ -195,7 +200,7 @@ public class ChildrenTests extends ElasticsearchIntegrationTest {
assertThat(topHits.getHits().getAt(1).getType(), equalTo("comment"));
categoryBucket = categoryTerms.getBucketByKey("b");
assertThat(categoryBucket.getKey(), equalTo("b"));
assertThat(categoryBucket.getKeyAsString(), equalTo("b"));
assertThat(categoryBucket.getDocCount(), equalTo(2l));
childrenBucket = categoryBucket.getAggregations().get("to_comment");
@ -207,7 +212,7 @@ public class ChildrenTests extends ElasticsearchIntegrationTest {
assertThat(topHits.getHits().getAt(0).getType(), equalTo("comment"));
categoryBucket = categoryTerms.getBucketByKey("c");
assertThat(categoryBucket.getKey(), equalTo("c"));
assertThat(categoryBucket.getKeyAsString(), equalTo("c"));
assertThat(categoryBucket.getDocCount(), equalTo(2l));
childrenBucket = categoryBucket.getAggregations().get("to_comment");

View File

@ -24,21 +24,23 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.transport.AssertingLocalTransport;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.After;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsNull.notNullValue;
/**
* The serialisation of pre and post offsets for the date histogram aggregation was corrected in version 1.4 to allow negative offsets and as such the
@ -81,22 +83,28 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
.addAggregation(dateHistogram("date_histo")
.field("date")
.preOffset("-2h")
.interval(DateHistogram.Interval.DAY)
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
DateHistogram histo = response.getAggregations().get("date_histo");
Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-10");
assertThat(bucket, Matchers.notNullValue());
DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-10"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = histo.getBucketByKey("2014-03-11");
assertThat(bucket, Matchers.notNullValue());
key = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-11"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
}
@ -117,22 +125,28 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
.field("date")
.preOffset("-2h")
.minDocCount(0)
.interval(DateHistogram.Interval.DAY)
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
DateHistogram histo = response.getAggregations().get("date_histo");
Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-10");
assertThat(bucket, Matchers.notNullValue());
DateTime key = new DateTime(2014, 3, 10, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-10"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = histo.getBucketByKey("2014-03-11");
assertThat(bucket, Matchers.notNullValue());
key = new DateTime(2014, 3, 11, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-11"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(3l));
}
@ -152,22 +166,28 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
.addAggregation(dateHistogram("date_histo")
.field("date")
.postOffset("2d")
.interval(DateHistogram.Interval.DAY)
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
DateHistogram histo = response.getAggregations().get("date_histo");
Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-13");
assertThat(bucket, Matchers.notNullValue());
DateTime key = new DateTime(2014, 3, 13, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-13"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = histo.getBucketByKey("2014-03-14");
assertThat(bucket, Matchers.notNullValue());
key = new DateTime(2014, 3, 14, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-14"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
}
@ -188,22 +208,28 @@ public class DateHistogramOffsetTests extends ElasticsearchIntegrationTest {
.field("date")
.postOffset("2d")
.minDocCount(0)
.interval(DateHistogram.Interval.DAY)
.interval(DateHistogramInterval.DAY)
.format("yyyy-MM-dd"))
.execute().actionGet();
assertThat(response.getHits().getTotalHits(), equalTo(5l));
DateHistogram histo = response.getAggregations().get("date_histo");
Collection<? extends DateHistogram.Bucket> buckets = histo.getBuckets();
Histogram histo = response.getAggregations().get("date_histo");
List<? extends Histogram.Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(2));
DateHistogram.Bucket bucket = histo.getBucketByKey("2014-03-13");
assertThat(bucket, Matchers.notNullValue());
DateTime key = new DateTime(2014, 3, 13, 0, 0, DateTimeZone.UTC);
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-13"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = histo.getBucketByKey("2014-03-14");
assertThat(bucket, Matchers.notNullValue());
key = new DateTime(2014, 3, 14, 0, 0, DateTimeZone.UTC);
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsString(), equalTo("2014-03-14"));
assertThat(((DateTime) bucket.getKey()), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(1l));
}
}

View File

@ -220,7 +220,7 @@ public class DoubleTermsTests extends AbstractTermsTests {
}
private String key(Terms.Bucket bucket) {
return randomBoolean() ? bucket.getKey() : bucket.getKeyAsText().string();
return bucket.getKeyAsString();
}
@Test
@ -409,7 +409,7 @@ public class DoubleTermsTests extends AbstractTermsTests {
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat((long) sum.getValue(), equalTo(i+i+1l));
assertThat((String) propertiesKeys[i], equalTo(String.valueOf((double) i)));
assertThat((double) propertiesKeys[i], equalTo((double) i));
assertThat((long) propertiesDocCounts[i], equalTo(1l));
assertThat((double) propertiesCounts[i], equalTo((double) i + i + 1l));
}
@ -465,7 +465,7 @@ public class DoubleTermsTests extends AbstractTermsTests {
Terms.Bucket bucket = terms.getBucketByKey("" + (i + 1d));
assertThat(bucket, notNullValue());
assertThat(key(bucket), equalTo("" + (i+1d)));
assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i+1));
assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i + 1));
assertThat(bucket.getDocCount(), equalTo(1l));
}
}
@ -520,7 +520,7 @@ public class DoubleTermsTests extends AbstractTermsTests {
Terms.Bucket bucket = terms.getBucketByKey("" + (i + 1d));
assertThat(bucket, notNullValue());
assertThat(key(bucket), equalTo("" + (i+1d)));
assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i+1));
assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i + 1));
if (i == 0 || i == 5) {
assertThat(bucket.getDocCount(), equalTo(1l));
} else {
@ -592,7 +592,7 @@ public class DoubleTermsTests extends AbstractTermsTests {
Terms.Bucket bucket = terms.getBucketByKey("" + (i + 1d));
assertThat(bucket, notNullValue());
assertThat(key(bucket), equalTo("" + (i+1d)));
assertThat(bucket.getKeyAsNumber().doubleValue(), equalTo(i+1d));
assertThat(bucket.getKeyAsNumber().doubleValue(), equalTo(i + 1d));
final long count = i == 0 || i == 5 ? 1 : 2;
double s = 0;
for (int j = 0; j < NUM_DOCS; ++j) {
@ -807,7 +807,7 @@ public class DoubleTermsTests extends AbstractTermsTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Terms terms = bucket.getAggregations().get("terms");

View File

@ -178,7 +178,7 @@ public class FilterTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Filter filter = bucket.getAggregations().get("filter");

View File

@ -232,14 +232,14 @@ public class FiltersTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Filters filters = bucket.getAggregations().get("filters");
assertThat(filters, notNullValue());
Filters.Bucket all = filters.getBucketByKey("all");
assertThat(all, Matchers.notNullValue());
assertThat(all.getKey(), equalTo("all"));
assertThat(all.getKeyAsString(), equalTo("all"));
assertThat(all.getDocCount(), is(0l));
}

View File

@ -26,7 +26,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistance;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.hamcrest.Matchers;
@ -137,34 +138,35 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
Range geoDist = response.getAggregations().get("amsterdam_rings");
assertThat(geoDist, notNullValue());
assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
assertThat(geoDist.getBuckets().size(), equalTo(3));
List<? extends Bucket> buckets = geoDist.getBuckets();
assertThat(buckets.size(), equalTo(3));
GeoDistance.Bucket bucket = geoDist.getBucketByKey("*-500.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-500.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(0.0));
assertThat(bucket.getTo().doubleValue(), equalTo(500.0));
assertThat((String) (String) bucket.getKey(), equalTo("*-500.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("500.0-1000.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(500.0));
assertThat(bucket.getTo().doubleValue(), equalTo(1000.0));
assertThat((String) (String) bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(500.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("1000.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("1000.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(1l));
@ -185,34 +187,35 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
Range geoDist = response.getAggregations().get("amsterdam_rings");
assertThat(geoDist, notNullValue());
assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
assertThat(geoDist.getBuckets().size(), equalTo(3));
List<? extends Bucket> buckets = geoDist.getBuckets();
assertThat(buckets.size(), equalTo(3));
GeoDistance.Bucket bucket = geoDist.getBucketByKey("ring1");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("ring1"));
assertThat(bucket.getFrom().doubleValue(), equalTo(0.0));
assertThat(bucket.getTo().doubleValue(), equalTo(500.0));
assertThat((String) bucket.getKey(), equalTo("ring1"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("ring2");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("ring2"));
assertThat(bucket.getFrom().doubleValue(), equalTo(500.0));
assertThat(bucket.getTo().doubleValue(), equalTo(1000.0));
assertThat((String) bucket.getKey(), equalTo("ring2"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(500.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("ring3");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("ring3"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("ring3"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(1l));
@ -235,34 +238,35 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
Range geoDist = response.getAggregations().get("amsterdam_rings");
assertThat(geoDist, notNullValue());
assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
List<? extends Bucket> buckets = geoDist.getBuckets();
assertThat(geoDist.getBuckets().size(), equalTo(3));
GeoDistance.Bucket bucket = geoDist.getBucketByKey("*-500.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-500.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(0.0));
assertThat(bucket.getTo().doubleValue(), equalTo(500.0));
assertThat((String) bucket.getKey(), equalTo("*-500.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = geoDist.getBucketByKey("500.0-1000.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(500.0));
assertThat(bucket.getTo().doubleValue(), equalTo(1000.0));
assertThat((String) bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(500.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = geoDist.getBucketByKey("1000.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("1000.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0l));
@ -283,34 +287,35 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
Range geoDist = response.getAggregations().get("amsterdam_rings");
assertThat(geoDist, notNullValue());
assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
List<? extends Bucket> buckets = geoDist.getBuckets();
assertThat(geoDist.getBuckets().size(), equalTo(3));
GeoDistance.Bucket bucket = geoDist.getBucketByKey("*-500.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-500.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(0.0));
assertThat(bucket.getTo().doubleValue(), equalTo(500.0));
assertThat((String) bucket.getKey(), equalTo("*-500.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("500.0-1000.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(500.0));
assertThat(bucket.getTo().doubleValue(), equalTo(1000.0));
assertThat((String) bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(500.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("1000.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("1000.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(1l));
@ -334,19 +339,20 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
Range geoDist = response.getAggregations().get("amsterdam_rings");
assertThat(geoDist, notNullValue());
assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
List<? extends Bucket> buckets = geoDist.getBuckets();
assertThat(geoDist.getBuckets().size(), equalTo(3));
Object[] propertiesKeys = (Object[]) geoDist.getProperty("_key");
Object[] propertiesDocCounts = (Object[]) geoDist.getProperty("_count");
Object[] propertiesCities = (Object[]) geoDist.getProperty("cities");
GeoDistance.Bucket bucket = geoDist.getBucketByKey("*-500.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-500.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(0.0));
assertThat(bucket.getTo().doubleValue(), equalTo(500.0));
assertThat((String) bucket.getKey(), equalTo("*-500.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
@ -355,18 +361,18 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(cities, Matchers.notNullValue());
Set<String> names = Sets.newHashSet();
for (Terms.Bucket city : cities.getBuckets()) {
names.add(city.getKey());
names.add(city.getKeyAsString());
}
assertThat(names.contains("utrecht") && names.contains("haarlem"), is(true));
assertThat((String) propertiesKeys[0], equalTo("*-500.0"));
assertThat((long) propertiesDocCounts[0], equalTo(2l));
assertThat((Terms) propertiesCities[0], sameInstance(cities));
bucket = geoDist.getBucketByKey("500.0-1000.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(500.0));
assertThat(bucket.getTo().doubleValue(), equalTo(1000.0));
assertThat((String) bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(500.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
@ -375,18 +381,18 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(cities, Matchers.notNullValue());
names = Sets.newHashSet();
for (Terms.Bucket city : cities.getBuckets()) {
names.add(city.getKey());
names.add(city.getKeyAsString());
}
assertThat(names.contains("berlin") && names.contains("prague"), is(true));
assertThat((String) propertiesKeys[1], equalTo("500.0-1000.0"));
assertThat((long) propertiesDocCounts[1], equalTo(2l));
assertThat((Terms) propertiesCities[1], sameInstance(cities));
bucket = geoDist.getBucketByKey("1000.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("1000.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(1l));
@ -395,7 +401,7 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(cities, Matchers.notNullValue());
names = Sets.newHashSet();
for (Terms.Bucket city : cities.getBuckets()) {
names.add(city.getKey());
names.add(city.getKeyAsString());
}
assertThat(names.contains("tel-aviv"), is(true));
assertThat((String) propertiesKeys[2], equalTo("1000.0-*"));
@ -414,17 +420,17 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
GeoDistance geoDistance = bucket.getAggregations().get("geo_dist");
List<GeoDistance.Bucket> buckets = new ArrayList<>(geoDistance.getBuckets());
Range geoDistance = bucket.getAggregations().get("geo_dist");
List<Range.Bucket> buckets = new ArrayList<>(geoDistance.getBuckets());
assertThat(geoDistance, Matchers.notNullValue());
assertThat(geoDistance.getName(), equalTo("geo_dist"));
assertThat(buckets.size(), is(1));
assertThat(buckets.get(0).getKey(), equalTo("0-100"));
assertThat(buckets.get(0).getFrom().doubleValue(), equalTo(0.0));
assertThat(buckets.get(0).getTo().doubleValue(), equalTo(100.0));
assertThat((String) buckets.get(0).getKey(), equalTo("0-100"));
assertThat(((Number) buckets.get(0).getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) buckets.get(0).getTo()).doubleValue(), equalTo(100.0));
assertThat(buckets.get(0).getFromAsString(), equalTo("0.0"));
assertThat(buckets.get(0).getToAsString(), equalTo("100.0"));
assertThat(buckets.get(0).getDocCount(), equalTo(0l));
@ -445,34 +451,35 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
Range geoDist = response.getAggregations().get("amsterdam_rings");
assertThat(geoDist, notNullValue());
assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
List<? extends Bucket> buckets = geoDist.getBuckets();
assertThat(geoDist.getBuckets().size(), equalTo(3));
GeoDistance.Bucket bucket = geoDist.getBucketByKey("*-500.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-500.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(0.0));
assertThat(bucket.getTo().doubleValue(), equalTo(500.0));
assertThat((String) bucket.getKey(), equalTo("*-500.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(500.0));
assertThat(bucket.getFromAsString(), equalTo("0.0"));
assertThat(bucket.getToAsString(), equalTo("500.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("500.0-1000.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(500.0));
assertThat(bucket.getTo().doubleValue(), equalTo(1000.0));
assertThat((String) bucket.getKey(), equalTo("500.0-1000.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(500.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(1000.0));
assertThat(bucket.getFromAsString(), equalTo("500.0"));
assertThat(bucket.getToAsString(), equalTo("1000.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = geoDist.getBucketByKey("1000.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("1000.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(1l));

View File

@ -25,6 +25,7 @@ import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.GeoBoundingBoxFilterBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -148,14 +149,15 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
Object[] propertiesDocCounts = (Object[]) geoGrid.getProperty("_count");
for (int i = 0; i < buckets.size(); i++) {
GeoHashGrid.Bucket cell = buckets.get(i);
String geohash = cell.getKey();
String geohash = cell.getKeyAsString();
long bucketCount = cell.getDocCount();
int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash);
assertNotSame(bucketCount, 0);
assertEquals("Geohash " + geohash + " has wrong doc count ",
expectedBucketCount, bucketCount);
assertThat((String) propertiesKeys[i], equalTo(geohash));
GeoPoint geoPoint = (GeoPoint) propertiesKeys[i];
assertThat(GeoHashUtils.encode(geoPoint.lat(), geoPoint.lon(), precision), equalTo(geohash));
assertThat((long) propertiesDocCounts[i], equalTo(bucketCount));
}
}
@ -175,7 +177,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
GeoHashGrid geoGrid = response.getAggregations().get("geohashgrid");
for (GeoHashGrid.Bucket cell : geoGrid.getBuckets()) {
String geohash = cell.getKey();
String geohash = cell.getKeyAsString();
long bucketCount = cell.getDocCount();
int expectedBucketCount = multiValuedExpectedDocCountsForGeoHash.get(geohash);
@ -208,7 +210,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
GeoHashGrid geoGrid = filter.getAggregations().get("geohashgrid");
for (GeoHashGrid.Bucket cell : geoGrid.getBuckets()) {
String geohash = cell.getKey();
String geohash = cell.getKeyAsString();
long bucketCount = cell.getDocCount();
int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash);
assertNotSame(bucketCount, 0);
@ -252,7 +254,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
GeoHashGrid geoGrid = response.getAggregations().get("geohashgrid");
for (GeoHashGrid.Bucket cell : geoGrid.getBuckets()) {
String geohash = cell.getKey();
String geohash = cell.getKeyAsString();
long bucketCount = cell.getDocCount();
int expectedBucketCount = expectedDocCountsForGeoHash.get(geohash);
@ -281,7 +283,7 @@ public class GeoHashGridTests extends ElasticsearchIntegrationTest {
//Check we only have one bucket with the best match for that resolution
assertThat(geoGrid.getBuckets().size(), equalTo(1));
for (GeoHashGrid.Bucket cell : geoGrid.getBuckets()) {
String geohash = cell.getKey();
String geohash = cell.getKeyAsString();
long bucketCount = cell.getDocCount();
int expectedBucketCount = 0;
for (ObjectIntCursor<String> cursor : expectedDocCountsForGeoHash) {

View File

@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
@ -128,12 +129,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -151,12 +153,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey((i + preOffsetMultiplier) * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) (i + preOffsetMultiplier) * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) (i + preOffsetMultiplier) * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -174,12 +177,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey((i + postOffsetMultiplier) * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) (i + postOffsetMultiplier) * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) (i + postOffsetMultiplier) * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -202,7 +206,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -225,7 +229,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -250,7 +254,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histoBuckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertEquals(0, key % interval);
assertTrue(buckets.add(key));
assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)]));
@ -279,7 +283,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histoBuckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertEquals(0, key % interval);
assertTrue(buckets.add(key));
assertThat(bucket.getDocCount(), equalTo(valueCounts[(int) (key / interval)]));
@ -310,7 +314,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
Sum sum = bucket.getAggregations().get("sum");
@ -322,7 +326,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
}
}
assertThat(sum.getValue(), equalTo((double) s));
assertThat((String) propertiesKeys[i], equalTo(String.valueOf((long) i * interval)));
assertThat((long) propertiesKeys[i], equalTo((long) i * interval));
assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i]));
assertThat((double) propertiesCounts[i], equalTo((double) s));
}
@ -347,7 +351,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
Sum sum = bucket.getAggregations().get("sum");
@ -383,7 +387,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertTrue(visited.add(key));
int b = (int) (key / interval);
assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
@ -423,7 +427,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertTrue(visited.add(key));
int b = (int) (key / interval);
assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
@ -463,7 +467,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertTrue(visited.add(key));
int b = (int) (key / interval);
assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
@ -503,7 +507,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertTrue(visited.add(key));
int b = (int) (key / interval);
assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
@ -545,7 +549,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
long key = bucket.getKeyAsNumber().longValue();
long key = ((Number) bucket.getKey()).longValue();
assertTrue(visited.add(key));
int b = (int) (key / interval);
assertThat(bucket.getDocCount(), equalTo(valueCounts[b]));
@ -578,12 +582,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numBuckets));
for (int i = 2 / interval; i <= (numDocs + 1) / interval; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(counts[i]));
}
}
@ -600,12 +605,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValuesBuckets));
for (int i = 0; i < numValuesBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valuesCounts[i]));
}
}
@ -628,7 +634,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValuesBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valuesCounts[i]));
}
}
@ -656,12 +662,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numBuckets));
for (int i = 2 / interval; i <= (numDocs + 2) / interval; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(counts[i]));
}
}
@ -691,12 +698,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numBuckets));
for (int i = 2 / interval; i < (numDocs + 2) / interval; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(counts[i]));
Terms terms = bucket.getAggregations().get(MULTI_VALUED_FIELD_NAME);
assertThat(terms, notNullValue());
@ -723,12 +731,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -752,7 +761,7 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
Sum sum = bucket.getAggregations().get("sum");
@ -779,12 +788,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValuesBuckets));
for (int i = 0; i < numValuesBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valuesCounts[i]));
}
}
@ -802,12 +812,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValuesBuckets));
for (int i = 0; i < numValuesBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valuesCounts[i]));
assertThat(bucket.getAggregations().asList().isEmpty(), is(false));
Sum sum = bucket.getAggregations().get("sum");
@ -850,12 +861,13 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(numValueBuckets));
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = histo.getBucketByKey(i * interval);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo((long) i * interval));
assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
}
}
@ -871,7 +883,8 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
List<? extends Bucket> buckets = histo.getBuckets();
Histogram.Bucket bucket = buckets.get(1);
assertThat(bucket, Matchers.notNullValue());
histo = bucket.getAggregations().get("sub_histo");
@ -944,13 +957,14 @@ public class HistogramTests extends ElasticsearchIntegrationTest {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(bucketsCount));
List<? extends Bucket> buckets = histo.getBuckets();
assertThat(buckets.size(), equalTo(bucketsCount));
long key = Math.min(boundsMinKey, 0);
for (int i = 0; i < bucketsCount; i++) {
Histogram.Bucket bucket = histo.getBucketByKey(key);
Histogram.Bucket bucket = buckets.get(i);
assertThat(bucket, notNullValue());
assertThat(bucket.getKeyAsNumber().longValue(), equalTo(key));
assertThat(((Number) bucket.getKey()).longValue(), equalTo(key));
assertThat(bucket.getDocCount(), equalTo(extendedValueCounts[i]));
key += interval;
}

View File

@ -22,7 +22,8 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4Range;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
@ -97,35 +98,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
assertThat(range.getBuckets().size(), equalTo(3));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(buckets.size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
}
@ -142,26 +144,27 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(2));
IPv4Range.Bucket bucket = range.getBucketByKey("10.0.0.0/25");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.0/25"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.0")));
assertThat((String) bucket.getKey(), equalTo("10.0.0.0/25"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.0")));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.0"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.128")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.128")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.128"));
assertThat(bucket.getDocCount(), equalTo(128l));
bucket = range.getBucketByKey("10.0.0.128/25");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.128/25"));
assertThat((long) bucket.getFrom().doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.128")));
assertThat((String) bucket.getKey(), equalTo("10.0.0.128/25"));
assertThat((long) ((Number) bucket.getFrom()).doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.128")));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.128"));
assertThat((long) bucket.getTo().doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.1.0"))); // range is exclusive on the to side
assertThat((long) ((Number) bucket.getTo()).doubleValue(), equalTo(IpFieldMapper.ipToLong("10.0.1.0"))); // range is exclusive on the to side
assertThat(bucket.getToAsString(), equalTo("10.0.1.0"));
assertThat(bucket.getDocCount(), equalTo(127l)); // include 10.0.0.128
}
@ -179,35 +182,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("r1");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r1"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("r1"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("r2");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r2"));
assertThat((String) bucket.getKey(), equalTo("r2"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("r3");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r3"));
assertThat((String) bucket.getKey(), equalTo("r3"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
}
@ -226,21 +230,22 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Object[] propertiesKeys = (Object[]) range.getProperty("_key");
Object[] propertiesDocCounts = (Object[]) range.getProperty("_count");
Object[] propertiesCounts = (Object[]) range.getProperty("sum.value");
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
@ -249,13 +254,13 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertThat((long) propertiesDocCounts[0], equalTo(100l));
assertThat((double) propertiesCounts[0], equalTo((double) 100));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
@ -264,12 +269,12 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertThat((long) propertiesDocCounts[1], equalTo(100l));
assertThat((double) propertiesCounts[1], equalTo((double) 200));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
sum = bucket.getAggregations().get("sum");
@ -294,41 +299,42 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.99")));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.199")));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
max = bucket.getAggregations().get("max");
@ -350,35 +356,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
}
@ -413,35 +420,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
}
@@ -460,35 +468,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
}
@@ -508,41 +517,42 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.100")));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.200")));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
max = bucket.getAggregations().get("max");
@@ -563,35 +573,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
}
@@ -610,41 +621,42 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.99")));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
max = bucket.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.199")));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
max = bucket.getAggregations().get("max");
@@ -665,35 +677,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
}
@@ -712,41 +725,42 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
Max max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.100")));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(101l));
max = bucket.getAggregations().get("max");
assertThat(max, Matchers.notNullValue());
assertThat((long) max.getValue(), equalTo(IpFieldMapper.ipToLong("10.0.0.200")));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(56l));
max = bucket.getAggregations().get("max");
@@ -767,35 +781,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0l));
}
@@ -813,35 +828,36 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
IPv4Range.Bucket bucket = range.getBucketByKey("*-10.0.0.100");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("*-10.0.0.100"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.100-10.0.0.200");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.100-10.0.0.200"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.100"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.100")));
assertThat(bucket.getToAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getTo().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getDocCount(), equalTo(100l));
bucket = range.getBucketByKey("10.0.0.200-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat((String) bucket.getKey(), equalTo("10.0.0.200-*"));
assertThat(bucket.getFromAsString(), equalTo("10.0.0.200"));
assertThat(bucket.getFrom().doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo((double) IpFieldMapper.ipToLong("10.0.0.200")));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(55l));
}
@@ -857,15 +873,15 @@ public class IPv4RangeTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
IPv4Range range = bucket.getAggregations().get("ip_range");
List<IPv4Range.Bucket> buckets = new ArrayList<>(range.getBuckets());
Range range = bucket.getAggregations().get("ip_range");
List<Range.Bucket> buckets = new ArrayList<>(range.getBuckets());
assertThat(range, Matchers.notNullValue());
assertThat(range.getName(), equalTo("ip_range"));
assertThat(buckets.size(), is(1));
assertThat(buckets.get(0).getKey(), equalTo("r1"));
assertThat((String) buckets.get(0).getKey(), equalTo("r1"));
assertThat(buckets.get(0).getFromAsString(), equalTo("10.0.0.1"));
assertThat(buckets.get(0).getToAsString(), equalTo("10.0.0.10"));
assertThat(buckets.get(0).getDocCount(), equalTo(0l));

View File

@@ -223,7 +223,7 @@ public class LongTermsTests extends AbstractTermsTests {
}
/**
 * Returns the key of the given terms bucket in its string form.
 *
 * <p>After the aggregations response-API cleanup, {@code Bucket.getKey()} returns
 * {@code Object}, so tests must go through {@code getKeyAsString()} to compare keys
 * as strings. (The previous interleaved variant
 * {@code randomBoolean() ? bucket.getKey() : key(bucket)} infinitely recursed on one
 * branch and left a second return unreachable.)
 *
 * @param bucket the terms bucket whose key is wanted; must not be {@code null}
 * @return the bucket key rendered as a string
 */
private String key(Terms.Bucket bucket) {
    return bucket.getKeyAsString();
}
@Test
@@ -409,7 +409,7 @@ public class LongTermsTests extends AbstractTermsTests {
Sum sum = bucket.getAggregations().get("sum");
assertThat(sum, notNullValue());
assertThat((long) sum.getValue(), equalTo(i+i+1l));
assertThat((String) propertiesKeys[i], equalTo(String.valueOf(i)));
assertThat((long) propertiesKeys[i], equalTo((long) i));
assertThat((long) propertiesDocCounts[i], equalTo(1l));
assertThat((double) propertiesCounts[i], equalTo((double) i + i + 1l));
}
@@ -805,7 +805,7 @@ public class LongTermsTests extends AbstractTermsTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Terms terms = bucket.getAggregations().get("terms");

View File

@@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.LongOpenHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
@@ -29,21 +30,26 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import java.util.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
@@ -116,13 +122,12 @@ public class MinDocCountTests extends AbstractTermsTests {
int size2 = 0;
while (it1.hasNext()) {
final Terms.Bucket bucket1 = it1.next();
if (bucket1.getDocCount() >= minDocCount && (matcher == null || matcher.reset(bucket1.getKey()).matches())) {
if (bucket1.getDocCount() >= minDocCount && (matcher == null || matcher.reset(bucket1.getKeyAsString()).matches())) {
if (size2++ == size) {
break;
}
assertTrue(it2.hasNext());
final Terms.Bucket bucket2 = it2.next();
assertEquals(bucket1.getKeyAsText(), bucket2.getKeyAsText());
assertEquals(bucket1.getDocCount(), bucket2.getDocCount());
}
}
@@ -134,18 +139,7 @@ public class MinDocCountTests extends AbstractTermsTests {
for (Histogram.Bucket b1 : histo1.getBuckets()) {
if (b1.getDocCount() >= minDocCount) {
final Histogram.Bucket b2 = it2.next();
assertEquals(b1.getKeyAsNumber(), b2.getKeyAsNumber());
assertEquals(b1.getDocCount(), b2.getDocCount());
}
}
}
private void assertSubset(DateHistogram histo1, DateHistogram histo2, long minDocCount) {
final Iterator<? extends DateHistogram.Bucket> it2 = histo2.getBuckets().iterator();
for (DateHistogram.Bucket b1 : histo1.getBuckets()) {
if (b1.getDocCount() >= minDocCount) {
final DateHistogram.Bucket b2 = it2.next();
assertEquals(b1.getKeyAsNumber(), b2.getKeyAsNumber());
assertEquals(b1.getKey(), b2.getKey());
assertEquals(b1.getDocCount(), b2.getDocCount());
}
}
@@ -378,18 +372,18 @@ public class MinDocCountTests extends AbstractTermsTests {
final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type")
.setSearchType(SearchType.COUNT)
.setQuery(QUERY)
.addAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).order(order).minDocCount(0))
.addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).order(order).minDocCount(0))
.execute().actionGet();
final DateHistogram allHisto = allResponse.getAggregations().get("histo");
final Histogram allHisto = allResponse.getAggregations().get("histo");
for (long minDocCount = 0; minDocCount < 50; ++minDocCount) {
final SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setSearchType(SearchType.COUNT)
.setQuery(QUERY)
.addAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).order(order).minDocCount(minDocCount))
.addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).order(order).minDocCount(minDocCount))
.execute().actionGet();
assertSubset(allHisto, (DateHistogram) response.getAggregations().get("histo"), minDocCount);
assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount);
}
}

View File

@@ -199,7 +199,7 @@ public class MissingTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Missing missing = bucket.getAggregations().get("missing");

View File

@@ -46,9 +46,21 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.nested;
import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.hamcrest.core.IsNull.notNullValue;
/**
@ -335,7 +347,7 @@ public class NestedTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Nested nested = bucket.getAggregations().get("nested");

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
@ -103,7 +104,10 @@ public class RangeTests extends ElasticsearchIntegrationTest {
final long docCount = i == 1 || i == numDocs + 1 ? 1 : 2;
assertThat(bucket.getDocCount(), equalTo(docCount));
Range range = bucket.getAggregations().get("range");
Range.Bucket rangeBucket = range.getBucketByKey("*-3.0");
List<? extends Bucket> buckets = range.getBuckets();
Range.Bucket rangeBucket = buckets.get(0);
assertThat((String) rangeBucket.getKey(), equalTo("*-3.0"));
assertThat(rangeBucket.getKeyAsString(), equalTo("*-3.0"));
assertThat(rangeBucket, notNullValue());
assertThat(rangeBucket.getFromAsString(), nullValue());
assertThat(rangeBucket.getToAsString(), equalTo("3.0"));
@ -114,7 +118,9 @@ public class RangeTests extends ElasticsearchIntegrationTest {
} else {
assertThat(rangeBucket.getDocCount(), equalTo(0L));
}
rangeBucket = range.getBucketByKey("3.0-6.0");
rangeBucket = buckets.get(1);
assertThat((String) rangeBucket.getKey(), equalTo("3.0-6.0"));
assertThat(rangeBucket.getKeyAsString(), equalTo("3.0-6.0"));
assertThat(rangeBucket, notNullValue());
assertThat(rangeBucket.getFromAsString(), equalTo("3.0"));
assertThat(rangeBucket.getToAsString(), equalTo("6.0"));
@ -125,7 +131,9 @@ public class RangeTests extends ElasticsearchIntegrationTest {
} else {
assertThat(rangeBucket.getDocCount(), equalTo(0L));
}
rangeBucket = range.getBucketByKey("6.0-*");
rangeBucket = buckets.get(2);
assertThat((String) rangeBucket.getKey(), equalTo("6.0-*"));
assertThat(rangeBucket.getKeyAsString(), equalTo("6.0-*"));
assertThat(rangeBucket, notNullValue());
assertThat(rangeBucket.getFromAsString(), equalTo("6.0"));
assertThat(rangeBucket.getToAsString(), nullValue());
@ -155,31 +163,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
assertThat(range.getBuckets().size(), equalTo(3));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(buckets.size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
@ -203,31 +212,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("3-6");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3-6"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3-6"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3"));
assertThat(bucket.getToAsString(), equalTo("6"));
assertThat(bucket.getDocCount(), equalTo(3l));
bucket = range.getBucketByKey("6-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
@ -249,31 +259,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("r1");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r1"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("r1"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("r2");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r2"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("r2"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
bucket = range.getBucketByKey("r3");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r3"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("r3"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5L));
@ -296,16 +307,17 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Object[] propertiesKeys = (Object[]) range.getProperty("_key");
Object[] propertiesDocCounts = (Object[]) range.getProperty("_count");
Object[] propertiesCounts = (Object[]) range.getProperty("sum.value");
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
@ -316,11 +328,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat((long) propertiesDocCounts[0], equalTo(2l));
assertThat((double) propertiesCounts[0], equalTo(3.0));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
@ -331,11 +343,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat((long) propertiesDocCounts[1], equalTo(3l));
assertThat((double) propertiesCounts[1], equalTo(12.0));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5l));
@ -368,13 +380,14 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
@ -382,11 +395,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(avg, notNullValue());
assertThat(avg.getValue(), equalTo(1.5)); // (1 + 2) / 2
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
@ -394,11 +407,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(avg, notNullValue());
assertThat(avg.getValue(), equalTo(4.0)); // (3 + 4 + 5) / 3
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5l));
@ -428,31 +441,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(1l)); // 2
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l)); // 3, 4, 5
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4l));
@ -487,31 +501,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4l));
@ -547,31 +562,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(1l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 3l));
@ -612,13 +628,14 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(1l));
@ -627,11 +644,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(sum.getName(), equalTo("sum"));
assertThat(sum.getValue(), equalTo(2d+3d));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
@ -640,11 +657,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(sum.getName(), equalTo("sum"));
assertThat(sum.getValue(), equalTo((double) 2+3+3+4+4+5+5+6));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 3L));
@ -674,31 +691,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5l));
@ -721,13 +739,14 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
@ -735,11 +754,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(avg, notNullValue());
assertThat(avg.getValue(), equalTo(1.5)); // (1 + 2) / 2
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
@ -747,11 +766,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(avg, notNullValue());
assertThat(avg.getValue(), equalTo(4.0)); // (3 + 4 + 5) / 3
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5l));
@ -779,22 +798,23 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(2));
Range.Bucket bucket = range.getBucketByKey("*--1.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*--1.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(-1.0));
assertThat((String) bucket.getKey(), equalTo("*--1.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(-1.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("-1.0"));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = range.getBucketByKey("1000.0-*");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("1000.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(1000d));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("1000.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(1000d));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("1000.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0l));
@@ -816,31 +836,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4l));
@@ -880,13 +901,14 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("r1");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r1"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("r1"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
@@ -895,11 +917,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(sum.getName(), equalTo("sum"));
assertThat(sum.getValue(), equalTo((double) 1+2+2+3));
bucket = range.getBucketByKey("r2");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r2"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("r2"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
@@ -908,11 +930,11 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(sum.getName(), equalTo("sum"));
assertThat(sum.getValue(), equalTo((double) 2+3+3+4+4+5+5+6));
bucket = range.getBucketByKey("r3");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("r3"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("r3"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 4l));
@@ -942,31 +964,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(0l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(0l));
@@ -990,31 +1013,32 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(3));
Range.Bucket bucket = range.getBucketByKey("*-3.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-3.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(3.0));
assertThat((String) bucket.getKey(), equalTo("*-3.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(3.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("3.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(3l));
bucket = range.getBucketByKey("6.0-*");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("6.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(6.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("6.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(6.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("6.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 5l));
@@ -1037,40 +1061,41 @@ public class RangeTests extends ElasticsearchIntegrationTest {
Range range = response.getAggregations().get("range");
assertThat(range, notNullValue());
assertThat(range.getName(), equalTo("range"));
List<? extends Bucket> buckets = range.getBuckets();
assertThat(range.getBuckets().size(), equalTo(4));
Range.Bucket bucket = range.getBucketByKey("*-5.0");
Range.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("*-5.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(bucket.getTo().doubleValue(), equalTo(5.0));
assertThat((String) bucket.getKey(), equalTo("*-5.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0));
assertThat(bucket.getFromAsString(), nullValue());
assertThat(bucket.getToAsString(), equalTo("5.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = range.getBucketByKey("3.0-6.0");
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("3.0-6.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(3.0));
assertThat(bucket.getTo().doubleValue(), equalTo(6.0));
assertThat((String) bucket.getKey(), equalTo("3.0-6.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(3.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(6.0));
assertThat(bucket.getFromAsString(), equalTo("3.0"));
assertThat(bucket.getToAsString(), equalTo("6.0"));
assertThat(bucket.getDocCount(), equalTo(4l));
bucket = range.getBucketByKey("4.0-5.0");
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("4.0-5.0"));
assertThat(bucket.getFrom().doubleValue(), equalTo(4.0));
assertThat(bucket.getTo().doubleValue(), equalTo(5.0));
assertThat((String) bucket.getKey(), equalTo("4.0-5.0"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(5.0));
assertThat(bucket.getFromAsString(), equalTo("4.0"));
assertThat(bucket.getToAsString(), equalTo("5.0"));
assertThat(bucket.getDocCount(), equalTo(2l));
bucket = range.getBucketByKey("4.0-*");
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("4.0-*"));
assertThat(bucket.getFrom().doubleValue(), equalTo(4.0));
assertThat(bucket.getTo().doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat((String) bucket.getKey(), equalTo("4.0-*"));
assertThat(((Number) bucket.getFrom()).doubleValue(), equalTo(4.0));
assertThat(((Number) bucket.getTo()).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
assertThat(bucket.getFromAsString(), equalTo("4.0"));
assertThat(bucket.getToAsString(), nullValue());
assertThat(bucket.getDocCount(), equalTo(numDocs - 2l));
@@ -1087,7 +1112,7 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(0l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Range range = bucket.getAggregations().get("range");
@@ -1095,9 +1120,9 @@ public class RangeTests extends ElasticsearchIntegrationTest {
assertThat(range, Matchers.notNullValue());
assertThat(range.getName(), equalTo("range"));
assertThat(buckets.size(), is(1));
assertThat(buckets.get(0).getKey(), equalTo("0-2"));
assertThat(buckets.get(0).getFrom().doubleValue(), equalTo(0.0));
assertThat(buckets.get(0).getTo().doubleValue(), equalTo(2.0));
assertThat((String) buckets.get(0).getKey(), equalTo("0-2"));
assertThat(((Number) buckets.get(0).getFrom()).doubleValue(), equalTo(0.0));
assertThat(((Number) buckets.get(0).getTo()).doubleValue(), equalTo(2.0));
assertThat(buckets.get(0).getFromAsString(), equalTo("0.0"));
assertThat(buckets.get(0).getToAsString(), equalTo("2.0"));
assertThat(buckets.get(0).getDocCount(), equalTo(0l));

View File

@@ -41,11 +41,18 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.nested;
import static org.elasticsearch.search.aggregations.AggregationBuilders.reverseNested;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.hamcrest.core.IsNull.notNullValue;
/**
@@ -160,7 +167,7 @@ public class ReverseNestedTests extends ElasticsearchIntegrationTest {
// nested.field2: 1
Terms.Bucket bucket = usernameBuckets.get(0);
assertThat(bucket.getKey(), equalTo("1"));
assertThat(bucket.getKeyAsString(), equalTo("1"));
assertThat(bucket.getDocCount(), equalTo(6l));
ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1");
assertThat((long) reverseNested.getProperty("_count"), equalTo(5l));
@@ -168,153 +175,153 @@ public class ReverseNestedTests extends ElasticsearchIntegrationTest {
assertThat((Terms) reverseNested.getProperty("field1"), sameInstance(tags));
List<Terms.Bucket> tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(6));
assertThat(tagsBuckets.get(0).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(4l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(3l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(4).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(4).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(5).getKey(), equalTo("x"));
assertThat(tagsBuckets.get(5).getKeyAsString(), equalTo("x"));
assertThat(tagsBuckets.get(5).getDocCount(), equalTo(1l));
// nested.field2: 4
bucket = usernameBuckets.get(1);
assertThat(bucket.getKey(), equalTo("4"));
assertThat(bucket.getKeyAsString(), equalTo("4"));
assertThat(bucket.getDocCount(), equalTo(4l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(5));
assertThat(tagsBuckets.get(0).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(3l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(4).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(4).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1l));
// nested.field2: 7
bucket = usernameBuckets.get(2);
assertThat(bucket.getKey(), equalTo("7"));
assertThat(bucket.getKeyAsString(), equalTo("7"));
assertThat(bucket.getDocCount(), equalTo(3l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(5));
assertThat(tagsBuckets.get(0).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(4).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(4).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(4).getDocCount(), equalTo(1l));
// nested.field2: 2
bucket = usernameBuckets.get(3);
assertThat(bucket.getKey(), equalTo("2"));
assertThat(bucket.getKeyAsString(), equalTo("2"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(3));
assertThat(tagsBuckets.get(0).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
// nested.field2: 3
bucket = usernameBuckets.get(4);
assertThat(bucket.getKey(), equalTo("3"));
assertThat(bucket.getKeyAsString(), equalTo("3"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(3));
assertThat(tagsBuckets.get(0).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
// nested.field2: 5
bucket = usernameBuckets.get(5);
assertThat(bucket.getKey(), equalTo("5"));
assertThat(bucket.getKeyAsString(), equalTo("5"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("z"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("z"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
// nested.field2: 6
bucket = usernameBuckets.get(6);
assertThat(bucket.getKey(), equalTo("6"));
assertThat(bucket.getKeyAsString(), equalTo("6"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("y"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("y"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
// nested.field2: 8
bucket = usernameBuckets.get(7);
assertThat(bucket.getKey(), equalTo("8"));
assertThat(bucket.getKeyAsString(), equalTo("8"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(2l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("x"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("x"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
// nested.field2: 9
bucket = usernameBuckets.get(8);
assertThat(bucket.getKey(), equalTo("9"));
assertThat(bucket.getKeyAsString(), equalTo("9"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("z"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("z"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
}
@@ -373,75 +380,75 @@ public class ReverseNestedTests extends ElasticsearchIntegrationTest {
List<Terms.Bucket> usernameBuckets = new ArrayList<>(usernames.getBuckets());
Terms.Bucket bucket = usernameBuckets.get(0);
assertThat(bucket.getKey(), equalTo("0"));
assertThat(bucket.getKeyAsString(), equalTo("0"));
assertThat(bucket.getDocCount(), equalTo(12l));
ReverseNested reverseNested = bucket.getAggregations().get("nested1_to_field1");
assertThat(reverseNested.getDocCount(), equalTo(5l));
Terms tags = reverseNested.getAggregations().get("field1");
List<Terms.Bucket> tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(2));
assertThat(tagsBuckets.get(0).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(3l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(2l));
bucket = usernameBuckets.get(1);
assertThat(bucket.getKey(), equalTo("1"));
assertThat(bucket.getKeyAsString(), equalTo("1"));
assertThat(bucket.getDocCount(), equalTo(6l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
assertThat(reverseNested.getDocCount(), equalTo(4l));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
bucket = usernameBuckets.get(2);
assertThat(bucket.getKey(), equalTo("2"));
assertThat(bucket.getKeyAsString(), equalTo("2"));
assertThat(bucket.getDocCount(), equalTo(5l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
assertThat(reverseNested.getDocCount(), equalTo(4l));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(4));
assertThat(tagsBuckets.get(0).getKey(), equalTo("a"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("a"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("b"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("b"));
assertThat(tagsBuckets.get(1).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(2).getKey(), equalTo("c"));
assertThat(tagsBuckets.get(2).getKeyAsString(), equalTo("c"));
assertThat(tagsBuckets.get(2).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(3).getKey(), equalTo("e"));
assertThat(tagsBuckets.get(3).getKeyAsString(), equalTo("e"));
assertThat(tagsBuckets.get(3).getDocCount(), equalTo(1l));
bucket = usernameBuckets.get(3);
assertThat(bucket.getKey(), equalTo("3"));
assertThat(bucket.getKeyAsString(), equalTo("3"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
assertThat(reverseNested.getDocCount(), equalTo(2l));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(2));
assertThat(tagsBuckets.get(0).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("f"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("f"));
bucket = usernameBuckets.get(4);
assertThat(bucket.getKey(), equalTo("4"));
assertThat(bucket.getKeyAsString(), equalTo("4"));
assertThat(bucket.getDocCount(), equalTo(2l));
reverseNested = bucket.getAggregations().get("nested1_to_field1");
assertThat(reverseNested.getDocCount(), equalTo(2l));
tags = reverseNested.getAggregations().get("field1");
tagsBuckets = new ArrayList<>(tags.getBuckets());
assertThat(tagsBuckets.size(), equalTo(2));
assertThat(tagsBuckets.get(0).getKey(), equalTo("d"));
assertThat(tagsBuckets.get(0).getKeyAsString(), equalTo("d"));
assertThat(tagsBuckets.get(0).getDocCount(), equalTo(1l));
assertThat(tagsBuckets.get(1).getKey(), equalTo("f"));
assertThat(tagsBuckets.get(1).getKeyAsString(), equalTo("f"));
}
@Test(expected = SearchPhaseExecutionException.class)

View File

@@ -27,19 +27,27 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.missing.Missing;
import org.elasticsearch.search.aggregations.bucket.nested.Nested;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRange;
import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4Range;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.ipRange;
import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
import static org.elasticsearch.search.aggregations.AggregationBuilders.nested;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
@ -87,13 +95,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(global("global")
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Global global = response.getAggregations().get("global");
DateHistogram histo = global.getAggregations().get("histo");
Histogram histo = global.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -103,13 +111,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(filter("filter").filter(FilterBuilders.matchAllFilter())
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Filter filter = response.getAggregations().get("filter");
DateHistogram histo = filter.getAggregations().get("histo");
Histogram histo = filter.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -119,13 +127,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(missing("missing").field("foobar")
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Missing missing = response.getAggregations().get("missing");
DateHistogram histo = missing.getAggregations().get("histo");
Histogram histo = missing.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -137,7 +145,7 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
.addAggregation(global("global")
.subAggregation(filter("filter").filter(FilterBuilders.matchAllFilter())
.subAggregation(missing("missing").field("foobar")
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))))
.execute().actionGet();
assertSearchResponse(response);
@ -145,7 +153,7 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
Global global = response.getAggregations().get("global");
Filter filter = global.getAggregations().get("filter");
Missing missing = filter.getAggregations().get("missing");
DateHistogram histo = missing.getAggregations().get("histo");
Histogram histo = missing.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -155,13 +163,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(nested("nested").path("nested")
.subAggregation(dateHistogram("histo").field("nested.date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("nested.date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Nested nested = response.getAggregations().get("nested");
DateHistogram histo = nested.getAggregations().get("histo");
Histogram histo = nested.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -172,13 +180,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(terms("terms").field("term-s")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
DateHistogram histo = terms.getBucketByKey("term").getAggregations().get("histo");
Histogram histo = terms.getBucketByKey("term").getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -189,13 +197,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(terms("terms").field("term-l")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
DateHistogram histo = terms.getBucketByKey("1").getAggregations().get("histo");
Histogram histo = terms.getBucketByKey("1").getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -206,13 +214,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(terms("terms").field("term-d")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Terms terms = response.getAggregations().get("terms");
DateHistogram histo = terms.getBucketByKey("1.5").getAggregations().get("histo");
Histogram histo = terms.getBucketByKey("1.5").getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -222,13 +230,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(range("range").field("value").addRange("r1", 0, 10)
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Range range = response.getAggregations().get("range");
DateHistogram histo = range.getBucketByKey("r1").getAggregations().get("histo");
Histogram histo = range.getBuckets().get(0).getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -238,13 +246,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(dateRange("range").field("date").addRange("r1", "2014-01-01", "2014-01-10")
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
DateRange range = response.getAggregations().get("range");
DateHistogram histo = range.getBucketByKey("r1").getAggregations().get("histo");
Range range = response.getAggregations().get("range");
Histogram histo = range.getBuckets().get(0).getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -254,13 +262,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(ipRange("range").field("ip").addRange("r1", "10.0.0.1", "10.0.0.10")
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
IPv4Range range = response.getAggregations().get("range");
DateHistogram histo = range.getBucketByKey("r1").getAggregations().get("histo");
Range range = response.getAggregations().get("range");
Histogram histo = range.getBuckets().get(0).getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -270,13 +278,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(histogram("topHisto").field("value").interval(5)
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
Histogram topHisto = response.getAggregations().get("topHisto");
DateHistogram histo = topHisto.getBucketByKey(0).getAggregations().get("histo");
Histogram histo = topHisto.getBuckets().get(0).getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -285,14 +293,14 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(dateHistogram("topHisto").field("date").interval(DateHistogram.Interval.MONTH)
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.addAggregation(dateHistogram("topHisto").field("date").interval(DateHistogramInterval.MONTH)
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
DateHistogram topHisto = response.getAggregations().get("topHisto");
DateHistogram histo = topHisto.getBuckets().iterator().next().getAggregations().get("histo");
Histogram topHisto = response.getAggregations().get("topHisto");
Histogram histo = topHisto.getBuckets().iterator().next().getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}
@ -303,13 +311,13 @@ public class ShardReduceTests extends ElasticsearchIntegrationTest {
SearchResponse response = client().prepareSearch("idx")
.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(geohashGrid("grid").field("location")
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogram.Interval.DAY).minDocCount(0)))
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0)))
.execute().actionGet();
assertSearchResponse(response);
GeoHashGrid grid = response.getAggregations().get("grid");
DateHistogram histo = grid.getBuckets().iterator().next().getAggregations().get("histo");
Histogram histo = grid.getBuckets().iterator().next().getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
@ -54,7 +55,7 @@ public class ShardSizeTermsTests extends ShardSizeTests {
.put("2", 5l)
.build();
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string())));
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}
@ -79,7 +80,7 @@ public class ShardSizeTermsTests extends ShardSizeTests {
.put("2", 4l)
.build();
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string())));
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}
@ -105,7 +106,7 @@ public class ShardSizeTermsTests extends ShardSizeTests {
.put("2", 5l) // <-- count is now fixed
.build();
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string())));
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}
@ -156,7 +157,7 @@ public class ShardSizeTermsTests extends ShardSizeTests {
.put("3", 8l)
.build();
for (Terms.Bucket bucket : buckets) {
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsText().string())));
assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString())));
}
}

View File

@ -27,11 +27,12 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.test.ElasticsearchBackwardsCompatIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
@ -109,8 +110,8 @@ public class SignificantTermsBackwardCompatibilityTests extends ElasticsearchBac
assertTrue(aggs.containsKey("sig_terms"));
SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
assertThat(agg.getBuckets().size(), equalTo(1));
String term = agg.iterator().next().getKey();
String classTerm = classBucket.getKey();
String term = agg.iterator().next().getKeyAsString();
String classTerm = classBucket.getKeyAsString();
assertTrue(term.equals(classTerm));
}
}

View File

@ -36,7 +36,15 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.*;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificantTermsHeuristicModule;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.TransportSignificantTermsHeuristicModule;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
@ -46,7 +54,11 @@ import org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
@ -54,7 +66,9 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
/**
*
@ -105,8 +119,8 @@ public class SignificantTermsSignificanceScoreTests extends ElasticsearchIntegra
assertThat(agg.getBuckets().size(), equalTo(2));
Iterator<SignificantTerms.Bucket> bucketIterator = agg.iterator();
SignificantTerms.Bucket sigBucket = bucketIterator.next();
String term = sigBucket.getKey();
String classTerm = classBucket.getKey();
String term = sigBucket.getKeyAsString();
String classTerm = classBucket.getKeyAsString();
assertTrue(term.equals(classTerm));
assertThat(sigBucket.getSignificanceScore(), closeTo(2.0, 1.e-8));
sigBucket = bucketIterator.next();
@ -137,8 +151,8 @@ public class SignificantTermsSignificanceScoreTests extends ElasticsearchIntegra
assertThat(agg.getBuckets().size(), equalTo(2));
Iterator<SignificantTerms.Bucket> bucketIterator = agg.iterator();
SignificantTerms.Bucket sigBucket = bucketIterator.next();
String term = sigBucket.getKey();
String classTerm = classBucket.getKey();
String term = sigBucket.getKeyAsString();
String classTerm = classBucket.getKeyAsString();
assertTrue(term.equals(classTerm));
assertThat(sigBucket.getSignificanceScore(), closeTo(2.0, 1.e-8));
sigBucket = bucketIterator.next();
@ -246,8 +260,8 @@ public class SignificantTermsSignificanceScoreTests extends ElasticsearchIntegra
assertTrue(aggs.containsKey("sig_terms"));
SignificantTerms agg = (SignificantTerms) aggs.get("sig_terms");
assertThat(agg.getBuckets().size(), equalTo(1));
String term = agg.iterator().next().getKey();
String classTerm = classBucket.getKey();
String term = agg.iterator().next().getKeyAsString();
String classTerm = classBucket.getKeyAsString();
assertTrue(term.equals(classTerm));
}

View File

@ -46,7 +46,9 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
/**
*
@ -120,7 +122,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
.actionGet();
assertSearchResponse(response);
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
Number topCategory = topTerms.getBuckets().iterator().next().getKeyAsNumber();
Number topCategory = (Number) topTerms.getBuckets().iterator().next().getKey();
assertTrue(topCategory.equals(new Long(SNOWBOARDING_CATEGORY)));
}
@ -137,7 +139,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
.actionGet();
assertSearchResponse(response);
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
Number topCategory = topTerms.getBuckets().iterator().next().getKeyAsNumber();
Number topCategory = (Number) topTerms.getBuckets().iterator().next().getKey();
assertTrue(topCategory.equals(new Long(OTHER_CATEGORY)));
}
@ -152,7 +154,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
Set<String> terms = new HashSet<>();
for (Bucket topTerm : topTerms) {
terms.add(topTerm.getKey());
terms.add(topTerm.getKeyAsString());
}
assertThat(terms, hasSize(6));
assertThat(terms.contains("jam"), is(true));
@ -171,7 +173,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
topTerms = response.getAggregations().get("mySignificantTerms");
terms = new HashSet<>();
for (Bucket topTerm : topTerms) {
terms.add(topTerm.getKey());
terms.add(topTerm.getKeyAsString());
}
assertThat(terms, hasSize(1));
assertThat(terms.contains("weller"), is(true));
@ -189,7 +191,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
Set<String> terms = new HashSet<>();
for (Bucket topTerm : topTerms) {
terms.add(topTerm.getKey());
terms.add(topTerm.getKeyAsString());
}
assertThat(terms, hasSize(6));
assertThat(terms.contains("jam"), is(true));
@ -208,7 +210,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
topTerms = response.getAggregations().get("mySignificantTerms");
terms = new HashSet<>();
for (Bucket topTerm : topTerms) {
terms.add(topTerm.getKey());
terms.add(topTerm.getKeyAsString());
}
assertThat(terms, hasSize(1));
assertThat(terms.contains("weller"), is(true));
@ -318,7 +320,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms");
HashSet<String> topWords = new HashSet<String>();
for (Bucket topTerm : topTerms) {
topWords.add(topTerm.getKey());
topWords.add(topTerm.getKeyAsString());
}
//The word "paul" should be a constant of all docs in the background set and therefore not seen as significant
assertFalse(topWords.contains("paul"));
@ -347,9 +349,9 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
SignificantTerms topTerms = topCategory.getAggregations().get("mySignificantTerms");
HashSet<String> foundTopWords = new HashSet<String>();
for (Bucket topTerm : topTerms) {
foundTopWords.add(topTerm.getKey());
foundTopWords.add(topTerm.getKeyAsString());
}
String[] expectedKeywords = expectedKeywordsByCategory[Integer.parseInt(topCategory.getKey()) - 1];
String[] expectedKeywords = expectedKeywordsByCategory[Integer.parseInt(topCategory.getKeyAsString()) - 1];
for (String expectedKeyword : expectedKeywords) {
assertTrue(expectedKeyword + " missing from category keywords", foundTopWords.contains(expectedKeyword));
}
@ -377,7 +379,7 @@ public class SignificantTermsTests extends ElasticsearchIntegrationTest {
private void checkExpectedStringTermsFound(SignificantTerms topTerms) {
HashMap<String,Bucket>topWords=new HashMap<>();
for (Bucket topTerm : topTerms ){
topWords.put(topTerm.getKey(),topTerm);
topWords.put(topTerm.getKeyAsString(), topTerm);
}
assertTrue( topWords.containsKey("haakonsen"));
assertTrue( topWords.containsKey("craig"));

View File

@ -227,7 +227,7 @@ public class StringTermsTests extends AbstractTermsTests {
}
private String key(Terms.Bucket bucket) {
return randomBoolean() ? bucket.getKey() : bucket.getKeyAsText().string();
return bucket.getKeyAsString();
}
@Test
@ -1101,7 +1101,7 @@ public class StringTermsTests extends AbstractTermsTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
Terms terms = bucket.getAggregations().get("terms");

View File

@ -40,7 +40,10 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.core.IsNull.notNullValue;
@ElasticsearchIntegrationTest.SuiteScopeTest
@ -111,7 +114,7 @@ public class TermsDocCountErrorTests extends ElasticsearchIntegrationTest{
for (Terms.Bucket testBucket : testBuckets) {
assertThat(testBucket, notNullValue());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKey());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
assertThat(accurateBucket, notNullValue());
assertThat(accurateBucket.getDocCountError(), equalTo(0l));
assertThat(testBucket.getDocCountError(), lessThanOrEqualTo(testTerms.getDocCountError()));
@ -121,7 +124,7 @@ public class TermsDocCountErrorTests extends ElasticsearchIntegrationTest{
for (Terms.Bucket accurateBucket: accurateTerms.getBuckets()) {
assertThat(accurateBucket, notNullValue());
Terms.Bucket testBucket = accurateTerms.getBucketByKey(accurateBucket.getKey());
Terms.Bucket testBucket = accurateTerms.getBucketByKey(accurateBucket.getKeyAsString());
if (testBucket == null) {
assertThat(accurateBucket.getDocCount(), lessThanOrEqualTo(testTerms.getDocCountError()));
}
@ -145,7 +148,7 @@ public class TermsDocCountErrorTests extends ElasticsearchIntegrationTest{
for (Terms.Bucket testBucket : testBuckets) {
assertThat(testBucket, notNullValue());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKey());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
assertThat(accurateBucket, notNullValue());
assertThat(accurateBucket.getDocCountError(), equalTo(0l));
assertThat(testBucket.getDocCountError(), equalTo(0l));
@ -182,7 +185,7 @@ public class TermsDocCountErrorTests extends ElasticsearchIntegrationTest{
for (Terms.Bucket testBucket : testBuckets) {
assertThat(testBucket, notNullValue());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKey());
Terms.Bucket accurateBucket = accurateTerms.getBucketByKey(testBucket.getKeyAsString());
assertThat(accurateBucket, notNullValue());
assertThat(accurateBucket.getDocCountError(), equalTo(0l));
assertThat(testBucket.getDocCountError(), anyOf(equalTo(-1l), equalTo(0l)));

View File

@ -229,7 +229,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
}
private String key(Terms.Bucket bucket) {
return randomBoolean() ? bucket.getKey() : bucket.getKeyAsText().string();
return bucket.getKeyAsString();
}
@Test
@ -857,7 +857,7 @@ public class TopHitsTests extends ElasticsearchIntegrationTest {
Histogram histogram = searchResponse.getAggregations().get("dates");
for (int i = 0; i < numArticles; i += 5) {
Histogram.Bucket bucket = histogram.getBucketByKey(i);
Histogram.Bucket bucket = histogram.getBuckets().get(i / 5);
assertThat(bucket.getDocCount(), equalTo(5l));
long numNestedDocs = 10 + (5 * i);

View File

@ -48,7 +48,7 @@ public class AvgTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
Avg avg = bucket.getAggregations().get("avg");

View File

@ -64,7 +64,7 @@ public class ExtendedStatsTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
ExtendedStats stats = bucket.getAggregations().get("stats");

View File

@ -47,7 +47,7 @@ public class MaxTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
Max max = bucket.getAggregations().get("max");

View File

@ -47,7 +47,7 @@ public class MinTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
Min min = bucket.getAggregations().get("min");

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.metrics;
import com.google.common.collect.Lists;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.search.aggregations.bucket.global.Global;
@ -111,7 +112,7 @@ public class PercentileRanksTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
PercentileRanks reversePercentiles = bucket.getAggregations().get("percentile_ranks");

View File

@ -112,7 +112,7 @@ public class PercentilesTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
Percentiles percentiles = bucket.getAggregations().get("percentiles");

View File

@ -719,7 +719,7 @@ public class ScriptedMetricTests extends ElasticsearchIntegrationTest {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
ScriptedMetric scriptedMetric = bucket.getAggregations().get("scripted");

View File

@ -52,7 +52,7 @@ public class StatsTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
Stats stats = bucket.getAggregations().get("stats");

View File

@ -47,7 +47,7 @@ public class SumTests extends AbstractNumericTests {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
Histogram histo = searchResponse.getAggregations().get("histo");
assertThat(histo, notNullValue());
Histogram.Bucket bucket = histo.getBucketByKey(1l);
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, notNullValue());
Sum sum = bucket.getAggregations().get("sum");

View File

@ -44,7 +44,12 @@ import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.query.FilterBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.HasChildFilterBuilder;
import org.elasticsearch.index.query.HasChildQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -60,7 +65,12 @@ import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
@ -70,12 +80,42 @@ import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.elasticsearch.common.settings.ImmutableSettings.builder;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.query.FilterBuilders.boolFilter;
import static org.elasticsearch.index.query.FilterBuilders.hasParentFilter;
import static org.elasticsearch.index.query.FilterBuilders.matchAllFilter;
import static org.elasticsearch.index.query.FilterBuilders.notFilter;
import static org.elasticsearch.index.query.FilterBuilders.queryFilter;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.FilterBuilders.termsFilter;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.topChildrenQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.factorFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
/**
*
@ -578,9 +618,9 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
Filter filter = global.getAggregations().get("filter");
Terms termsFacet = filter.getAggregations().get("facet1");
assertThat(termsFacet.getBuckets().size(), equalTo(2));
assertThat(termsFacet.getBuckets().get(0).getKey(), equalTo("red"));
assertThat(termsFacet.getBuckets().get(0).getKeyAsString(), equalTo("red"));
assertThat(termsFacet.getBuckets().get(0).getDocCount(), equalTo(2L));
assertThat(termsFacet.getBuckets().get(1).getKey(), equalTo("yellow"));
assertThat(termsFacet.getBuckets().get(1).getKeyAsString(), equalTo("yellow"));
assertThat(termsFacet.getBuckets().get(1).getDocCount(), equalTo(1L));
}

View File

@ -44,12 +44,20 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.*;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.gaussDecayFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.linearDecayFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
public class FunctionScoreTests extends ElasticsearchIntegrationTest {
@ -439,7 +447,7 @@ public class FunctionScoreTests extends ElasticsearchIntegrationTest {
).actionGet();
assertSearchResponse(response);
assertThat(response.getHits().getAt(0).score(), equalTo(1.0f));
assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsNumber().floatValue(), is(1f));
assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(), equalTo("1.0"));
assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1l));
}