Adds a hard_bounds parameter to explicitly limit the buckets that a histogram can generate. This is especially useful for open-ended ranges, which can produce a very large number of buckets.
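For orientation, this is the shape of the new parameter through the Java API, mirroring the integration test added below (the index name "idx" and field "date" are illustrative):

    // Unlike extended_bounds, which can only widen the bucket range,
    // hard_bounds guarantees no bucket is created outside [min, max].
    SearchResponse response = client().prepareSearch("idx")
        .addAggregation(dateHistogram("histo")
            .field("date")
            .calendarInterval(DateHistogramInterval.DAY)
            .hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000")))
        .get();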
This commit is contained in:
parent 10be10c99b
commit 2408803fad
@@ -600,3 +600,48 @@ setup:
   - match: { profile.shards.0.aggregations.0.description: histo }
   - match: { profile.shards.0.aggregations.0.breakdown.collect_count: 4 }
   - match: { profile.shards.0.aggregations.0.debug.total_buckets: 3 }
+
+---
+"histogram with hard bounds":
+  - skip:
+      version: " - 7.9.99"
+      reason: hard_bounds were introduced in 7.10.0
+
+  - do:
+      indices.create:
+        index: test_3
+        body:
+          mappings:
+            properties:
+              range:
+                type: long_range
+
+  - do:
+      bulk:
+        index: test_3
+        refresh: true
+        body:
+          - '{"index": {}}'
+          - '{"range": {"lte": 10}}'
+          - '{"index": {}}'
+          - '{"range": {"gte": 15}}'
+
+  - do:
+      search:
+        index: test_3
+        body:
+          size: 0
+          aggs:
+            histo:
+              histogram:
+                field: range
+                interval: 1
+                hard_bounds:
+                  min: 0
+                  max: 20
+
+  - match: { hits.total.value: 2 }
+  - length: { aggregations.histo.buckets: 21 }
+  - match: { aggregations.histo.buckets.0.key: 0 }
+  - match: { aggregations.histo.buckets.0.doc_count: 1 }
+  - match: { aggregations.histo.buckets.20.key: 20 }
+  - match: { aggregations.histo.buckets.20.doc_count: 1 }
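Why the assertions above expect 21 buckets: with interval 1 and hard_bounds {min: 0, max: 20} (inclusive on both ends for numeric histograms), the generated keys run from 0 through 20. A one-line check of that arithmetic (Java, illustrative):

    long min = 0, max = 20, interval = 1;
    long bucketCount = (max - min) / interval + 1; // 21 buckets: keys 0..20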
@@ -0,0 +1,63 @@
+setup:
+  - skip:
+      version: " - 7.1.99"
+      reason: calendar_interval introduced in 7.2.0
+
+  - do:
+      indices.create:
+        index: test_date_hist
+        body:
+          settings:
+            # There was a BWC issue that only showed up on empty shards. This
+            # test has 4 docs and 5 shards makes sure we get one empty.
+            number_of_shards: 5
+          mappings:
+            properties:
+              range:
+                type: date_range
+
+  - do:
+      bulk:
+        index: test_date_hist
+        refresh: true
+        body:
+          - '{"index": {}}'
+          - '{"range": {"gte": "2016-01-01", "lt": "2016-01-02"}}'
+          - '{"index": {}}'
+          - '{"range": {"gte": "2016-01-02", "lt": "2016-01-03"}}'
+          - '{"index": {}}'
+          - '{"range": {"gte": "2016-02-01", "lt": "2016-02-02"}}'
+          - '{"index": {}}'
+          - '{"range": {"gte": "2016-03-01", "lt": "2016-03-02"}}'
+          - '{"index": {}}'
+          - '{"range": {"gte": "2016-04-01"}}'
+          - '{"index": {}}'
+          - '{"range": {"lt": "2016-02-01"}}'
+
+---
+"date_histogram on range with hard bounds":
+  - skip:
+      version: " - 7.9.99"
+      reason: hard_bounds introduced in 7.10.0
+
+  - do:
+      search:
+        body:
+          size: 0
+          aggs:
+            histo:
+              date_histogram:
+                field: range
+                calendar_interval: month
+                hard_bounds:
+                  "min": "2015-06-01"
+                  "max": "2016-06-01"
+
+  - match: { hits.total.value: 6 }
+  - length: { aggregations.histo.buckets: 13 }
+  - match: { aggregations.histo.buckets.0.key_as_string: "2015-06-01T00:00:00.000Z" }
+  - match: { aggregations.histo.buckets.0.doc_count: 1 }
+  - match: { aggregations.histo.buckets.8.key_as_string: "2016-02-01T00:00:00.000Z" }
+  - match: { aggregations.histo.buckets.8.doc_count: 1 }
+  - match: { aggregations.histo.buckets.12.key_as_string: "2016-06-01T00:00:00.000Z" }
+  - match: { aggregations.histo.buckets.12.doc_count: 1 }
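Why the assertions above expect 13 buckets: one monthly bucket for each of 2015-06 through 2016-06, inclusive on both ends. A quick check of the month count (Java, illustrative):

    import java.time.YearMonth;
    import java.time.temporal.ChronoUnit;

    long months = ChronoUnit.MONTHS.between(
        YearMonth.of(2015, 6), YearMonth.of(2016, 6)) + 1; // 13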
@@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.BucketOrder;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
+import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
 import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;

@@ -1085,7 +1085,7 @@ public class DateHistogramIT extends ESIntegTestCase {
                 .dateHistogramInterval(DateHistogramInterval.days(interval))
                 .minDocCount(0)
                 // when explicitly specifying a format, the extended bounds should be defined by the same format
-                .extendedBounds(new ExtendedBounds(format(boundsMin, pattern), format(boundsMax, pattern)))
+                .extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern)))
                 .format(pattern))
         .get();

@@ -1153,7 +1153,7 @@ public class DateHistogramIT extends ESIntegTestCase {
                 .from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()))
             .addAggregation(
                 dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1))
-                    .timeZone(timezone).minDocCount(0).extendedBounds(new ExtendedBounds("now/d", "now/d+23h"))
+                    .timeZone(timezone).minDocCount(0).extendedBounds(new LongBounds("now/d", "now/d+23h"))
             ).get();
         assertSearchResponse(response);

@@ -1206,7 +1206,7 @@ public class DateHistogramIT extends ESIntegTestCase {
             .addAggregation(
                 dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.days(1))
                     .offset("+6h").minDocCount(0)
-                    .extendedBounds(new ExtendedBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z"))
+                    .extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z"))
             ).get();
         assertSearchResponse(response);

@@ -1378,7 +1378,7 @@ public class DateHistogramIT extends ESIntegTestCase {
         SearchResponse response = client().prepareSearch(indexDateUnmapped)
             .addAggregation(
                 dateHistogram("histo").field("dateField").dateHistogramInterval(DateHistogramInterval.MONTH).format("yyyy-MM")
-                    .minDocCount(0).extendedBounds(new ExtendedBounds("2018-01", "2018-01")))
+                    .minDocCount(0).extendedBounds(new LongBounds("2018-01", "2018-01")))
             .get();
         assertSearchResponse(response);
         Histogram histo = response.getAggregations().get("histo");

@@ -1434,7 +1434,7 @@ public class DateHistogramIT extends ESIntegTestCase {
             .setQuery(new MatchNoneQueryBuilder())
             .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo"))
                 .calendarInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds(
-                    new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
+                    new LongBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
             .get();

         Histogram histo = response.getAggregations().get("histo");

@@ -1451,7 +1451,7 @@ public class DateHistogramIT extends ESIntegTestCase {
             .setQuery(new MatchNoneQueryBuilder())
             .addAggregation(dateHistogram("histo").field("date").timeZone(ZoneId.of("Europe/Oslo"))
                 .dateHistogramInterval(DateHistogramInterval.HOUR).minDocCount(0).extendedBounds(
-                    new ExtendedBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
+                    new LongBounds("2015-10-25T02:00:00.000+02:00", "2015-10-25T04:00:00.000+01:00")))
             .get();

         histo = response.getAggregations().get("histo");

@@ -1649,4 +1649,23 @@ public class DateHistogramIT extends ESIntegTestCase {
         assertThat(buckets.get(1).getKeyAsString(), equalTo("2012-02-01T00:00:00.000-07:00"));
         assertThat(buckets.get(2).getKeyAsString(), equalTo("2012-03-01T00:00:00.000-07:00"));
     }
+
+    public void testHardBoundsOnDates() {
+        SearchResponse response = client().prepareSearch("idx")
+            .addAggregation(dateHistogram("histo")
+                .field("date")
+                .calendarInterval(DateHistogramInterval.DAY)
+                .hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000"))
+            )
+            .get();
+
+        assertSearchResponse(response);
+
+        InternalDateHistogram histogram = response.getAggregations().get("histo");
+        List<InternalDateHistogram.Bucket> buckets = histogram.getBuckets();
+        assertThat(buckets.size(), equalTo(30));
+        assertThat(buckets.get(1).getKeyAsString(), equalTo("2012-02-03T00:00:00.000Z"));
+        assertThat(buckets.get(29).getKeyAsString(), equalTo("2012-03-02T00:00:00.000Z"));
+    }
+
 }
@@ -32,6 +32,7 @@ import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
+import org.elasticsearch.search.aggregations.bucket.histogram.DoubleBounds;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
 import org.elasticsearch.search.aggregations.metrics.Avg;

@@ -1194,6 +1195,58 @@ public class HistogramIT extends ESIntegTestCase {
         assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true));
     }

+    public void testInvalidBounds() {
+        SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("empty_bucket_idx")
+            .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(0.0, 10.0))
+                .extendedBounds(3, 20)).get());
+        assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds"));
+
+        e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("empty_bucket_idx")
+            .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(3.0, null))
+                .extendedBounds(0, 20)).get());
+        assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds"));
+    }
+
+    public void testHardBounds() throws Exception {
+        assertAcked(prepareCreate("test").addMapping("type", "d", "type=double").get());
+        indexRandom(true,
+            client().prepareIndex("test", "type", "1").setSource("d", -0.6),
+            client().prepareIndex("test", "type", "2").setSource("d", 0.5),
+            client().prepareIndex("test", "type", "3").setSource("d", 0.1));
+
+        SearchResponse r = client().prepareSearch("test")
+            .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, null)))
+            .get();
+        assertSearchResponse(r);
+
+        Histogram histogram = r.getAggregations().get("histo");
+        List<? extends Bucket> buckets = histogram.getBuckets();
+        assertEquals(5, buckets.size());
+        assertEquals(0.1, (double) buckets.get(0).getKey(), 0.01d);
+        assertEquals(0.5, (double) buckets.get(4).getKey(), 0.01d);
+
+        r = client().prepareSearch("test")
+            .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(null, 0.0)))
+            .get();
+        assertSearchResponse(r);
+
+        histogram = r.getAggregations().get("histo");
+        buckets = histogram.getBuckets();
+        assertEquals(1, buckets.size());
+        assertEquals(-0.6, (double) buckets.get(0).getKey(), 0.01d);
+
+        r = client().prepareSearch("test")
+            .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, 0.3)))
+            .get();
+        assertSearchResponse(r);
+
+        histogram = r.getAggregations().get("histo");
+        buckets = histogram.getBuckets();
+        assertEquals(1, buckets.size());
+        assertEquals(0.1, (double) buckets.get(0).getKey(), 0.01d);
+
+    }
+
     private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
         SearchResponse response = client()
             .prepareSearch("sort_idx")
@@ -50,6 +50,7 @@ public abstract class AbstractHistogramAggregator extends BucketsAggregator {
     protected final long minDocCount;
     protected final double minBound;
     protected final double maxBound;
+    protected final DoubleBounds hardBounds;
     protected final LongKeyedBucketOrds bucketOrds;

     public AbstractHistogramAggregator(

@@ -62,6 +63,7 @@ public abstract class AbstractHistogramAggregator extends BucketsAggregator {
         long minDocCount,
         double minBound,
         double maxBound,
+        DoubleBounds hardBounds,
         DocValueFormat formatter,
         SearchContext context,
         Aggregator parent,

@@ -80,6 +82,7 @@ public abstract class AbstractHistogramAggregator extends BucketsAggregator {
         this.minDocCount = minDocCount;
         this.minBound = minBound;
         this.maxBound = maxBound;
+        this.hardBounds = hardBounds;
         this.formatter = formatter;
         bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), cardinalityUpperBound);
     }
@@ -19,6 +19,7 @@

 package org.elasticsearch.search.aggregations.bucket.histogram;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.Rounding;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -98,8 +99,11 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil

         PARSER.declareLong(DateHistogramAggregationBuilder::minDocCount, Histogram.MIN_DOC_COUNT_FIELD);

-        PARSER.declareField(DateHistogramAggregationBuilder::extendedBounds, parser -> ExtendedBounds.PARSER.apply(parser, null),
-            ExtendedBounds.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
+        PARSER.declareField(DateHistogramAggregationBuilder::extendedBounds, parser -> LongBounds.PARSER.apply(parser, null),
+            Histogram.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
+
+        PARSER.declareField(DateHistogramAggregationBuilder::hardBounds, parser -> LongBounds.PARSER.apply(parser, null),
+            Histogram.HARD_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);

         PARSER.declareObjectArray(DateHistogramAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p),
             Histogram.ORDER_FIELD);

@@ -111,7 +115,8 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil

     private DateIntervalWrapper dateHistogramInterval = new DateIntervalWrapper();
     private long offset = 0;
-    private ExtendedBounds extendedBounds;
+    private LongBounds extendedBounds;
+    private LongBounds hardBounds;
     private BucketOrder order = BucketOrder.key(true);
     private boolean keyed = false;
     private long minDocCount = 0;

@@ -127,13 +132,14 @@ public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuil
         this.dateHistogramInterval = clone.dateHistogramInterval;
         this.offset = clone.offset;
         this.extendedBounds = clone.extendedBounds;
+        this.hardBounds = clone.hardBounds;
         this.order = clone.order;
         this.keyed = clone.keyed;
         this.minDocCount = clone.minDocCount;
     }

     @Override
     protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) {
         return new DateHistogramAggregationBuilder(this, factoriesBuilder, metadata);
     }

@@ -145,7 +151,10 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
         minDocCount = in.readVLong();
         dateHistogramInterval = new DateIntervalWrapper(in);
         offset = in.readLong();
-        extendedBounds = in.readOptionalWriteable(ExtendedBounds::new);
+        extendedBounds = in.readOptionalWriteable(LongBounds::new);
+        if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
+            hardBounds = in.readOptionalWriteable(LongBounds::new);
+        }
     }

     @Override

@@ -162,6 +171,9 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
         dateHistogramInterval.writeTo(out);
         out.writeLong(offset);
         out.writeOptionalWriteable(extendedBounds);
+        if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
+            out.writeOptionalWriteable(hardBounds);
+        }
     }

     /** Get the current interval in milliseconds that is set on this builder. */

@@ -287,13 +299,13 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
     }

     /** Return extended bounds for this histogram, or {@code null} if none are set. */
-    public ExtendedBounds extendedBounds() {
+    public LongBounds extendedBounds() {
         return extendedBounds;
     }

     /** Set extended bounds on this histogram, so that buckets would also be
      * generated on intervals that did not match any documents. */
-    public DateHistogramAggregationBuilder extendedBounds(ExtendedBounds extendedBounds) {
+    public DateHistogramAggregationBuilder extendedBounds(LongBounds extendedBounds) {
         if (extendedBounds == null) {
             throw new IllegalArgumentException("[extendedBounds] must not be null: [" + name + "]");
         }

@@ -301,6 +313,21 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
         return this;
     }

+    /** Return hard bounds for this histogram, or {@code null} if none are set. */
+    public LongBounds hardBounds() {
+        return hardBounds;
+    }
+
+    /** Set hard bounds on this histogram, specifying boundaries outside which buckets cannot be created. */
+    public DateHistogramAggregationBuilder hardBounds(LongBounds hardBounds) {
+        if (hardBounds == null) {
+            throw new IllegalArgumentException("[hardBounds] must not be null: [" + name + "]");
+        }
+        this.hardBounds = hardBounds;
+        return this;
+    }
+
     /** Return the order to use to sort buckets of this histogram. */
     public BucketOrder order() {
         return order;

@@ -384,9 +411,16 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
         builder.field(Histogram.MIN_DOC_COUNT_FIELD.getPreferredName(), minDocCount);

         if (extendedBounds != null) {
+            builder.startObject(Histogram.EXTENDED_BOUNDS_FIELD.getPreferredName());
             extendedBounds.toXContent(builder, params);
+            builder.endObject();
         }

+        if (hardBounds != null) {
+            builder.startObject(Histogram.HARD_BOUNDS_FIELD.getPreferredName());
+            hardBounds.toXContent(builder, params);
+            builder.endObject();
+        }
         return builder;
     }

@@ -403,11 +437,33 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
         final ZoneId tz = timeZone();
         final Rounding rounding = dateHistogramInterval.createRounding(tz, offset);

-        ExtendedBounds roundedBounds = null;
+        LongBounds roundedBounds = null;
         if (this.extendedBounds != null) {
             // parse any string bounds to longs and round
-            roundedBounds = this.extendedBounds.parseAndValidate(name, queryShardContext, config.format()).round(rounding);
+            roundedBounds = this.extendedBounds.parseAndValidate(name, "extended_bounds", queryShardContext, config.format())
+                .round(rounding);
         }

+        LongBounds roundedHardBounds = null;
+        if (this.hardBounds != null) {
+            // parse any string bounds to longs and round
+            roundedHardBounds = this.hardBounds.parseAndValidate(name, "hard_bounds", queryShardContext, config.format())
+                .round(rounding);
+        }
+
+        if (roundedBounds != null && roundedHardBounds != null) {
+            if (roundedBounds.getMax() != null &&
+                    roundedHardBounds.getMax() != null && roundedBounds.getMax() > roundedHardBounds.getMax()) {
+                throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" +
+                    hardBounds + "], extended bounds: [" + extendedBounds + "]");
+            }
+            if (roundedBounds.getMin() != null &&
+                    roundedHardBounds.getMin() != null && roundedBounds.getMin() < roundedHardBounds.getMin()) {
+                throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" +
+                    hardBounds + "], extended bounds: [" + extendedBounds + "]");
+            }
+        }
+
         return new DateHistogramAggregatorFactory(
             name,
             config,

@@ -416,16 +472,16 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
             minDocCount,
             rounding,
             roundedBounds,
+            roundedHardBounds,
             queryShardContext,
             parent,
             subFactoriesBuilder,
-            metadata
-        );
+            metadata);
     }

     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), order, keyed, minDocCount, dateHistogramInterval, minDocCount, extendedBounds);
+        return Objects.hash(super.hashCode(), order, keyed, minDocCount, dateHistogramInterval, minDocCount, extendedBounds, hardBounds);
     }

     @Override

@@ -439,6 +495,7 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu
             && Objects.equals(minDocCount, other.minDocCount)
             && Objects.equals(dateHistogramInterval, other.dateHistogramInterval)
             && Objects.equals(offset, other.offset)
-            && Objects.equals(extendedBounds, other.extendedBounds);
+            && Objects.equals(extendedBounds, other.extendedBounds)
+            && Objects.equals(hardBounds, other.hardBounds);
     }
 }
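A note on the stream-format changes in the builder above: because hard_bounds first ships in 7.10.0, both the read and write paths gate the new optional field on the stream version, so a mixed-version cluster never sends the field to a node that cannot parse it. The pattern, isolated from the diff (names as in the source):

    // write side of the builder's serialization
    if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
        out.writeOptionalWriteable(hardBounds);
    }
    // read side mirrors it; on older streams hardBounds simply stays null
    if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
        hardBounds = in.readOptionalWriteable(LongBounds::new);
    }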
@@ -41,7 +41,8 @@ public interface DateHistogramAggregationSupplier extends AggregatorSupplier {
                      BucketOrder order,
                      boolean keyed,
                      long minDocCount,
-                     @Nullable ExtendedBounds extendedBounds,
+                     @Nullable LongBounds extendedBounds,
+                     @Nullable LongBounds hardBounds,
                      ValuesSourceConfig valuesSourceConfig,
                      SearchContext aggregationContext,
                      Aggregator parent,
@@ -63,7 +63,8 @@ class DateHistogramAggregator extends BucketsAggregator {
     private final boolean keyed;

     private final long minDocCount;
-    private final ExtendedBounds extendedBounds;
+    private final LongBounds extendedBounds;
+    private final LongBounds hardBounds;

     private final LongKeyedBucketOrds bucketOrds;

@@ -75,7 +76,8 @@ class DateHistogramAggregator extends BucketsAggregator {
         BucketOrder order,
         boolean keyed,
         long minDocCount,
-        @Nullable ExtendedBounds extendedBounds,
+        @Nullable LongBounds extendedBounds,
+        @Nullable LongBounds hardBounds,
         ValuesSourceConfig valuesSourceConfig,
         SearchContext aggregationContext,
         Aggregator parent,

@@ -91,6 +93,7 @@ class DateHistogramAggregator extends BucketsAggregator {
         this.keyed = keyed;
         this.minDocCount = minDocCount;
         this.extendedBounds = extendedBounds;
+        this.hardBounds = hardBounds;
         // TODO: Stop using null here
         this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.Numeric) valuesSourceConfig.getValuesSource() : null;
         this.formatter = valuesSourceConfig.format();

@@ -126,12 +129,14 @@ class DateHistogramAggregator extends BucketsAggregator {
                 if (rounded == previousRounded) {
                     continue;
                 }
-                long bucketOrd = bucketOrds.add(owningBucketOrd, rounded);
-                if (bucketOrd < 0) { // already seen
-                    bucketOrd = -1 - bucketOrd;
-                    collectExistingBucket(sub, doc, bucketOrd);
-                } else {
-                    collectBucket(sub, doc, bucketOrd);
+                if (hardBounds == null || hardBounds.contain(rounded)) {
+                    long bucketOrd = bucketOrds.add(owningBucketOrd, rounded);
+                    if (bucketOrd < 0) { // already seen
+                        bucketOrd = -1 - bucketOrd;
+                        collectExistingBucket(sub, doc, bucketOrd);
+                    } else {
+                        collectBucket(sub, doc, bucketOrd);
+                    }
                 }
                 previousRounded = rounded;
             }
@@ -53,7 +53,8 @@ public final class DateHistogramAggregatorFactory extends ValuesSourceAggregator
     private final BucketOrder order;
     private final boolean keyed;
     private final long minDocCount;
-    private final ExtendedBounds extendedBounds;
+    private final LongBounds extendedBounds;
+    private final LongBounds hardBounds;
     private final Rounding rounding;

     public DateHistogramAggregatorFactory(

@@ -63,7 +64,8 @@ public final class DateHistogramAggregatorFactory extends ValuesSourceAggregator
         boolean keyed,
         long minDocCount,
         Rounding rounding,
-        ExtendedBounds extendedBounds,
+        LongBounds extendedBounds,
+        LongBounds hardBounds,
         QueryShardContext queryShardContext,
         AggregatorFactory parent,
         AggregatorFactories.Builder subFactoriesBuilder,

@@ -74,6 +76,7 @@ public final class DateHistogramAggregatorFactory extends ValuesSourceAggregator
         this.keyed = keyed;
         this.minDocCount = minDocCount;
         this.extendedBounds = extendedBounds;
+        this.hardBounds = hardBounds;
         this.rounding = rounding;
     }

@@ -107,6 +110,7 @@ public final class DateHistogramAggregatorFactory extends ValuesSourceAggregator
             keyed,
             minDocCount,
             extendedBounds,
+            hardBounds,
             config,
             searchContext,
             parent,

@@ -119,7 +123,7 @@ public final class DateHistogramAggregatorFactory extends ValuesSourceAggregator
     protected Aggregator createUnmapped(SearchContext searchContext,
                                         Aggregator parent,
                                         Map<String, Object> metadata) throws IOException {
-        return new DateHistogramAggregator(name, factories, rounding, null, order, keyed, minDocCount, extendedBounds,
+        return new DateHistogramAggregator(name, factories, rounding, null, order, keyed, minDocCount, extendedBounds, hardBounds,
             config, searchContext, parent, CardinalityUpperBound.NONE, metadata);
     }
 }
@@ -48,6 +48,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.function.BiConsumer;

+import static java.lang.Long.max;
+import static java.lang.Long.min;
+
 /**
  * An aggregator for date values. Every date is rounded down using a configured
  * {@link Rounding}.

@@ -67,7 +70,8 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
     private final boolean keyed;

     private final long minDocCount;
-    private final ExtendedBounds extendedBounds;
+    private final LongBounds extendedBounds;
+    private final LongBounds hardBounds;

     private final LongKeyedBucketOrds bucketOrds;

@@ -79,7 +83,8 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
         BucketOrder order,
         boolean keyed,
         long minDocCount,
-        @Nullable ExtendedBounds extendedBounds,
+        @Nullable LongBounds extendedBounds,
+        @Nullable LongBounds hardBounds,
         ValuesSourceConfig valuesSourceConfig,
         SearchContext aggregationContext,
         Aggregator parent,

@@ -95,6 +100,7 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
         this.keyed = keyed;
         this.minDocCount = minDocCount;
         this.extendedBounds = extendedBounds;
+        this.hardBounds = hardBounds;
         // TODO: Stop using null here
         this.valuesSource = valuesSourceConfig.hasValues() ? (ValuesSource.Range) valuesSourceConfig.getValuesSource() : null;
         this.formatter = valuesSourceConfig.format();

@@ -140,9 +146,13 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
                 // The encoding should ensure that this assert is always true.
                 assert from >= previousFrom : "Start of range not >= previous start";
                 final Long to = (Long) range.getTo();
-                final long startKey = preparedRounding.round(from);
-                final long endKey = preparedRounding.round(to);
-                for (long key = startKey > previousKey ? startKey : previousKey; key <= endKey;
+                final long effectiveFrom = (hardBounds != null && hardBounds.getMin() != null) ?
+                    max(from, hardBounds.getMin()) : from;
+                final long effectiveTo = (hardBounds != null && hardBounds.getMax() != null) ?
+                    min(to, hardBounds.getMax()) : to;
+                final long startKey = preparedRounding.round(effectiveFrom);
+                final long endKey = preparedRounding.round(effectiveTo);
+                for (long key = max(startKey, previousKey); key <= endKey;
                      key = preparedRounding.nextRoundingValue(key)) {
                     if (key == previousKey) {
                         continue;

@@ -167,6 +177,8 @@ class DateRangeHistogramAggregator extends BucketsAggregator {
         };
     }

+
+
     @Override
     public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
         return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds,
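For range fields, hard bounds work by clamping each document's range to the bounds before rounding, as the effectiveFrom/effectiveTo computation above shows. A standalone sketch of that arithmetic (plain flooring to a fixed interval stands in for the Rounding class, an assumption made for illustration):

    class RangeClampSketch {
        public static void main(String[] args) {
            long interval = 3_600_000L;                   // one hour in millis
            long hardMin = 3 * interval, hardMax = 10 * interval;
            long from = 2 * interval, to = 17 * interval; // the doc's range

            long effectiveFrom = Math.max(from, hardMin); // clamp to hard bounds
            long effectiveTo = Math.min(to, hardMax);
            long startKey = Math.floorDiv(effectiveFrom, interval);
            long endKey = Math.floorDiv(effectiveTo, interval);

            // Prints "keys 3..10": the range contributes buckets only inside
            // the bounds, not the 2..17 it would cover without hard bounds.
            System.out.println("keys " + startKey + ".." + endKey);
        }
    }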
@@ -0,0 +1,150 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket.histogram;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.InstantiatingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
+/**
+ * Represent hard_bounds and extended_bounds in histogram aggregations.
+ *
+ * This class is similar to {@link LongBounds} used in date histograms, but is using doubles to store data. LongBounds and DoubleBounds
+ * are not used interchangeably and therefore don't share any common interfaces except for serialization.
+ */
+public class DoubleBounds implements ToXContentFragment, Writeable {
+    static final ParseField MIN_FIELD = new ParseField("min");
+    static final ParseField MAX_FIELD = new ParseField("max");
+    static final InstantiatingObjectParser<DoubleBounds, Void> PARSER;
+
+    static {
+        InstantiatingObjectParser.Builder<DoubleBounds, Void> parser =
+            InstantiatingObjectParser.builder("double_bounds", false, DoubleBounds.class);
+        parser.declareField(optionalConstructorArg(), p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.doubleValue(),
+            MIN_FIELD, ObjectParser.ValueType.DOUBLE_OR_NULL);
+        parser.declareField(optionalConstructorArg(), p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.doubleValue(),
+            MAX_FIELD, ObjectParser.ValueType.DOUBLE_OR_NULL);
+        PARSER = parser.build();
+    }
+
+    /**
+     * Min value
+     */
+    private final Double min;
+
+    /**
+     * Max value
+     */
+    private final Double max;
+
+    /**
+     * Construct with bounds.
+     */
+    public DoubleBounds(Double min, Double max) {
+        this.min = min;
+        this.max = max;
+    }
+
+    /**
+     * Read from a stream.
+     */
+    public DoubleBounds(StreamInput in) throws IOException {
+        min = in.readOptionalDouble();
+        max = in.readOptionalDouble();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeOptionalDouble(min);
+        out.writeOptionalDouble(max);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        if (min != null) {
+            builder.field(MIN_FIELD.getPreferredName(), min);
+        }
+        if (max != null) {
+            builder.field(MAX_FIELD.getPreferredName(), max);
+        }
+        return builder;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(min, max);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        DoubleBounds other = (DoubleBounds) obj;
+        return Objects.equals(min, other.min)
+            && Objects.equals(max, other.max);
+    }
+
+    public Double getMin() {
+        return min;
+    }
+
+    public Double getMax() {
+        return max;
+    }
+
+    public boolean contain(double value) {
+        if (max != null && value > max) {
+            return false;
+        }
+        if (min != null && value < min) {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder b = new StringBuilder();
+        if (min != null) {
+            b.append(min);
+        }
+        b.append("--");
+        if (max != null) {
+            b.append(max);
+        }
+        return b.toString();
+    }
+}
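Usage of the new class is straightforward; a null min or max leaves that side of the bounds open. A fragment assuming the DoubleBounds class above is on the classpath:

    DoubleBounds lowerOnly = new DoubleBounds(0.0, null);
    boolean a = lowerOnly.contain(0.5);  // true
    boolean b = lowerOnly.contain(-0.6); // false: below min
    boolean c = lowerOnly.contain(1e9);  // true: no max is set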
@@ -34,6 +34,7 @@ public interface Histogram extends MultiBucketsAggregation {
     ParseField KEYED_FIELD = new ParseField("keyed");
     ParseField MIN_DOC_COUNT_FIELD = new ParseField("min_doc_count");
     ParseField EXTENDED_BOUNDS_FIELD = new ParseField("extended_bounds");
+    ParseField HARD_BOUNDS_FIELD = new ParseField("hard_bounds");

     /**
      * A bucket in the histogram where documents fall in
@@ -19,6 +19,7 @@

 package org.elasticsearch.search.aggregations.bucket.histogram;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -73,7 +74,10 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<

         PARSER.declareField((histogram, extendedBounds) -> {
             histogram.extendedBounds(extendedBounds[0], extendedBounds[1]);
-        }, parser -> EXTENDED_BOUNDS_PARSER.apply(parser, null), ExtendedBounds.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
+        }, parser -> EXTENDED_BOUNDS_PARSER.apply(parser, null), Histogram.EXTENDED_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);
+
+        PARSER.declareField(HistogramAggregationBuilder::hardBounds, parser -> DoubleBounds.PARSER.apply(parser, null),
+            Histogram.HARD_BOUNDS_FIELD, ObjectParser.ValueType.OBJECT);

         PARSER.declareObjectArray(HistogramAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p),
             Histogram.ORDER_FIELD);

@@ -85,8 +89,10 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<

     private double interval;
     private double offset = 0;
+    //TODO: Replace with DoubleBounds
     private double minBound = Double.POSITIVE_INFINITY;
     private double maxBound = Double.NEGATIVE_INFINITY;
+    private DoubleBounds hardBounds;
     private BucketOrder order = BucketOrder.key(true);
     private boolean keyed = false;
     private long minDocCount = 0;

@@ -109,6 +115,7 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<
         this.offset = clone.offset;
         this.minBound = clone.minBound;
         this.maxBound = clone.maxBound;
+        this.hardBounds = clone.hardBounds;
         this.order = clone.order;
         this.keyed = clone.keyed;
         this.minDocCount = clone.minDocCount;

@@ -129,6 +136,9 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<
         offset = in.readDouble();
         minBound = in.readDouble();
         maxBound = in.readDouble();
+        if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
+            hardBounds = in.readOptionalWriteable(DoubleBounds::new);
+        }
     }

     @Override

@@ -140,6 +150,9 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<
         out.writeDouble(offset);
         out.writeDouble(minBound);
         out.writeDouble(maxBound);
+        if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
+            out.writeOptionalWriteable(hardBounds);
+        }
     }

     /** Get the current interval that is set on this builder. */

@@ -201,6 +214,17 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<
         return this;
     }

+    /**
+     * Set hard bounds on this histogram, specifying boundaries outside which buckets cannot be created.
+     */
+    public HistogramAggregationBuilder hardBounds(DoubleBounds hardBounds) {
+        if (hardBounds == null) {
+            throw new IllegalArgumentException("[hardBounds] must not be null: [" + name + "]");
+        }
+        this.hardBounds = hardBounds;
+        return this;
+    }
+
     /** Return the order to use to sort buckets of this histogram. */
     public BucketOrder order() {
         return order;

@@ -307,13 +331,24 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<
         ValuesSourceConfig config,
         AggregatorFactory parent,
         AggregatorFactories.Builder subFactoriesBuilder) throws IOException {
+
+        if (hardBounds != null) {
+            if (hardBounds.getMax() != null && hardBounds.getMax() < maxBound) {
+                throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" +
+                    hardBounds + "], extended bounds: [" + minBound + "--" + maxBound + "]");
+            }
+            if (hardBounds.getMin() != null && hardBounds.getMin() > minBound) {
+                throw new IllegalArgumentException("Extended bounds have to be inside hard bounds, hard bounds: [" +
+                    hardBounds + "], extended bounds: [" + minBound + "--" + maxBound + "]");
+            }
+        }
         return new HistogramAggregatorFactory(name, config, interval, offset, order, keyed, minDocCount, minBound, maxBound,
-            queryShardContext, parent, subFactoriesBuilder, metadata);
+            hardBounds, queryShardContext, parent, subFactoriesBuilder, metadata);
     }

     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), order, keyed, minDocCount, interval, offset, minBound, maxBound);
+        return Objects.hash(super.hashCode(), order, keyed, minDocCount, interval, offset, minBound, maxBound, hardBounds);
     }

     @Override

@@ -328,6 +363,7 @@ public class HistogramAggregationBuilder extends ValuesSourceAggregationBuilder<
             && Objects.equals(interval, other.interval)
             && Objects.equals(offset, other.offset)
             && Objects.equals(minBound, other.minBound)
-            && Objects.equals(maxBound, other.maxBound);
+            && Objects.equals(maxBound, other.maxBound)
+            && Objects.equals(hardBounds, other.hardBounds);
     }
 }
@@ -48,6 +48,7 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
     private final boolean keyed;
     private final long minDocCount;
     private final double minBound, maxBound;
+    private final DoubleBounds hardBounds;

     static void registerAggregators(ValuesSourceRegistry.Builder builder) {
         builder.register(HistogramAggregationBuilder.NAME, CoreValuesSourceType.RANGE,

@@ -67,6 +68,7 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
         long minDocCount,
         double minBound,
         double maxBound,
+        DoubleBounds hardBounds,
         QueryShardContext queryShardContext,
         AggregatorFactory parent,
         AggregatorFactories.Builder subFactoriesBuilder,

@@ -79,6 +81,7 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
         this.minDocCount = minDocCount;
         this.minBound = minBound;
         this.maxBound = maxBound;
+        this.hardBounds = hardBounds;
     }

     public long minDocCount() {

@@ -98,7 +101,7 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
         }
         HistogramAggregatorSupplier histogramAggregatorSupplier = (HistogramAggregatorSupplier) aggregatorSupplier;
         return histogramAggregatorSupplier.build(name, factories, interval, offset, order, keyed, minDocCount, minBound, maxBound,
-            config, searchContext, parent, cardinality, metadata);
+            hardBounds, config, searchContext, parent, cardinality, metadata);
     }

     @Override

@@ -106,6 +109,6 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
         Aggregator parent,
         Map<String, Object> metadata) throws IOException {
         return new NumericHistogramAggregator(name, factories, interval, offset, order, keyed, minDocCount, minBound, maxBound,
-            config, searchContext, parent, CardinalityUpperBound.NONE, metadata);
+            hardBounds, config, searchContext, parent, CardinalityUpperBound.NONE, metadata);
     }
 }
@@ -40,6 +40,7 @@ public interface HistogramAggregatorSupplier extends AggregatorSupplier {
                      long minDocCount,
                      double minBound,
                      double maxBound,
+                     DoubleBounds hardBounds,
                      ValuesSourceConfig valuesSourceConfig,
                      SearchContext context,
                      Aggregator parent,
@@ -160,13 +160,13 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<

         final Rounding rounding;
         final InternalAggregations subAggregations;
-        final ExtendedBounds bounds;
+        final LongBounds bounds;

         EmptyBucketInfo(Rounding rounding, InternalAggregations subAggregations) {
             this(rounding, subAggregations, null);
         }

-        EmptyBucketInfo(Rounding rounding, InternalAggregations subAggregations, ExtendedBounds bounds) {
+        EmptyBucketInfo(Rounding rounding, InternalAggregations subAggregations, LongBounds bounds) {
             this.rounding = rounding;
             this.subAggregations = subAggregations;
             this.bounds = bounds;

@@ -175,7 +175,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<
         EmptyBucketInfo(StreamInput in) throws IOException {
             rounding = Rounding.read(in);
             subAggregations = InternalAggregations.readFrom(in);
-            bounds = in.readOptionalWriteable(ExtendedBounds::new);
+            bounds = in.readOptionalWriteable(LongBounds::new);
         }

         void writeTo(StreamOutput out) throws IOException {

@@ -377,7 +377,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation<

     private void addEmptyBuckets(List<Bucket> list, ReduceContext reduceContext) {
         Bucket lastBucket = null;
-        ExtendedBounds bounds = emptyBucketInfo.bounds;
+        LongBounds bounds = emptyBucketInfo.bounds;
         ListIterator<Bucket> iter = list.listIterator();

         // first adding all the empty buckets *before* the actual data (based on the extended_bounds.min the user requested)
@@ -39,13 +39,18 @@ import java.util.Objects;

 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class ExtendedBounds implements ToXContentFragment, Writeable {
-    static final ParseField EXTENDED_BOUNDS_FIELD = Histogram.EXTENDED_BOUNDS_FIELD;
+/**
+ * Represent hard_bounds and extended_bounds in date-histogram aggregations.
+ *
+ * This class is similar to {@link DoubleBounds} used in histograms, but is using longs to store data. LongBounds and DoubleBounds are
+ * not used interchangeably and therefore don't share any common interfaces except for serialization.
+ */
+public class LongBounds implements ToXContentFragment, Writeable {
     static final ParseField MIN_FIELD = new ParseField("min");
     static final ParseField MAX_FIELD = new ParseField("max");

-    public static final ConstructingObjectParser<ExtendedBounds, Void> PARSER = new ConstructingObjectParser<>(
-        "extended_bounds", a -> {
+    public static final ConstructingObjectParser<LongBounds, Void> PARSER = new ConstructingObjectParser<>(
+        "bounds", a -> {
         assert a.length == 2;
         Long min = null;
         Long max = null;

@@ -69,7 +74,7 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
         } else {
             throw new IllegalArgumentException("Unknown field type [" + a[1].getClass() + "]");
         }
-        return new ExtendedBounds(min, max, minAsStr, maxAsStr);
+        return new LongBounds(min, max, minAsStr, maxAsStr);
     });
     static {
         CheckedFunction<XContentParser, Object, IOException> longOrString = p -> {

@@ -105,21 +110,21 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
     /**
      * Construct with parsed bounds.
      */
-    public ExtendedBounds(Long min, Long max) {
+    public LongBounds(Long min, Long max) {
         this(min, max, null, null);
     }

     /**
      * Construct with unparsed bounds.
      */
-    public ExtendedBounds(String minAsStr, String maxAsStr) {
+    public LongBounds(String minAsStr, String maxAsStr) {
         this(null, null, minAsStr, maxAsStr);
     }

     /**
      * Construct with all possible information.
      */
-    private ExtendedBounds(Long min, Long max, String minAsStr, String maxAsStr) {
+    private LongBounds(Long min, Long max, String minAsStr, String maxAsStr) {
         this.min = min;
         this.max = max;
         this.minAsStr = minAsStr;

@@ -129,7 +134,7 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
     /**
      * Read from a stream.
      */
-    public ExtendedBounds(StreamInput in) throws IOException {
+    public LongBounds(StreamInput in) throws IOException {
         min = in.readOptionalLong();
         max = in.readOptionalLong();
         minAsStr = in.readOptionalString();

@@ -147,7 +152,7 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
     /**
      * Parse the bounds and perform any delayed validation. Returns the result of the parsing.
      */
-    ExtendedBounds parseAndValidate(String aggName, QueryShardContext queryShardContext, DocValueFormat format) {
+    LongBounds parseAndValidate(String aggName, String boundsName, QueryShardContext queryShardContext, DocValueFormat format) {
         Long min = this.min;
         Long max = this.max;
         assert format != null;

@@ -159,23 +164,22 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
             max = format.parseLong(maxAsStr, false, queryShardContext::nowInMillis);
         }
         if (min != null && max != null && min.compareTo(max) > 0) {
-            throw new IllegalArgumentException("[extended_bounds.min][" + min + "] cannot be greater than " +
-                "[extended_bounds.max][" + max + "] for histogram aggregation [" + aggName + "]");
+            throw new IllegalArgumentException("[" + boundsName + ".min][" + min + "] cannot be greater than " +
+                "[" + boundsName + ".max][" + max + "] for histogram aggregation [" + aggName + "]");
         }
-        return new ExtendedBounds(min, max, minAsStr, maxAsStr);
+        return new LongBounds(min, max, minAsStr, maxAsStr);
     }

-    ExtendedBounds round(Rounding rounding) {
+    LongBounds round(Rounding rounding) {
         // Extended bounds shouldn't be affected by the offset
         Rounding effectiveRounding = rounding.withoutOffset();
-        return new ExtendedBounds(
+        return new LongBounds(
             min != null ? effectiveRounding.round(min) : null,
             max != null ? effectiveRounding.round(max) : null);
     }

     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(EXTENDED_BOUNDS_FIELD.getPreferredName());
         if (min != null) {
             builder.field(MIN_FIELD.getPreferredName(), min);
         } else {

@@ -186,7 +190,6 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
         } else {
             builder.field(MAX_FIELD.getPreferredName(), maxAsStr);
         }
-        builder.endObject();
         return builder;
     }

@@ -203,7 +206,7 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
         if (getClass() != obj.getClass()) {
             return false;
         }
-        ExtendedBounds other = (ExtendedBounds) obj;
+        LongBounds other = (LongBounds) obj;
         return Objects.equals(min, other.min)
             && Objects.equals(max, other.max)
             && Objects.equals(minAsStr, other.minAsStr)

@@ -218,6 +221,16 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
         return max;
     }

+    public boolean contain(long value) {
+        if (max != null && value >= max) {
+            return false;
+        }
+        if (min != null && value < min) {
+            return false;
+        }
+        return true;
+    }
+
     @Override
     public String toString() {
         StringBuilder b = new StringBuilder();

@@ -233,7 +246,7 @@ public class ExtendedBounds implements ToXContentFragment, Writeable {
         }
         b.append("--");
         if (max != null) {
-            b.append(min);
+            b.append(max);
             if (maxAsStr != null) {
                 b.append('(').append(maxAsStr).append(')');
             }
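Worth noting when reading the two contain implementations this commit adds: LongBounds.contain above treats max as exclusive (value >= max fails), while DoubleBounds.contain treats it as inclusive (only value > max fails). Side by side, with illustrative variables:

    // LongBounds (date histograms): upper bound exclusive
    boolean inLong = longBounds.contain(value);     // false when value >= max
    // DoubleBounds (numeric histograms): upper bound inclusive
    boolean inDouble = doubleBounds.contain(value); // false only when value > max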
@@ -54,6 +54,7 @@ public class NumericHistogramAggregator extends AbstractHistogramAggregator {
         long minDocCount,
         double minBound,
         double maxBound,
+        DoubleBounds hardBounds,
         ValuesSourceConfig valuesSourceConfig,
         SearchContext context,
         Aggregator parent,

@@ -70,6 +71,7 @@ public class NumericHistogramAggregator extends AbstractHistogramAggregator {
             minDocCount,
             minBound,
             maxBound,
+            hardBounds,
             valuesSourceConfig.format(),
             context,
             parent,

@@ -110,12 +112,14 @@ public class NumericHistogramAggregator extends AbstractHistogramAggregator {
                 if (key == previousKey) {
                     continue;
                 }
-                long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
-                if (bucketOrd < 0) { // already seen
-                    bucketOrd = -1 - bucketOrd;
-                    collectExistingBucket(sub, doc, bucketOrd);
-                } else {
-                    collectBucket(sub, doc, bucketOrd);
+                if (hardBounds == null || hardBounds.contain(key)) {
+                    long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
+                    if (bucketOrd < 0) { // already seen
+                        bucketOrd = -1 - bucketOrd;
+                        collectExistingBucket(sub, doc, bucketOrd);
+                    } else {
+                        collectBucket(sub, doc, bucketOrd);
+                    }
                 }
                 previousKey = key;
             }
@@ -51,6 +51,7 @@ public class RangeHistogramAggregator extends AbstractHistogramAggregator {
         long minDocCount,
         double minBound,
         double maxBound,
+        DoubleBounds hardBounds,
         ValuesSourceConfig valuesSourceConfig,
         SearchContext context,
         Aggregator parent,

@@ -67,6 +68,7 @@ public class RangeHistogramAggregator extends AbstractHistogramAggregator {
             minDocCount,
             minBound,
             maxBound,
+            hardBounds,
             valuesSourceConfig.format(),
             context,
             parent,

@@ -108,9 +110,13 @@ public class RangeHistogramAggregator extends AbstractHistogramAggregator {
                 // The encoding should ensure that this assert is always true.
                 assert from >= previousFrom : "Start of range not >= previous start";
                 final Double to = rangeType.doubleValue(range.getTo());
-                final double startKey = Math.floor((from - offset) / interval);
-                final double endKey = Math.floor((to - offset) / interval);
-                for (double key = startKey > previousKey ? startKey : previousKey; key <= endKey; key++) {
+                final double effectiveFrom = (hardBounds != null && hardBounds.getMin() != null) ?
+                    Double.max(from, hardBounds.getMin()) : from;
+                final double effectiveTo = (hardBounds != null && hardBounds.getMax() != null) ?
+                    Double.min(to, hardBounds.getMax()) : to;
+                final double startKey = Math.floor((effectiveFrom - offset) / interval);
+                final double endKey = Math.floor((effectiveTo - offset) / interval);
+                for (double key = Math.max(startKey, previousKey); key <= endKey; key++) {
                     if (key == previousKey) {
                         continue;
                     }
@@ -1136,6 +1136,29 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas
         assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.");
     }

+    public void testOverlappingBounds() {
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(),
+            Arrays.asList(
+                "2017-02-01",
+                "2017-02-02",
+                "2017-02-02",
+                "2017-02-03",
+                "2017-02-03",
+                "2017-02-03",
+                "2017-02-05"
+            ),
+            aggregation -> aggregation.calendarInterval(DateHistogramInterval.DAY)
+                .hardBounds(new LongBounds("2010-01-01", "2020-01-01"))
+                .extendedBounds(new LongBounds("2009-01-01", "2021-01-01"))
+                .field(AGGREGABLE_DATE),
+            histogram -> {}, false
+        ));
+
+        assertThat(ex.getMessage(), equalTo("Extended bounds have to be inside hard bounds, " +
+            "hard bounds: [2010-01-01--2020-01-01], extended bounds: [2009-01-01--2021-01-01]"));
+    }
+
     public void testIllegalInterval() throws IOException {
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchAllDocsQuery(),
             Collections.emptyList(),
@@ -59,7 +59,7 @@ public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAgg
             }
         }
         if (randomBoolean()) {
-            factory.extendedBounds(ExtendedBoundsTests.randomExtendedBounds());
+            factory.extendedBounds(LongBoundsTests.randomExtendedBounds());
         }
         if (randomBoolean()) {
             factory.format("###.##");
@ -31,6 +31,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.CheckedConsumer;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.time.DateFormatters;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
@ -46,6 +47,7 @@ import java.time.ZonedDateTime;
|
|||
import java.util.function.Consumer;

import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.equalTo;

public class DateRangeHistogramAggregatorTests extends AggregatorTestCase {

@@ -658,6 +660,150 @@ public class DateRangeHistogramAggregatorTests extends AggregatorTestCase {
    );
}

public void testHardBounds() throws Exception {
    RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"),
        asLong("2019-08-02T05:45:00"), true, true);
    RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"),
        asLong("2019-08-02T17:45:00"), true, true);

    testCase(
        Queries.newMatchAllQuery(),
        builder -> builder.calendarInterval(DateHistogramInterval.HOUR).hardBounds(
            new LongBounds("2019-08-02T03:00:00", "2019-08-02T10:00:00")),
        writer -> {
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
        },
        histo -> {
            assertEquals(8, histo.getBuckets().size());

            assertEquals(asZDT("2019-08-02T03:00:00"), histo.getBuckets().get(0).getKey());
            assertEquals(1, histo.getBuckets().get(0).getDocCount());

            assertEquals(asZDT("2019-08-02T05:00:00"), histo.getBuckets().get(2).getKey());
            assertEquals(2, histo.getBuckets().get(2).getDocCount());

            assertEquals(asZDT("2019-08-02T10:00:00"), histo.getBuckets().get(7).getKey());
            assertEquals(1, histo.getBuckets().get(7).getDocCount());

            assertTrue(AggregationInspectionHelper.hasValue(histo));
        }
    );
}
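
The eight buckets follow from the bounds rather than the data: with an hourly interval and hard bounds of 03:00 to 10:00, both ranges are clipped to that window, leaving keys 03:00 through 10:00 inclusive. A trivial check of that arithmetic (plain Java, not part of the patch):

    long buckets = java.time.Duration.between(
        java.time.LocalDateTime.parse("2019-08-02T03:00:00"),
        java.time.LocalDateTime.parse("2019-08-02T10:00:00")).toHours() + 1; // both edge buckets exist
    assert buckets == 8;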

public void testHardBoundsWithOpenRanges() throws Exception {
    RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, Long.MIN_VALUE,
        asLong("2019-08-02T05:45:00"), true, true);
    RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"),
        Long.MAX_VALUE, true, true);

    testCase(
        Queries.newMatchAllQuery(),
        builder -> builder.calendarInterval(DateHistogramInterval.HOUR).hardBounds(
            new LongBounds("2019-08-02T03:00:00", "2019-08-02T10:00:00")),
        writer -> {
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
        },
        histo -> {
            assertEquals(8, histo.getBuckets().size());

            assertEquals(asZDT("2019-08-02T03:00:00"), histo.getBuckets().get(0).getKey());
            assertEquals(1, histo.getBuckets().get(0).getDocCount());

            assertEquals(asZDT("2019-08-02T05:00:00"), histo.getBuckets().get(2).getKey());
            assertEquals(2, histo.getBuckets().get(2).getDocCount());

            assertEquals(asZDT("2019-08-02T10:00:00"), histo.getBuckets().get(7).getKey());
            assertEquals(1, histo.getBuckets().get(7).getDocCount());

            assertTrue(AggregationInspectionHelper.hasValue(histo));
        }
    );
}

public void testBothBounds() throws Exception {
    RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"),
        asLong("2019-08-02T05:45:00"), true, true);
    RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"),
        asLong("2019-08-02T17:45:00"), true, true);

    testCase(
        Queries.newMatchAllQuery(),
        builder -> builder.calendarInterval(DateHistogramInterval.HOUR)
            .hardBounds(new LongBounds("2019-08-02T00:00:00", "2019-08-02T10:00:00"))
            .extendedBounds(new LongBounds("2019-08-02T01:00:00", "2019-08-02T08:00:00")),
        writer -> {
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
        },
        histo -> {
            assertEquals(10, histo.getBuckets().size());

            assertEquals(asZDT("2019-08-02T01:00:00"), histo.getBuckets().get(0).getKey());
            assertEquals(0, histo.getBuckets().get(0).getDocCount());

            assertEquals(asZDT("2019-08-02T02:00:00"), histo.getBuckets().get(1).getKey());
            assertEquals(1, histo.getBuckets().get(1).getDocCount());

            assertEquals(asZDT("2019-08-02T10:00:00"), histo.getBuckets().get(9).getKey());
            assertEquals(1, histo.getBuckets().get(9).getDocCount());

            assertTrue(AggregationInspectionHelper.hasValue(histo));
        }
    );
}

public void testOverlappingBounds() throws Exception {
    IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> testCase(
        Queries.newMatchAllQuery(),
        builder -> builder.calendarInterval(DateHistogramInterval.HOUR)
            .hardBounds(new LongBounds("2019-08-02T01:00:00", "2019-08-02T08:00:00"))
            .extendedBounds(new LongBounds("2019-08-02T00:00:00", "2019-08-02T10:00:00")),
        writer -> {
        },
        histo -> {
            fail("Shouldn't be here");
        }
    ));

    assertThat(ex.getMessage(), equalTo("Extended bounds have to be inside hard bounds, " +
        "hard bounds: [2019-08-02T01:00:00--2019-08-02T08:00:00], extended bounds: [2019-08-02T00:00:00--2019-08-02T10:00:00]"));
}

public void testEqualBounds() throws Exception {
    RangeFieldMapper.Range range1 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T02:15:00"),
        asLong("2019-08-02T05:45:00"), true, true);
    RangeFieldMapper.Range range2 = new RangeFieldMapper.Range(RangeType.DATE, asLong("2019-08-02T05:15:00"),
        asLong("2019-08-02T17:45:00"), true, true);

    testCase(
        Queries.newMatchAllQuery(),
        builder -> builder.calendarInterval(DateHistogramInterval.HOUR)
            .hardBounds(new LongBounds("2019-08-02T00:00:00", "2019-08-02T10:00:00"))
            .extendedBounds(new LongBounds("2019-08-02T00:00:00", "2019-08-02T10:00:00")),
        writer -> {
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range1)))));
            writer.addDocument(singleton(new BinaryDocValuesField(FIELD_NAME, RangeType.DATE.encodeRanges(singleton(range2)))));
        },
        histo -> {
            assertEquals(11, histo.getBuckets().size());

            assertEquals(asZDT("2019-08-02T00:00:00"), histo.getBuckets().get(0).getKey());
            assertEquals(0, histo.getBuckets().get(0).getDocCount());

            assertEquals(asZDT("2019-08-02T02:00:00"), histo.getBuckets().get(2).getKey());
            assertEquals(1, histo.getBuckets().get(2).getDocCount());

            assertEquals(asZDT("2019-08-02T10:00:00"), histo.getBuckets().get(10).getKey());
            assertEquals(1, histo.getBuckets().get(10).getDocCount());

            assertTrue(AggregationInspectionHelper.hasValue(histo));
        }
    );
}

private void testCase(Query query,
                      Consumer<DateHistogramAggregationBuilder> configure,
                      CheckedConsumer<RandomIndexWriter, IOException> buildIndex,

@@ -673,19 +819,18 @@ public class DateRangeHistogramAggregatorTests extends AggregatorTestCase {
private void testCase(DateHistogramAggregationBuilder aggregationBuilder, Query query,
                      CheckedConsumer<RandomIndexWriter, IOException> buildIndex, Consumer<InternalDateHistogram> verify,
                      MappedFieldType fieldType) throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
    buildIndex.accept(indexWriter);
    indexWriter.close();
    try (Directory directory = newDirectory();
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
        buildIndex.accept(indexWriter);
        // Close the writer explicitly so the documents are committed before the reader opens.
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
        try (IndexReader indexReader = DirectoryReader.open(directory)) {
            IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

            InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
            verify.accept(histogram);

            indexReader.close();
            directory.close();
            InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
            verify.accept(histogram);
        }
    }
}

private static long asLong(String dateTime) {

@@ -0,0 +1,104 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;

public class DoubleBoundsTests extends ESTestCase {
    /**
     * Construct a random {@link DoubleBounds}
     */
    public static DoubleBounds randomBounds() {
        Double min = randomDouble();
        Double max = randomValueOtherThan(min, ESTestCase::randomDouble);
        if (min > max) {
            double temp = min;
            min = max;
            max = temp;
        }
        if (randomBoolean()) {
            // Construct with one missing bound
            if (randomBoolean()) {
                return new DoubleBounds(null, max);
            }
            return new DoubleBounds(min, null);
        }
        return new DoubleBounds(min, max);
    }

    public void testTransportRoundTrip() throws IOException {
        DoubleBounds orig = randomBounds();

        BytesReference origBytes;
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            orig.writeTo(out);
            origBytes = out.bytes();
        }

        DoubleBounds read;
        try (StreamInput in = origBytes.streamInput()) {
            read = new DoubleBounds(in);
            assertEquals("read fully", 0, in.available());
        }
        assertEquals(orig, read);

        BytesReference readBytes;
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            read.writeTo(out);
            readBytes = out.bytes();
        }

        assertEquals(origBytes, readBytes);
    }

    public void testXContentRoundTrip() throws Exception {
        DoubleBounds orig = randomBounds();

        try (XContentBuilder out = JsonXContent.contentBuilder()) {
            out.startObject();
            orig.toXContent(out, ToXContent.EMPTY_PARAMS);
            out.endObject();

            try (XContentParser in = createParser(JsonXContent.jsonXContent, BytesReference.bytes(out))) {
                XContentParser.Token token = in.currentToken();
                assertNull(token);

                token = in.nextToken();
                assertThat(token, equalTo(XContentParser.Token.START_OBJECT));

                DoubleBounds read = DoubleBounds.PARSER.apply(in, null);
                assertEquals(orig, read);
            } catch (Exception e) {
                throw new Exception("Error parsing [" + BytesReference.bytes(out).utf8ToString() + "]", e);
            }
        }
    }
}
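
For context, a sketch of how these bounds would be used from the numeric histogram builder. This is hedged: it assumes HistogramAggregationBuilder exposes a hardBounds setter mirroring DateHistogramAggregationBuilder.hardBounds, and the field name and values are illustrative:

    // Cap an open-ended numeric histogram: no bucket below 0 or above 500 is created.
    HistogramAggregationBuilder prices = new HistogramAggregationBuilder("prices");
    prices.field("price")
        .interval(50)
        .hardBounds(new DoubleBounds(0.0, 500.0)); // null on either side would leave it open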

@@ -65,13 +65,13 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalDateHistogram>
    emptyBucketInfo = null;
} else {
    minDocCount = 0;
    ExtendedBounds extendedBounds = null;
    LongBounds extendedBounds = null;
    if (randomBoolean()) {
        // it's ok if min and max are outside the range of the generated buckets, that will just mean that
        // empty buckets won't be added before the first bucket and/or after the last one
        long min = baseMillis - intervalMillis * randomNumberOfBuckets();
        long max = baseMillis + randomNumberOfBuckets() * intervalMillis;
        extendedBounds = new ExtendedBounds(min, max);
        extendedBounds = new LongBounds(min, max);
    }
    emptyBucketInfo = new InternalDateHistogram.EmptyBucketInfo(rounding, InternalAggregations.EMPTY, extendedBounds);
}

@@ -44,12 +44,12 @@ import static java.lang.Math.max;
import static java.lang.Math.min;
import static org.hamcrest.Matchers.equalTo;

public class ExtendedBoundsTests extends ESTestCase {
public class LongBoundsTests extends ESTestCase {
    /**
     * Construct a random {@link ExtendedBounds}.
     * Construct a random {@link LongBounds}.
     */
    public static ExtendedBounds randomExtendedBounds() {
        ExtendedBounds bounds = randomParsedExtendedBounds();
    public static LongBounds randomExtendedBounds() {
        LongBounds bounds = randomParsedExtendedBounds();
        if (randomBoolean()) {
            bounds = unparsed(bounds);
        }

@@ -57,17 +57,17 @@ public class ExtendedBoundsTests extends ESTestCase {
    }

    /**
     * Construct a random {@link ExtendedBounds} in pre-parsed form.
     * Construct a random {@link LongBounds} in pre-parsed form.
     */
    public static ExtendedBounds randomParsedExtendedBounds() {
    public static LongBounds randomParsedExtendedBounds() {
        long maxDateValue = 253402300799999L; // end of year 9999
        long minDateValue = -377705116800000L; // beginning of year -9999
        if (randomBoolean()) {
            // Construct with one missing bound
            if (randomBoolean()) {
                return new ExtendedBounds(null, maxDateValue);
                return new LongBounds(null, maxDateValue);
            }
            return new ExtendedBounds(minDateValue, null);
            return new LongBounds(minDateValue, null);
        }
        long a = randomLongBetween(minDateValue, maxDateValue);
        long b;

@@ -76,18 +76,18 @@ public class ExtendedBoundsTests extends ESTestCase {
        } while (a == b);
        long min = min(a, b);
        long max = max(a, b);
        return new ExtendedBounds(min, max);
        return new LongBounds(min, max);
    }

    /**
     * Convert extended bounds in parsed form into one in unparsed form.
     */
    public static ExtendedBounds unparsed(ExtendedBounds template) {
    public static LongBounds unparsed(LongBounds template) {
        // It'd probably be better to randomize the formatter
        DateFormatter formatter = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC);
        String minAsStr = template.getMin() == null ? null : formatter.formatMillis(template.getMin());
        String maxAsStr = template.getMax() == null ? null : formatter.formatMillis(template.getMax());
        return new ExtendedBounds(minAsStr, maxAsStr);
        return new LongBounds(minAsStr, maxAsStr);
    }

    public void testParseAndValidate() {

@@ -101,33 +101,33 @@ public class ExtendedBoundsTests extends ESTestCase {
DateFormatter formatter = DateFormatter.forPattern("dateOptionalTime");
|
||||
DocValueFormat format = new DocValueFormat.DateTime(formatter, ZoneOffset.UTC, DateFieldMapper.Resolution.MILLISECONDS);
|
||||
|
||||
ExtendedBounds expected = randomParsedExtendedBounds();
|
||||
ExtendedBounds parsed = unparsed(expected).parseAndValidate("test", qsc, format);
|
||||
LongBounds expected = randomParsedExtendedBounds();
|
||||
LongBounds parsed = unparsed(expected).parseAndValidate("test", "extended_bounds", qsc, format);
|
||||
// parsed won't *equal* expected because equal includes the String parts
|
||||
assertEquals(expected.getMin(), parsed.getMin());
|
||||
assertEquals(expected.getMax(), parsed.getMax());
|
||||
|
||||
parsed = new ExtendedBounds("now", null).parseAndValidate("test", qsc, format);
|
||||
parsed = new LongBounds("now", null).parseAndValidate("test", "extended_bounds", qsc, format);
|
||||
assertEquals(now, (long) parsed.getMin());
|
||||
assertNull(parsed.getMax());
|
||||
|
||||
parsed = new ExtendedBounds(null, "now").parseAndValidate("test", qsc, format);
|
||||
parsed = new LongBounds(null, "now").parseAndValidate("test", "extended_bounds", qsc, format);
|
||||
assertNull(parsed.getMin());
|
||||
assertEquals(now, (long) parsed.getMax());
|
||||
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
|
||||
() -> new ExtendedBounds(100L, 90L).parseAndValidate("test", qsc, format));
|
||||
() -> new LongBounds(100L, 90L).parseAndValidate("test", "extended_bounds", qsc, format));
|
||||
assertEquals("[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]",
|
||||
e.getMessage());
|
||||
|
||||
e = expectThrows(IllegalArgumentException.class,
|
||||
() -> unparsed(new ExtendedBounds(100L, 90L)).parseAndValidate("test", qsc, format));
|
||||
() -> unparsed(new LongBounds(100L, 90L)).parseAndValidate("test", "extended_bounds", qsc, format));
|
||||
assertEquals("[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]",
|
||||
e.getMessage());
|
||||
}
|
||||
|
||||
public void testTransportRoundTrip() throws IOException {
|
||||
ExtendedBounds orig = randomExtendedBounds();
|
||||
LongBounds orig = randomExtendedBounds();
|
||||
|
||||
BytesReference origBytes;
|
||||
try (BytesStreamOutput out = new BytesStreamOutput()) {
|
||||
|

@@ -135,9 +135,9 @@ public class ExtendedBoundsTests extends ESTestCase {
            origBytes = out.bytes();
        }

        ExtendedBounds read;
        LongBounds read;
        try (StreamInput in = origBytes.streamInput()) {
            read = new ExtendedBounds(in);
            read = new LongBounds(in);
            assertEquals("read fully", 0, in.available());
        }
        assertEquals(orig, read);

@@ -152,7 +152,7 @@ public class ExtendedBoundsTests extends ESTestCase {
    }

    public void testXContentRoundTrip() throws Exception {
        ExtendedBounds orig = randomExtendedBounds();
        LongBounds orig = randomExtendedBounds();

        try (XContentBuilder out = JsonXContent.contentBuilder()) {
            out.startObject();

@@ -166,11 +166,7 @@ public class ExtendedBoundsTests extends ESTestCase {
                token = in.nextToken();
                assertThat(token, equalTo(XContentParser.Token.START_OBJECT));

                token = in.nextToken();
                assertThat(token, equalTo(XContentParser.Token.FIELD_NAME));
                assertThat(in.currentName(), equalTo(ExtendedBounds.EXTENDED_BOUNDS_FIELD.getPreferredName()));

                ExtendedBounds read = ExtendedBounds.PARSER.apply(in, null);
                LongBounds read = LongBounds.PARSER.apply(in, null);
                assertEquals(orig, read);
            } catch (Exception e) {
                throw new Exception("Error parsing [" + BytesReference.bytes(out).utf8ToString() + "]", e);

@@ -16,6 +16,7 @@ import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramAggregator;
import org.elasticsearch.search.aggregations.bucket.histogram.DoubleBounds;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.xpack.analytics.aggregations.support.HistogramValuesSource;

@@ -37,12 +38,13 @@ public class HistoBackedHistogramAggregator extends AbstractHistogramAggregator
        long minDocCount,
        double minBound,
        double maxBound,
        DoubleBounds hardBounds,
        ValuesSourceConfig valuesSourceConfig,
        SearchContext context,
        Aggregator parent,
        CardinalityUpperBound cardinalityUpperBound,
        Map<String, Object> metadata) throws IOException {
    super(name, factories, interval, offset, order, keyed, minDocCount, minBound, maxBound,
    super(name, factories, interval, offset, order, keyed, minDocCount, minBound, maxBound, hardBounds,
        valuesSourceConfig.format(), context, parent, cardinalityUpperBound, metadata);

    // TODO: Stop using null here

@@ -13,7 +13,7 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.GeoDistanceAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;

@@ -59,7 +59,7 @@ public class RollupRequestTranslationTests extends ESTestCase {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.calendarInterval(new DateHistogramInterval("1d"))
            .field("foo")
            .extendedBounds(new ExtendedBounds(0L, 1000L))
            .extendedBounds(new LongBounds(0L, 1000L))
            .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
            .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

@@ -95,7 +95,7 @@ public class RollupRequestTranslationTests extends ESTestCase {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.calendarInterval(new DateHistogramInterval("1d"))
            .field("foo")
            .extendedBounds(new ExtendedBounds(0L, 1000L))
            .extendedBounds(new LongBounds(0L, 1000L))
            .format("yyyy-MM-dd")
            .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"));
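
Taken together, the renames in these test hunks track the production change: ExtendedBounds becomes LongBounds (millisecond bounds for date histograms), and DoubleBounds is its new numeric counterpart. A schematic contrast (values illustrative):

    LongBounds dateBounds = new LongBounds(0L, 1000L);        // was: new ExtendedBounds(0L, 1000L)
    DoubleBounds numericBounds = new DoubleBounds(0.0, 20.0); // new type introduced by this change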