Add interval response parameter to AutoDateHistogram aggregation (#33254)

Adds the interval used to the aggregation response.
This commit is contained in:
Paul Sanwald 2018-09-05 07:35:59 -04:00 committed by GitHub
parent 4156cc3fae
commit c303006e6b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 74 additions and 28 deletions

View File

@ -81,7 +81,8 @@ Response:
"key": 1425168000000,
"doc_count": 2
}
]
],
"interval": "1M"
}
}
}
@ -174,7 +175,8 @@ starting at midnight UTC on 1 October 2015:
"key": 1443664800000,
"doc_count": 1
}
]
],
"interval": "1h"
}
}
}
@ -229,7 +231,8 @@ the specified time zone.
"key": 1443664800000,
"doc_count": 1
}
]
],
"interval": "1h"
}
}
}

View File

@ -73,17 +73,17 @@ public class AutoDateHistogramAggregationBuilder
static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
RoundingInfo[] roundings = new RoundingInfo[6];
roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, 1, 5, 10, 30);
1000L, "s" , 1, 5, 10, 30);
roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, 1, 5, 10, 30);
60 * 1000L, "m", 1, 5, 10, 30);
roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, 1, 3, 12);
60 * 60 * 1000L, "h", 1, 3, 12);
roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
24 * 60 * 60 * 1000L, 1, 7);
24 * 60 * 60 * 1000L, "d", 1, 7);
roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
30 * 24 * 60 * 60 * 1000L, 1, 3);
30 * 24 * 60 * 60 * 1000L, "M", 1, 3);
roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
365 * 24 * 60 * 60 * 1000L, "y", 1, 5, 10, 20, 50, 100);
return roundings;
}
@ -186,10 +186,12 @@ public class AutoDateHistogramAggregationBuilder
final Rounding rounding;
final int[] innerIntervals;
final long roughEstimateDurationMillis;
final String unitAbbreviation;
public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, int... innerIntervals) {
/**
 * Describes one candidate rounding for the auto date histogram.
 *
 * @param rounding                    the date rounding this entry applies
 * @param roughEstimateDurationMillis rough duration of one unit of this rounding, in milliseconds
 * @param unitAbbreviation            date-math abbreviation for the unit (e.g. "s", "m", "h" — see buildRoundings)
 * @param innerIntervals              multiples of the unit that may be used to coarsen buckets
 */
public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, String unitAbbreviation, int... innerIntervals) {
this.rounding = rounding;
this.roughEstimateDurationMillis = roughEstimateDurationMillis;
this.unitAbbreviation = unitAbbreviation;
this.innerIntervals = innerIntervals;
}
@ -197,6 +199,7 @@ public class AutoDateHistogramAggregationBuilder
rounding = Rounding.Streams.read(in);
roughEstimateDurationMillis = in.readVLong();
innerIntervals = in.readIntArray();
unitAbbreviation = in.readString();
}
@Override
@ -204,6 +207,7 @@ public class AutoDateHistogramAggregationBuilder
Rounding.Streams.write(rounding, out);
out.writeVLong(roughEstimateDurationMillis);
out.writeIntArray(innerIntervals);
out.writeString(unitAbbreviation);
}
public int getMaximumInnerInterval() {

View File

@ -181,7 +181,8 @@ class AutoDateHistogramAggregator extends DeferableBucketAggregator {
InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
buildEmptySubAggregations());
return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo, formatter, pipelineAggregators(), metaData());
return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo,
formatter, pipelineAggregators(), metaData(), 1);
}
@Override
@ -189,7 +190,7 @@ class AutoDateHistogramAggregator extends DeferableBucketAggregator {
InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx,
buildEmptySubAggregations());
return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, emptyBucketInfo, formatter,
pipelineAggregators(), metaData());
pipelineAggregators(), metaData(), 1);
}
@Override

View File

@ -208,15 +208,16 @@ public final class InternalAutoDateHistogram extends
private final DocValueFormat format;
private final BucketInfo bucketInfo;
private final int targetBuckets;
private long bucketInnerInterval;
InternalAutoDateHistogram(String name, List<Bucket> buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData, long bucketInnerInterval) {
super(name, pipelineAggregators, metaData);
this.buckets = buckets;
this.bucketInfo = emptyBucketInfo;
this.format = formatter;
this.targetBuckets = targetBuckets;
this.bucketInnerInterval = bucketInnerInterval;
}
/**
@ -238,6 +239,13 @@ public final class InternalAutoDateHistogram extends
out.writeVInt(targetBuckets);
}
/**
 * Returns the interval this aggregation actually used, rendered as the inner
 * interval count followed by the unit abbreviation of the currently selected
 * rounding (e.g. "1h"). Surfaced in the response as the "interval" field.
 */
public DateHistogramInterval getInterval() {
// Look up the rounding that was selected for this response.
RoundingInfo roundingInfo = this.bucketInfo.roundingInfos[this.bucketInfo.roundingIdx];
String unitAbbreviation = roundingInfo.unitAbbreviation;
return new DateHistogramInterval(Long.toString(bucketInnerInterval) + unitAbbreviation);
}
@Override
public String getWriteableName() {
return AutoDateHistogramAggregationBuilder.NAME;
@ -262,7 +270,7 @@ public final class InternalAutoDateHistogram extends
@Override
public InternalAutoDateHistogram create(List<Bucket> buckets) {
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData, 1);
}
@Override
@ -279,7 +287,6 @@ public final class InternalAutoDateHistogram extends
this.iterator = iterator;
current = iterator.next();
}
}
/**
@ -365,7 +372,7 @@ public final class InternalAutoDateHistogram extends
reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx];
reducedBuckets = mergeBuckets(reducedBuckets, reduceRoundingInfo.rounding, reduceContext);
}
return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx);
return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx, 1);
}
private List<Bucket> mergeBuckets(List<Bucket> reducedBuckets, Rounding reduceRounding, ReduceContext reduceContext) {
@ -403,12 +410,13 @@ public final class InternalAutoDateHistogram extends
List<Bucket> buckets;
RoundingInfo roundingInfo;
int roundingIdx;
long innerInterval;
BucketReduceResult(List<Bucket> buckets, RoundingInfo roundingInfo, int roundingIdx) {
/**
 * Captures the intermediate state of a reduce pass: the merged buckets, the
 * rounding that produced them, its index, and the inner interval that was applied.
 */
BucketReduceResult(List<Bucket> buckets, RoundingInfo roundingInfo, int roundingIdx, long innerInterval) {
this.buckets = buckets;
this.roundingInfo = roundingInfo;
this.roundingIdx = roundingIdx;
this.innerInterval = innerInterval;
}
}
@ -444,7 +452,7 @@ public final class InternalAutoDateHistogram extends
}
lastBucket = iter.next();
}
return new BucketReduceResult(list, roundingInfo, roundingIdx);
return new BucketReduceResult(list, roundingInfo, roundingIdx, currentResult.innerInterval);
}
static int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
@ -507,7 +515,7 @@ public final class InternalAutoDateHistogram extends
this.bucketInfo.emptySubAggregations);
return new InternalAutoDateHistogram(getName(), reducedBucketsResult.buckets, targetBuckets, bucketInfo, format,
pipelineAggregators(), getMetaData());
pipelineAggregators(), getMetaData(), reducedBucketsResult.innerInterval);
}
private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult,
@ -547,7 +555,7 @@ public final class InternalAutoDateHistogram extends
reduceContext.consumeBucketsAndMaybeBreak(1);
mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext));
}
return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx);
return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx, mergeInterval);
}
@Override
@ -557,6 +565,7 @@ public final class InternalAutoDateHistogram extends
bucket.toXContent(builder, params);
}
builder.endArray();
builder.field("interval", getInterval().toString());
return builder;
}
@ -580,7 +589,7 @@ public final class InternalAutoDateHistogram extends
buckets2.add((Bucket) b);
}
buckets2 = Collections.unmodifiableList(buckets2);
return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, pipelineAggregators(), getMetaData());
return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, pipelineAggregators(), getMetaData(), 1);
}
@Override

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -36,6 +37,16 @@ public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation<Parsed
return AutoDateHistogramAggregationBuilder.NAME;
}
private String interval;
/** Returns the "interval" string parsed from the response (e.g. "1h"), or null if absent. */
public String getInterval() {
return interval;
}
/** Sets the interval string; normally populated by the "interval" field declared on PARSER. */
public void setInterval(String interval) {
this.interval = interval;
}
@Override
public List<? extends Histogram.Bucket> getBuckets() {
return buckets;
@ -47,6 +58,8 @@ public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation<Parsed
declareMultiBucketAggregationFields(PARSER,
parser -> ParsedBucket.fromXContent(parser, false),
parser -> ParsedBucket.fromXContent(parser, true));
PARSER.declareString((parsed, value) -> parsed.interval = value,
new ParseField("interval"));
}
public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException {
@ -55,6 +68,14 @@ public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation<Parsed
return aggregation;
}
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
// Emit the standard multi-bucket body first, then append the interval
// this aggregation used so round-tripping preserves the field.
builder = super.doXContentBody(builder, params);
builder.field("interval", getInterval());
return builder;
}
public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket {
private Long key;

View File

@ -80,7 +80,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
}
InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData, 1);
}
/*
@ -94,11 +94,11 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
// an innerInterval that is quite large, such that targetBuckets * roundings[i].getMaximumInnerInterval()
// will be larger than the estimate.
roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
1000L, 1000);
1000L, "s", 1000);
roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
60 * 1000L, 1, 5, 10, 30);
60 * 1000L, "m", 1, 5, 10, 30);
roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
60 * 60 * 1000L, 1, 3, 12);
60 * 60 * 1000L, "h", 1, 3, 12);
OffsetDateTime timestamp = Instant.parse("2018-01-01T00:00:01.000Z").atOffset(ZoneOffset.UTC);
// We want to pass a roundingIdx of zero, because in order to reproduce this bug, we need the function
@ -198,6 +198,14 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
(key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
}
assertEquals(expectedCounts, actualCounts);
DateHistogramInterval expectedInterval;
if (reduced.getBuckets().size() == 1) {
expectedInterval = reduced.getInterval();
} else {
expectedInterval = new DateHistogramInterval(innerIntervalToUse+roundingInfo.unitAbbreviation);
}
assertThat(reduced.getInterval(), equalTo(expectedInterval));
}
private int getBucketCount(long lowest, long highest, RoundingInfo roundingInfo, long intervalInMillis) {
@ -252,6 +260,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
default:
throw new AssertionError("Illegal randomisation branch");
}
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData, 1);
}
}