Fix AutoIntervalDateHistogram.testReduce random failures (#32301)
1. Refactor the test to use the same roundings as the implementation.
2. Refactor the test verification logic to use `innerIntervals` when rounding (sketched below).
parent 97b379e0d4
commit 6f93911955
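Point 2 of the commit message is easier to see outside the test harness. Below is a minimal, self-contained sketch of the inner-interval selection that the rewritten verification performs; `RoundingInfo` here is a simplified stand-in for the Elasticsearch class of the same name (only the rough duration estimate and the `innerIntervals` array are modeled), and the sample numbers mirror the second-of-minute entry in `buildRoundings`:

// Standalone sketch (not Elasticsearch code) of the interval selection the
// refactored test mirrors.
import java.util.Arrays;

public class InnerIntervalSketch {

    static final class RoundingInfo {
        final long roughEstimateDurationMillis;
        final int[] innerIntervals;

        RoundingInfo(long roughEstimateDurationMillis, int... innerIntervals) {
            this.roughEstimateDurationMillis = roughEstimateDurationMillis;
            this.innerIntervals = innerIntervals;
        }
    }

    // Mirrors the test's loop: keep the last (largest) inner interval whose
    // estimated bucket count stays below the target.
    static long chooseInnerInterval(RoundingInfo info, long lowestKey, long highestKey, int maxBuckets) {
        long normalizedDuration = (highestKey - lowestKey) / info.roughEstimateDurationMillis;
        long innerIntervalToUse = 0;
        for (int interval : info.innerIntervals) {
            if (normalizedDuration / interval < maxBuckets) {
                innerIntervalToUse = interval;
            }
        }
        return innerIntervalToUse;
    }

    public static void main(String[] args) {
        // Second-of-minute rounding: 1000 ms estimate, inner intervals 1, 5, 10, 30.
        RoundingInfo seconds = new RoundingInfo(1000L, 1, 5, 10, 30);
        // Ten minutes of data capped at 100 buckets selects the 30-second interval.
        System.out.println(chooseInnerInterval(seconds, 0L, 10 * 60 * 1000L, 100)); // prints 30
    }
}

Because the condition becomes easier to satisfy as the interval grows, the loop ends up keeping the largest listed interval that fits under the bucket cap (or 0 if none fits).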
AutoDateHistogramAggregationBuilder.java

@@ -42,6 +42,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import org.elasticsearch.search.internal.SearchContext;
+import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -53,7 +54,7 @@ public class AutoDateHistogramAggregationBuilder
 
     public static final String NAME = "auto_date_histogram";
 
-    public static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets");
+    private static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets");
 
     private static final ObjectParser<AutoDateHistogramAggregationBuilder, Void> PARSER;
     static {
@@ -63,6 +64,29 @@ public class AutoDateHistogramAggregationBuilder
         PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD);
     }
 
+    /**
+     *
+     * Build roundings, computed dynamically as roundings are time zone dependent.
+     * The current implementation probably should not be invoked in a tight loop.
+     * @return Array of RoundingInfo
+     */
+    static RoundingInfo[] buildRoundings(DateTimeZone timeZone) {
+        RoundingInfo[] roundings = new RoundingInfo[6];
+        roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE, timeZone),
+            1000L, 1, 5, 10, 30);
+        roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR, timeZone),
+            60 * 1000L, 1, 5, 10, 30);
+        roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY, timeZone),
+            60 * 60 * 1000L, 1, 3, 12);
+        roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH, timeZone),
+            24 * 60 * 60 * 1000L, 1, 7);
+        roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR, timeZone),
+            30 * 24 * 60 * 60 * 1000L, 1, 3);
+        roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY, timeZone),
+            365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
+        return roundings;
+    }
+
     public static AutoDateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
         return PARSER.parse(parser, new AutoDateHistogramAggregationBuilder(aggregationName), null);
     }
@@ -116,14 +140,7 @@ public class AutoDateHistogramAggregationBuilder
     @Override
     protected ValuesSourceAggregatorFactory<Numeric, ?> innerBuild(SearchContext context, ValuesSourceConfig<Numeric> config,
                                                                    AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
-        RoundingInfo[] roundings = new RoundingInfo[6];
-        roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE), 1000L, 1, 5, 10, 30);
-        roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR), 60 * 1000L, 1, 5, 10, 30);
-        roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY), 60 * 60 * 1000L, 1, 3, 12);
-        roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH), 24 * 60 * 60 * 1000L, 1, 7);
-        roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR), 30 * 24 * 60 * 60 * 1000L, 1, 3);
-        roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY), 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100);
-
+        RoundingInfo[] roundings = buildRoundings(timeZone());
         int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1)
             .map(rounding -> rounding.innerIntervals)
             .flatMapToInt(Arrays::stream)
@@ -139,10 +156,10 @@ public class AutoDateHistogramAggregationBuilder
         return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData);
     }
 
-    private Rounding createRounding(DateTimeUnit interval) {
+    private static Rounding createRounding(DateTimeUnit interval, DateTimeZone timeZone) {
        Rounding.Builder tzRoundingBuilder = Rounding.builder(interval);
-        if (timeZone() != null) {
-            tzRoundingBuilder.timeZone(timeZone());
+        if (timeZone != null) {
+            tzRoundingBuilder.timeZone(timeZone);
        }
        Rounding rounding = tzRoundingBuilder.build();
        return rounding;
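The javadoc on `buildRoundings` notes that roundings are time-zone dependent, which is why the method now takes a `DateTimeZone` and is built dynamically instead of being cached. A quick illustration of that dependence, using plain `java.time` rather than the `Rounding`/`DateTimeZone` machinery in the diff (the helper name is mine):

// Why roundings are time-zone dependent: "start of day" for the same instant
// differs across zones, so a day-level bucket key depends on the zone.
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class ZoneRoundingSketch {
    static long roundToStartOfDay(long epochMillis, ZoneId zone) {
        ZonedDateTime zdt = Instant.ofEpochMilli(epochMillis).atZone(zone);
        return zdt.toLocalDate().atStartOfDay(zone).toInstant().toEpochMilli();
    }

    public static void main(String[] args) {
        long t = 1_532_000_000_000L; // 2018-07-19T11:33:20Z
        System.out.println(roundToStartOfDay(t, ZoneId.of("UTC")));
        System.out.println(roundToStartOfDay(t, ZoneId.of("America/New_York"))); // different bucket key
    }
}

The same instant yields different day-bucket keys in the two zones, so a single zone-agnostic rounding table could not serve zoned requests correctly.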
InternalAutoDateHistogram.java

@@ -447,7 +447,8 @@ public final class InternalAutoDateHistogram extends
         return new BucketReduceResult(list, roundingInfo, roundingIdx);
     }
 
-    private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] roundings) {
+    private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx,
+                                       RoundingInfo[] roundings) {
         if (roundingIdx == roundings.length - 1) {
             return roundingIdx;
         }
@@ -509,7 +510,8 @@ public final class InternalAutoDateHistogram extends
             pipelineAggregators(), getMetaData());
     }
 
-    private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, ReduceContext reduceContext) {
+    private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult,
+                                                            ReduceContext reduceContext) {
         List<Bucket> buckets = reducedBucketsResult.buckets;
         RoundingInfo roundingInfo = reducedBucketsResult.roundingInfo;
         int roundingIdx = reducedBucketsResult.roundingIdx;
@@ -539,7 +541,7 @@ public final class InternalAutoDateHistogram extends
                 key = roundingInfo.rounding.round(bucket.key);
             }
             reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1);
-            sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations));
+            sameKeyedBuckets.add(new Bucket(Math.round(key), bucket.docCount, format, bucket.aggregations));
         }
         if (sameKeyedBuckets.isEmpty() == false) {
             reduceContext.consumeBucketsAndMaybeBreak(1);
InternalAutoDateHistogramTests.java

@@ -20,8 +20,6 @@
 package org.elasticsearch.search.aggregations.bucket.histogram;
 
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.rounding.DateTimeUnit;
-import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
@@ -51,14 +49,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
     public void setUp() throws Exception {
         super.setUp();
         format = randomNumericDocValueFormat();
-
-        roundingInfos = new RoundingInfo[6];
-        roundingInfos[0] = new RoundingInfo(Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(), 1, 5, 10, 30);
-        roundingInfos[1] = new RoundingInfo(Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(), 1, 5, 10, 30);
-        roundingInfos[2] = new RoundingInfo(Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(), 1, 3, 12);
-        roundingInfos[3] = new RoundingInfo(Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(), 1, 7);
-        roundingInfos[4] = new RoundingInfo(Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(), 1, 3);
-        roundingInfos[5] = new RoundingInfo(Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(), 1, 10, 20, 50, 100);
     }
 
     @Override
@@ -66,6 +56,8 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
                                                           List<PipelineAggregator> pipelineAggregators,
                                                           Map<String, Object> metaData,
                                                           InternalAggregations aggregations) {
+
+        roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null);
         int nbBuckets = randomNumberOfBuckets();
         int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1);
         List<InternalAutoDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
@@ -81,6 +73,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
         InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList());
         BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations);
+
 
         return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData);
     }
 
@@ -92,13 +85,50 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
                 roundingIdx = histogram.getBucketInfo().roundingIdx;
             }
         }
-        Map<Long, Long> expectedCounts = new TreeMap<>();
-        for (Histogram histogram : inputs) {
+        RoundingInfo roundingInfo = roundingInfos[roundingIdx];
+
+        long lowest = Long.MAX_VALUE;
+        long highest = 0;
+        for (InternalAutoDateHistogram histogram : inputs) {
             for (Histogram.Bucket bucket : histogram.getBuckets()) {
-                expectedCounts.compute(roundingInfos[roundingIdx].rounding.round(((DateTime) bucket.getKey()).getMillis()),
-                    (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
+                long bucketKey = ((DateTime) bucket.getKey()).getMillis();
+                if (bucketKey < lowest) {
+                    lowest = bucketKey;
+                }
+                if (bucketKey > highest) {
+                    highest = bucketKey;
+                }
             }
         }
+        long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
+        long innerIntervalToUse = 0;
+        for (int interval : roundingInfo.innerIntervals) {
+            if (normalizedDuration / interval < maxNumberOfBuckets()) {
+                innerIntervalToUse = interval;
+            }
+        }
+        Map<Long, Long> expectedCounts = new TreeMap<>();
+        long intervalInMillis = innerIntervalToUse*roundingInfo.getRoughEstimateDurationMillis();
+        for (long keyForBucket = roundingInfo.rounding.round(lowest);
+                keyForBucket <= highest;
+                keyForBucket = keyForBucket + intervalInMillis) {
+            expectedCounts.put(keyForBucket, 0L);
+
+            for (InternalAutoDateHistogram histogram : inputs) {
+                for (Histogram.Bucket bucket : histogram.getBuckets()) {
+                    long bucketKey = ((DateTime) bucket.getKey()).getMillis();
+                    long roundedBucketKey = roundingInfo.rounding.round(bucketKey);
+                    if (roundedBucketKey >= keyForBucket
+                        && roundedBucketKey < keyForBucket + intervalInMillis) {
+                        long count = bucket.getDocCount();
+                        expectedCounts.compute(keyForBucket,
+                            (key, oldValue) -> (oldValue == null ? 0 : oldValue) + count);
+                    }
+                }
+            }
+        }
+
+
         Map<Long, Long> actualCounts = new TreeMap<>();
         for (Histogram.Bucket bucket : reduced.getBuckets()) {
             actualCounts.compute(((DateTime) bucket.getKey()).getMillis(),
@@ -117,12 +147,6 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
         return ParsedAutoDateHistogram.class;
     }
 
-    @Override
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32215")
-    public void testReduceRandom() {
-        super.testReduceRandom();
-    }
-
     @Override
     protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) {
         String name = instance.getName();
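For reference, the new expected-count computation in the test (the large hunk above) can be rerun on toy data. This is plain Java with no Elasticsearch types; rounding is modeled as flooring to the minute and the inner interval is fixed at one rounding unit, both assumptions made for the example:

// Toy rerun of the test's new verification loop.
import java.util.Map;
import java.util.TreeMap;

public class ExpectedCountsSketch {

    // Stand-in for roundingInfo.rounding.round: floor to the minute.
    static long round(long key) {
        return key - (key % 60_000L);
    }

    public static void main(String[] args) {
        // Input buckets as (timestampMillis, docCount) pairs.
        long[][] inputBuckets = { {5_000L, 2}, {65_000L, 1}, {70_000L, 4}, {190_000L, 3} };

        long lowest = Long.MAX_VALUE, highest = 0;
        for (long[] b : inputBuckets) {
            lowest = Math.min(lowest, b[0]);
            highest = Math.max(highest, b[0]);
        }

        long intervalInMillis = 60_000L; // innerIntervalToUse (1) * rough estimate (60s)
        Map<Long, Long> expectedCounts = new TreeMap<>();
        // Step through every interval-sized key from round(lowest) to highest,
        // summing the doc counts of input buckets whose rounded key falls inside.
        for (long keyForBucket = round(lowest); keyForBucket <= highest; keyForBucket += intervalInMillis) {
            expectedCounts.put(keyForBucket, 0L);
            for (long[] b : inputBuckets) {
                long roundedBucketKey = round(b[0]);
                if (roundedBucketKey >= keyForBucket && roundedBucketKey < keyForBucket + intervalInMillis) {
                    expectedCounts.merge(keyForBucket, b[1], Long::sum);
                }
            }
        }
        System.out.println(expectedCounts); // {0=2, 60000=5, 120000=0, 180000=3}
    }
}

Note the `120000=0` entry: the stepped loop materializes empty buckets between the lowest and highest keys, matching what the reduce phase produces when it uses `innerIntervals`, whereas the old per-bucket `compute()` built the expected map only from keys present in the inputs — the mismatch that the commit message's second point addresses.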