From 7fd94f7d0f901305c330972cf7bdf791eca4dcbd Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 3 Jun 2020 12:07:58 -0400 Subject: [PATCH] Test: Protect auto_date_histo from 0 buckets The test for `auto_date_histogram` was trying to round `Long.MAX_VALUE` if there were 0 buckets. That doesn't work. Also, this replaces all of the class variables created to make consistent random result when testing `InternalAutoDateHistogram` with the newer `randomResultsToReduce` which is a little simpler to understand. --- .../InternalAutoDateHistogramTests.java | 156 ++++++++++-------- 1 file changed, 89 insertions(+), 67 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java index 9bad91ed6ce..7e144f11497 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -53,35 +53,19 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase { - - private DocValueFormat format; - private RoundingInfo[] roundingInfos; - private long defaultStart; - private int roundingIndex; - - @Override - public void setUp() throws Exception { - super.setUp(); - format = randomNumericDocValueFormat(); - defaultStart = randomLongBetween(0, utcMillis("2050-01-01")); - roundingIndex = between(0, AutoDateHistogramAggregationBuilder.buildRoundings(null, null).length - 1); - } - - @Override - protected InternalAutoDateHistogram createTestInstance(String name, - Map metadata, - InternalAggregations aggregations) { - - roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null); + 
protected InternalAutoDateHistogram createTestInstance( + String name, + Map metadata, + InternalAggregations aggregations, + long startingDate, + RoundingInfo[] roundingInfos, + int roundingIndex, + DocValueFormat format + ) { int nbBuckets = randomNumberOfBuckets(); int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); List buckets = new ArrayList<>(nbBuckets); - long startingDate = defaultStart; - if (rarely()) { - startingDate += randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000)); - } - long interval = randomIntBetween(1, 3); long intervalMillis = roundingInfos[roundingIndex].roughEstimateDurationMillis * interval; @@ -94,9 +78,26 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1); } + @Override + protected InternalAutoDateHistogram createTestInstance(String name, + Map metadata, + InternalAggregations aggregations) { + RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null); + int roundingIndex = between(0, roundingInfos.length - 1); + return createTestInstance( + name, + metadata, + aggregations, + randomLongBetween(0, utcMillis("2050-01-01")), + roundingInfos, + roundingIndex, + randomNumericDocValueFormat() + ); + } + /* - This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals - passed in, instead of using the interval associated with the loop. + * This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals + * passed in, instead of using the interval associated with the loop. 
*/ public void testGetAppropriateRoundingUsesCorrectIntervals() { RoundingInfo[] roundings = new RoundingInfo[6]; @@ -121,8 +122,23 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati } @Override - protected void assertReduced(InternalAutoDateHistogram reduced, List inputs) { + protected List randomResultsToReduce(String name, int size) { + long startingDate = randomLongBetween(0, utcMillis("2050-01-01")); + RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null); + int roundingIndex = between(0, roundingInfos.length - 1); + DocValueFormat format = randomNumericDocValueFormat(); + List result = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + long thisResultStart = startingDate; + thisResultStart += usually() ? 0 :randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000)); + result.add(createTestInstance(name, null, InternalAggregations.EMPTY, thisResultStart, roundingInfos, roundingIndex, format)); + } + return result; + } + @Override + protected void assertReduced(InternalAutoDateHistogram reduced, List inputs) { + int totalBucketConut = 0; long lowest = Long.MAX_VALUE; long highest = 0; @@ -135,11 +151,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati if (bucketKey > highest) { highest = bucketKey; } + totalBucketConut++; } } int roundingIndex = reduced.getBucketInfo().roundingIdx; - RoundingInfo roundingInfo = roundingInfos[roundingIndex]; + RoundingInfo roundingInfo = AutoDateHistogramAggregationBuilder.buildRoundings(null, null)[roundingIndex]; long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis(); int innerIntervalIndex = 0; @@ -163,53 +180,58 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati * number of buckets. 
*/ int innerIntervalToUse; - do { - innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex]; - int bucketCount = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse); - if (bucketCount == reduced.getBuckets().size()) { - break; - } - if (bucketCount < reduced.getBuckets().size()) { - innerIntervalToUse = roundingInfo.innerIntervals[Math.max(0, innerIntervalIndex - 1)]; - break; - } - } while (++innerIntervalIndex < roundingInfo.innerIntervals.length); + if (totalBucketConut == 0) { + innerIntervalToUse = roundingInfo.innerIntervals[0]; + } else { + do { + innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex]; + int bucketCountAtInterval = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse); + if (bucketCountAtInterval == reduced.getBuckets().size()) { + break; + } + if (bucketCountAtInterval < reduced.getBuckets().size()) { + innerIntervalToUse = roundingInfo.innerIntervals[Math.max(0, innerIntervalIndex - 1)]; + break; + } + } while (++innerIntervalIndex < roundingInfo.innerIntervals.length); + } assertThat(reduced.getInterval().toString(), equalTo(innerIntervalToUse + roundingInfo.unitAbbreviation)); Map expectedCounts = new TreeMap<>(); - long keyForBucket = roundingInfo.rounding.round(lowest); - while (keyForBucket <= roundingInfo.rounding.round(highest)) { - long nextKey = keyForBucket; - for (int i = 0; i < innerIntervalToUse; i++) { - nextKey = roundingInfo.rounding.nextRoundingValue(nextKey); - } - Instant key = Instant.ofEpochMilli(keyForBucket); - expectedCounts.put(key, 0L); + if (totalBucketConut > 0) { + long keyForBucket = roundingInfo.rounding.round(lowest); + while (keyForBucket <= roundingInfo.rounding.round(highest)) { + long nextKey = keyForBucket; + for (int i = 0; i < innerIntervalToUse; i++) { + nextKey = roundingInfo.rounding.nextRoundingValue(nextKey); + } + Instant key = Instant.ofEpochMilli(keyForBucket); + expectedCounts.put(key, 0L); - // Iterate through the input 
buckets, and for each bucket, determine if it's inside - // the range of the bucket in the outer loop. if it is, add the doc count to the total - // for that bucket. + // Iterate through the input buckets, and for each bucket, determine if it's inside + // the range of the bucket in the outer loop. if it is, add the doc count to the total + // for that bucket. - for (InternalAutoDateHistogram histogram : inputs) { - for (Histogram.Bucket bucket : histogram.getBuckets()) { - long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli()); - long docCount = bucket.getDocCount(); - if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) { - expectedCounts.compute(key, - (k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount); + for (InternalAutoDateHistogram histogram : inputs) { + for (Histogram.Bucket bucket : histogram.getBuckets()) { + long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli()); + long docCount = bucket.getDocCount(); + if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) { + expectedCounts.compute(key, + (k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount); + } } } + keyForBucket = nextKey; } - keyForBucket = nextKey; - } - // If there is only a single bucket, and we haven't added it above, add a bucket with no documents. - // this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above. - if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) { - expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L); + // If there is only a single bucket, and we haven't added it above, add a bucket with no documents. + // this step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above. 
+ if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) { + expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L); + } } - // pick out the actual reduced values to the make the assertion more readable Map actualCounts = new TreeMap<>(); for (Histogram.Bucket bucket : reduced.getBuckets()) { @@ -257,7 +279,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati break; case 1: buckets = new ArrayList<>(buckets); - buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, + buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), instance.getFormatter(), InternalAggregations.EMPTY)); break; case 2: @@ -275,7 +297,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati default: throw new AssertionError("Illegal randomisation branch"); } - return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1); + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, instance.getFormatter(), metadata, 1); } public void testReduceSecond() {