Test: Protect auto_date_histo from 0 buckets

The test for `auto_date_histogram` was trying to round `Long.MAX_VALUE`
if there were 0 buckets. That doesn't work: `Long.MAX_VALUE` is just the
sentinel that `lowest` starts from, and with no buckets nothing ever
lowers it before it gets rounded.
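
To see why, here is a minimal, self-contained sketch of the failure mode. `ZeroBucketSketch` and its fixed-interval `round` are stand-ins invented for illustration, not the test's real `roundingInfo.rounding`:

```java
import java.util.List;

public class ZeroBucketSketch {
    // Stand-in for the test's roundingInfo.rounding.round(key):
    // floor the key down to a fixed interval.
    static long round(long key, long intervalMillis) {
        return Math.floorDiv(key, intervalMillis) * intervalMillis;
    }

    public static void main(String[] args) {
        List<Long> bucketKeys = List.of(); // zero buckets: the failing case
        long lowest = Long.MAX_VALUE;      // sentinel, only lowered per bucket
        long highest = 0;
        for (long key : bucketKeys) {
            lowest = Math.min(lowest, key);
            highest = Math.max(highest, key);
        }
        // Unguarded, as before the fix: rounds the untouched sentinel and
        // yields a nonsense bucket key near Long.MAX_VALUE.
        System.out.println(round(lowest, 60_000L));
        // Guarded, like the patched assertReduced: only derive expected
        // buckets when at least one input bucket was seen.
        if (!bucketKeys.isEmpty()) {
            System.out.println(round(lowest, 60_000L));
        }
    }
}
```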

Also, this replaces all of the class variables that were created to make
the random results consistent when testing `InternalAutoDateHistogram`
with the newer `randomResultsToReduce`, which is a little simpler to
understand.
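
The shape of that pattern, sketched with simplified stand-ins (the class, the `Result` record, and the random bounds here are illustrative assumptions, not the real `InternalMultiBucketAggregationTestCase` API; the diff below has the actual overrides):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

// Rather than stashing shared randomness in class fields during setUp(),
// generate it once per reduce round and thread it through an explicit
// overload, so all inputs reduced together agree on the same starting
// date and rounding.
class RandomResultsToReduceSketch {
    record Result(String name, long startingDate, int roundingIndex) {}

    // Overload taking the shared parameters explicitly (mirrors the new
    // seven-argument createTestInstance in the diff below).
    static Result createTestInstance(String name, long startingDate, int roundingIndex) {
        return new Result(name, startingDate, roundingIndex);
    }

    // Every result in one reduce round shares startingDate and roundingIndex;
    // occasionally one input's start is shifted, like the test's usually().
    static List<Result> randomResultsToReduce(String name, int size) {
        ThreadLocalRandom random = ThreadLocalRandom.current();
        long startingDate = random.nextLong(0, 2_524_608_000_000L); // up to ~2050 in epoch millis
        int roundingIndex = random.nextInt(6);
        List<Result> results = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            long shift = random.nextInt(10) == 0 ? random.nextLong(1, 10_000) : 0;
            results.add(createTestInstance(name, startingDate + shift, roundingIndex));
        }
        return results;
    }
}
```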
Nik Everett 2020-06-03 12:07:58 -04:00
parent 4f4c4a8713
commit 7fd94f7d0f
1 changed file with 89 additions and 67 deletions


```diff
@@ -53,35 +53,19 @@ import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasSize;
 
 public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
-
-    private DocValueFormat format;
-    private RoundingInfo[] roundingInfos;
-    private long defaultStart;
-    private int roundingIndex;
-
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        format = randomNumericDocValueFormat();
-        defaultStart = randomLongBetween(0, utcMillis("2050-01-01"));
-        roundingIndex = between(0, AutoDateHistogramAggregationBuilder.buildRoundings(null, null).length - 1);
-    }
-
-    @Override
-    protected InternalAutoDateHistogram createTestInstance(String name,
-                                                           Map<String, Object> metadata,
-                                                           InternalAggregations aggregations) {
-        roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
+    protected InternalAutoDateHistogram createTestInstance(
+        String name,
+        Map<String, Object> metadata,
+        InternalAggregations aggregations,
+        long startingDate,
+        RoundingInfo[] roundingInfos,
+        int roundingIndex,
+        DocValueFormat format
+    ) {
         int nbBuckets = randomNumberOfBuckets();
         int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1);
         List<InternalAutoDateHistogram.Bucket> buckets = new ArrayList<>(nbBuckets);
-        long startingDate = defaultStart;
-        if (rarely()) {
-            startingDate += randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000));
-        }
         long interval = randomIntBetween(1, 3);
         long intervalMillis = roundingInfos[roundingIndex].roughEstimateDurationMillis * interval;
@@ -94,9 +78,26 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
         return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1);
     }
 
+    @Override
+    protected InternalAutoDateHistogram createTestInstance(String name,
+                                                           Map<String, Object> metadata,
+                                                           InternalAggregations aggregations) {
+        RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
+        int roundingIndex = between(0, roundingInfos.length - 1);
+        return createTestInstance(
+            name,
+            metadata,
+            aggregations,
+            randomLongBetween(0, utcMillis("2050-01-01")),
+            roundingInfos,
+            roundingIndex,
+            randomNumericDocValueFormat()
+        );
+    }
+
     /*
-    This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals
-    passed in, instead of using the interval associated with the loop.
+     * This test was added to reproduce a bug where getAppropriateRounding was only ever using the first innerIntervals
+     * passed in, instead of using the interval associated with the loop.
      */
     public void testGetAppropriateRoundingUsesCorrectIntervals() {
         RoundingInfo[] roundings = new RoundingInfo[6];
@@ -121,8 +122,23 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
     }
 
     @Override
+    protected List<InternalAutoDateHistogram> randomResultsToReduce(String name, int size) {
+        long startingDate = randomLongBetween(0, utcMillis("2050-01-01"));
+        RoundingInfo[] roundingInfos = AutoDateHistogramAggregationBuilder.buildRoundings(null, null);
+        int roundingIndex = between(0, roundingInfos.length - 1);
+        DocValueFormat format = randomNumericDocValueFormat();
+        List<InternalAutoDateHistogram> result = new ArrayList<>(size);
+        for (int i = 0; i < size; i++) {
+            long thisResultStart = startingDate;
+            thisResultStart += usually() ? 0 : randomFrom(TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS).toMillis(between(1, 10000));
+            result.add(createTestInstance(name, null, InternalAggregations.EMPTY, thisResultStart, roundingInfos, roundingIndex, format));
+        }
+        return result;
+    }
+
+    @Override
     protected void assertReduced(InternalAutoDateHistogram reduced, List<InternalAutoDateHistogram> inputs) {
+        int totalBucketCount = 0;
         long lowest = Long.MAX_VALUE;
         long highest = 0;
@@ -135,11 +151,12 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
                 if (bucketKey > highest) {
                     highest = bucketKey;
                 }
+                totalBucketCount++;
             }
         }
         int roundingIndex = reduced.getBucketInfo().roundingIdx;
-        RoundingInfo roundingInfo = roundingInfos[roundingIndex];
+        RoundingInfo roundingInfo = AutoDateHistogramAggregationBuilder.buildRoundings(null, null)[roundingIndex];
 
         long normalizedDuration = (highest - lowest) / roundingInfo.getRoughEstimateDurationMillis();
         int innerIntervalIndex = 0;
@@ -163,53 +180,58 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
          * number of buckets.
          */
         int innerIntervalToUse;
-        do {
-            innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex];
-            int bucketCount = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse);
-            if (bucketCount == reduced.getBuckets().size()) {
-                break;
-            }
-            if (bucketCount < reduced.getBuckets().size()) {
-                innerIntervalToUse = roundingInfo.innerIntervals[Math.max(0, innerIntervalIndex - 1)];
-                break;
-            }
-        } while (++innerIntervalIndex < roundingInfo.innerIntervals.length);
+        if (totalBucketCount == 0) {
+            innerIntervalToUse = roundingInfo.innerIntervals[0];
+        } else {
+            do {
+                innerIntervalToUse = roundingInfo.innerIntervals[innerIntervalIndex];
+                int bucketCountAtInterval = getBucketCount(lowest, highest, roundingInfo.rounding, innerIntervalToUse);
+                if (bucketCountAtInterval == reduced.getBuckets().size()) {
+                    break;
+                }
+                if (bucketCountAtInterval < reduced.getBuckets().size()) {
+                    innerIntervalToUse = roundingInfo.innerIntervals[Math.max(0, innerIntervalIndex - 1)];
+                    break;
+                }
+            } while (++innerIntervalIndex < roundingInfo.innerIntervals.length);
+        }
         assertThat(reduced.getInterval().toString(), equalTo(innerIntervalToUse + roundingInfo.unitAbbreviation));
 
         Map<Instant, Long> expectedCounts = new TreeMap<>();
-        long keyForBucket = roundingInfo.rounding.round(lowest);
-        while (keyForBucket <= roundingInfo.rounding.round(highest)) {
-            long nextKey = keyForBucket;
-            for (int i = 0; i < innerIntervalToUse; i++) {
-                nextKey = roundingInfo.rounding.nextRoundingValue(nextKey);
-            }
-            Instant key = Instant.ofEpochMilli(keyForBucket);
-            expectedCounts.put(key, 0L);
-
-            // Iterate through the input buckets, and for each bucket, determine if it's inside
-            // the range of the bucket in the outer loop. If it is, add the doc count to the total
-            // for that bucket.
-            for (InternalAutoDateHistogram histogram : inputs) {
-                for (Histogram.Bucket bucket : histogram.getBuckets()) {
-                    long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
-                    long docCount = bucket.getDocCount();
-                    if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) {
-                        expectedCounts.compute(key,
-                            (k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount);
-                    }
-                }
-            }
-            keyForBucket = nextKey;
-        }
-
-        // If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
-        // This step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
-        if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
-            expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L);
+        if (totalBucketCount > 0) {
+            long keyForBucket = roundingInfo.rounding.round(lowest);
+            while (keyForBucket <= roundingInfo.rounding.round(highest)) {
+                long nextKey = keyForBucket;
+                for (int i = 0; i < innerIntervalToUse; i++) {
+                    nextKey = roundingInfo.rounding.nextRoundingValue(nextKey);
+                }
+                Instant key = Instant.ofEpochMilli(keyForBucket);
+                expectedCounts.put(key, 0L);
+
+                // Iterate through the input buckets, and for each bucket, determine if it's inside
+                // the range of the bucket in the outer loop. If it is, add the doc count to the total
+                // for that bucket.
+                for (InternalAutoDateHistogram histogram : inputs) {
+                    for (Histogram.Bucket bucket : histogram.getBuckets()) {
+                        long roundedBucketKey = roundingInfo.rounding.round(((ZonedDateTime) bucket.getKey()).toInstant().toEpochMilli());
+                        long docCount = bucket.getDocCount();
+                        if (roundedBucketKey >= keyForBucket && roundedBucketKey < nextKey) {
+                            expectedCounts.compute(key,
+                                (k, oldValue) -> (oldValue == null ? 0 : oldValue) + docCount);
+                        }
+                    }
+                }
+                keyForBucket = nextKey;
+            }
+
+            // If there is only a single bucket, and we haven't added it above, add a bucket with no documents.
+            // This step is necessary because of the roundedBucketKey < keyForBucket + intervalInMillis above.
+            if (roundingInfo.rounding.round(lowest) == roundingInfo.rounding.round(highest) && expectedCounts.isEmpty()) {
+                expectedCounts.put(Instant.ofEpochMilli(roundingInfo.rounding.round(lowest)), 0L);
+            }
         }
 
         // pick out the actual reduced values to make the assertion more readable
         Map<Instant, Long> actualCounts = new TreeMap<>();
         for (Histogram.Bucket bucket : reduced.getBuckets()) {
@@ -257,7 +279,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
                 break;
             case 1:
                 buckets = new ArrayList<>(buckets);
-                buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format,
+                buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), instance.getFormatter(),
                     InternalAggregations.EMPTY));
                 break;
             case 2:
@@ -275,7 +297,7 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
             default:
                 throw new AssertionError("Illegal randomisation branch");
         }
-        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1);
+        return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, instance.getFormatter(), metadata, 1);
     }
 
     public void testReduceSecond() {
```