Fix auto_date_histogram interval (#56252) (#56341)

`auto_date_histogram` was returning the incorrect `interval` because
of a combination of two things:
1. When pipeline aggregations rewrote `auto_date_histogram` we reset the
   interval to 1. Oops. Fixed that.
2. *Every* bucket aggregation was rewriting its buckets as though there
   were pipeline aggregations even when there weren't any. This is a bit
   silly so we skip that too.

Closes #56116
This commit is contained in:
Nik Everett 2020-05-07 10:27:40 -04:00 committed by GitHub
parent bd0e0f41a0
commit b5e385fa56
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
8 changed files with 125 additions and 11 deletions

View File

@ -25,6 +25,9 @@ setup:
---
"basic":
- skip:
version: " - 7.8.99"
reason: interval had a bug before 7.9.0
- do:
search:
rest_total_hits_as_int: true
@ -37,6 +40,7 @@ setup:
buckets: 2
- match: { hits.total: 4 }
- length: { aggregations.histo.buckets: 2 }
- match: { aggregations.histo.interval: "7d" }
- match: { aggregations.histo.buckets.0.key_as_string: "2020-03-01T00:00:00.000Z" }
- match: { aggregations.histo.buckets.0.doc_count: 2 }
- match: { aggregations.histo.buckets.1.key_as_string: "2020-03-08T00:00:00.000Z" }

View File

@ -152,8 +152,12 @@ public abstract class InternalMultiBucketAggregation<A extends InternalMultiBuck
public final InternalAggregation reducePipelines(
InternalAggregation reducedAggs, ReduceContext reduceContext, PipelineTree pipelineTree) {
assert reduceContext.isFinalReduce();
List<B> materializedBuckets = reducePipelineBuckets(reduceContext, pipelineTree);
return super.reducePipelines(create(materializedBuckets), reduceContext, pipelineTree);
InternalAggregation reduced = this;
if (pipelineTree.hasSubTrees()) {
List<B> materializedBuckets = reducePipelineBuckets(reduceContext, pipelineTree);
reduced = create(materializedBuckets);
}
return super.reducePipelines(reduced, reduceContext, pipelineTree);
}
@Override

View File

@ -121,13 +121,17 @@ public abstract class InternalSingleBucketAggregation extends InternalAggregatio
public final InternalAggregation reducePipelines(
InternalAggregation reducedAggs, ReduceContext reduceContext, PipelineTree pipelineTree) {
assert reduceContext.isFinalReduce();
List<InternalAggregation> aggs = new ArrayList<>();
for (Aggregation agg : getAggregations().asList()) {
PipelineTree subTree = pipelineTree.subTree(agg.getName());
aggs.add(((InternalAggregation)agg).reducePipelines((InternalAggregation)agg, reduceContext, subTree));
InternalAggregation reduced = this;
if (pipelineTree.hasSubTrees()) {
List<InternalAggregation> aggs = new ArrayList<>();
for (Aggregation agg : getAggregations().asList()) {
PipelineTree subTree = pipelineTree.subTree(agg.getName());
aggs.add(((InternalAggregation)agg).reducePipelines((InternalAggregation)agg, reduceContext, subTree));
}
InternalAggregations reducedSubAggs = new InternalAggregations(aggs);
reduced = create(reducedSubAggs);
}
InternalAggregations reducedSubAggs = new InternalAggregations(aggs);
return super.reducePipelines(create(reducedSubAggs), reduceContext, pipelineTree);
return super.reducePipelines(reduced, reduceContext, pipelineTree);
}
@Override

View File

@ -196,7 +196,10 @@ public final class InternalAutoDateHistogram extends
private final DocValueFormat format;
private final BucketInfo bucketInfo;
private final int targetBuckets;
private long bucketInnerInterval;
/**
* The interval within the rounding that the buckets are using.
*/
private final long bucketInnerInterval;
InternalAutoDateHistogram(String name, List<Bucket> buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter,
Map<String, Object> metadata, long bucketInnerInterval) {
@ -217,6 +220,7 @@ public final class InternalAutoDateHistogram extends
format = in.readNamedWriteable(DocValueFormat.class);
buckets = in.readList(stream -> new Bucket(stream, format));
this.targetBuckets = in.readVInt();
bucketInnerInterval = 1; // Calculated on merge.
}
@Override
@ -258,7 +262,7 @@ public final class InternalAutoDateHistogram extends
@Override
public InternalAutoDateHistogram create(List<Bucket> buckets) {
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, 1);
return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, metadata, bucketInnerInterval);
}
@Override
@ -593,7 +597,7 @@ public final class InternalAutoDateHistogram extends
buckets2.add((Bucket) b);
}
buckets2 = Collections.unmodifiableList(buckets2);
return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, getMetadata(), 1);
return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, getMetadata(), bucketInnerInterval);
}
@Override

View File

@ -96,6 +96,13 @@ public abstract class PipelineAggregator implements NamedWriteable {
return subTrees.getOrDefault(name, EMPTY);
}
/**
 * Whether this node in the pipeline tree has any subtrees hanging off
 * of it. Used to skip rewriting buckets when there is nothing to run.
 */
public boolean hasSubTrees() {
    return subTrees.isEmpty() == false;
}
@Override
public String toString() {
return "PipelineTree[" + aggregators + "," + subTrees + "]";

View File

@ -19,13 +19,22 @@
package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalSingleBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.sameInstance;
public class InternalFilterTests extends InternalSingleBucketAggregationTestCase<InternalFilter> {
@Override
protected InternalFilter createTestInstance(String name, long docCount, InternalAggregations aggregations,
@ -42,4 +51,32 @@ public class InternalFilterTests extends InternalSingleBucketAggregationTestCase
protected Class<? extends ParsedSingleBucketAggregation> implementationClass() {
return ParsedFilter.class;
}
/**
 * When the pipeline tree is empty the reduce should short circuit and
 * hand back the exact same aggregation instance rather than rebuild it.
 */
public void testReducePipelinesReturnsSameInstanceWithoutPipelines() {
    InternalFilter filter = createTestInstance();
    InternalAggregation reduced = filter.reducePipelines(filter, emptyReduceContextBuilder().forFinalReduction(), PipelineTree.EMPTY);
    assertThat(reduced, sameInstance(filter));
}
/**
 * Reducing with a pipeline tree that has a subtree for the "inner"
 * sub-aggregation should run that pipeline, swapping "inner"'s result
 * for whatever the mock pipeline returns.
 */
public void testReducePipelinesReducesBucketPipelines() {
    InternalFilter replacement = createTestInstance();
    InternalFilter inner = createTestInstance();
    InternalAggregations subAggs = new InternalAggregations(singletonList(inner));
    InternalFilter outer = createTestInstance("test", randomNonNegativeLong(), subAggs, emptyMap());
    // A pipeline that ignores its input and always emits "replacement".
    PipelineAggregator swapper = new PipelineAggregator(null, null, null) {
        @Override
        public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
            return replacement;
        }
    };
    PipelineTree tree = new PipelineTree(
        org.elasticsearch.common.collect.Map.of(inner.getName(), new PipelineTree(emptyMap(), singletonList(swapper))),
        emptyList());
    InternalFilter reduced = (InternalFilter) outer.reducePipelines(outer, emptyReduceContextBuilder().forFinalReduction(), tree);
    assertThat(reduced.getAggregations().get(replacement.getName()), sameInstance(replacement));
}
}

View File

@ -19,9 +19,13 @@
package org.elasticsearch.search.aggregations.bucket.filter;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters.InternalBucket;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
import java.util.ArrayList;
@ -30,6 +34,11 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.sameInstance;
public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase<InternalFilters> {
private boolean keyed;
@ -109,4 +118,34 @@ public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase
}
return new InternalFilters(name, buckets, keyed, metadata);
}
/**
 * With no pipelines in the tree the reduce should short circuit and
 * return the exact same aggregation instance rather than rebuild it.
 */
public void testReducePipelinesReturnsSameInstanceWithoutPipelines() {
    InternalFilters filters = createTestInstance();
    InternalAggregation reduced = filters.reducePipelines(filters, emptyReduceContextBuilder().forFinalReduction(), PipelineTree.EMPTY);
    assertThat(reduced, sameInstance(filters));
}
/**
 * Reducing with a pipeline tree containing a subtree for the "inner"
 * sub-aggregation should run that pipeline against every bucket,
 * replacing "inner" with whatever the mock pipeline returns.
 */
public void testReducePipelinesReducesBucketPipelines() {
    InternalFilters replacement = createTestInstance();
    InternalFilters inner = createTestInstance();
    InternalAggregations subAggs = new InternalAggregations(singletonList(inner));
    InternalFilters outer = createTestInstance("test", emptyMap(), subAggs);
    // A pipeline that ignores its input and always emits "replacement".
    PipelineAggregator swapper = new PipelineAggregator(null, null, null) {
        @Override
        public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
            return replacement;
        }
    };
    PipelineTree tree = new PipelineTree(
        org.elasticsearch.common.collect.Map.of(inner.getName(), new PipelineTree(emptyMap(), singletonList(swapper))),
        emptyList());
    InternalFilters reduced = (InternalFilters) outer.reducePipelines(outer, emptyReduceContextBuilder().forFinalReduction(), tree);
    for (InternalBucket bucket : reduced.getBuckets()) {
        assertThat(bucket.getAggregations().get(replacement.getName()), sameInstance(replacement));
    }
}
}

View File

@ -50,6 +50,7 @@ import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase<InternalAutoDateHistogram> {
@ -376,4 +377,18 @@ public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregati
/**
 * Extract each bucket's doc count, narrowed to {@code int} so the
 * results are easy to compare in assertions.
 */
private List<Integer> docCounts(InternalAutoDateHistogram h) {
    return h.getBuckets().stream().mapToInt(b -> (int) b.getDocCount()).boxed().collect(toList());
}
/**
 * {@code create} should carry over every field except the buckets,
 * including the inner interval that powers {@code getInterval()}.
 */
public void testCreateWithReplacementBuckets() {
    InternalAutoDateHistogram base = createTestInstance();
    // Pin a random inner interval so we can check that it survives the copy.
    InternalAutoDateHistogram original = new InternalAutoDateHistogram(
        base.getName(), base.getBuckets(), base.getTargetBuckets(), base.getBucketInfo(),
        base.getFormatter(), base.getMetadata(), randomLong());
    InternalAutoDateHistogram replaced = original.create(emptyList());
    assertThat(replaced.getName(), equalTo(original.getName()));
    assertThat(replaced.getBuckets(), hasSize(0));
    assertThat(replaced.getTargetBuckets(), equalTo(original.getTargetBuckets()));
    assertThat(replaced.getBucketInfo(), equalTo(original.getBucketInfo()));
    assertThat(replaced.getFormatter(), equalTo(original.getFormatter()));
    assertThat(replaced.getInterval(), equalTo(original.getInterval()));
}
}