diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml
index 991eab27143..acd3d7e02a6 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/10_histogram.yml
@@ -492,7 +492,65 @@ setup:
   - match: { aggregations.histo.buckets.0.doc_count: 1 }
 
 ---
-"profiler":
+"histogram profiler":
+  - skip:
+      version: " - 7.8.99"
+      reason: debug info added in 7.9.0
+
+  - do:
+      indices.create:
+        index: test_2
+        body:
+          settings:
+            number_of_replicas: 0
+            number_of_shards: 1
+          mappings:
+            properties:
+              n:
+                type: long
+
+  - do:
+      bulk:
+        index: test_2
+        refresh: true
+        body:
+          - '{"index": {}}'
+          - '{"n": "1"}'
+          - '{"index": {}}'
+          - '{"n": "2"}'
+          - '{"index": {}}'
+          - '{"n": "10"}'
+          - '{"index": {}}'
+          - '{"n": "17"}'
+
+  - do:
+      search:
+        index: test_2
+        body:
+          size: 0
+          profile: true
+          aggs:
+            histo:
+              histogram:
+                field: n
+                interval: 5
+  - match: { hits.total.value: 4 }
+  - length: { aggregations.histo.buckets: 4 }
+  - match: { aggregations.histo.buckets.0.key: 0 }
+  - match: { aggregations.histo.buckets.0.doc_count: 2 }
+  - match: { aggregations.histo.buckets.1.key: 5 }
+  - match: { aggregations.histo.buckets.1.doc_count: 0 }
+  - match: { aggregations.histo.buckets.2.key: 10 }
+  - match: { aggregations.histo.buckets.2.doc_count: 1 }
+  - match: { aggregations.histo.buckets.3.key: 15 }
+  - match: { aggregations.histo.buckets.3.doc_count: 1 }
+  - match: { profile.shards.0.aggregations.0.type: NumericHistogramAggregator }
+  - match: { profile.shards.0.aggregations.0.description: histo }
+  - match: { profile.shards.0.aggregations.0.breakdown.collect_count: 4 }
+  - match: { profile.shards.0.aggregations.0.debug.total_buckets: 3 }
+
+---
+"date_histogram profiler":
   - skip:
       version: " - 7.8.99"
       reason: debug info added in 7.9.0
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
new file mode 100644
index 00000000000..f026bcdc01f
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket.histogram;
+
+import org.apache.lucene.util.CollectionUtil;
+import org.elasticsearch.common.lease.Releasables;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.BucketOrder;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
+import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.EmptyBucketInfo;
+import org.elasticsearch.search.aggregations.bucket.terms.LongKeyedBucketOrds;
+import org.elasticsearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.function.BiConsumer;
+
+/**
+ * Base class for functionality shared between aggregators for this
+ * {@code histogram} aggregation.
+ */
+public abstract class AbstractHistogramAggregator extends BucketsAggregator {
+    protected final DocValueFormat formatter;
+    protected final double interval;
+    protected final double offset;
+    protected final BucketOrder order;
+    protected final boolean keyed;
+    protected final long minDocCount;
+    protected final double minBound;
+    protected final double maxBound;
+    protected final LongKeyedBucketOrds bucketOrds;
+
+    public AbstractHistogramAggregator(
+        String name,
+        AggregatorFactories factories,
+        double interval,
+        double offset,
+        BucketOrder order,
+        boolean keyed,
+        long minDocCount,
+        double minBound,
+        double maxBound,
+        DocValueFormat formatter,
+        SearchContext context,
+        Aggregator parent,
+        boolean collectsFromSingleBucket,
+        Map<String, Object> metadata
+    ) throws IOException {
+        super(name, factories, context, parent, metadata);
+        if (interval <= 0) {
+            throw new IllegalArgumentException("interval must be positive, got: " + interval);
+        }
+        this.interval = interval;
+        this.offset = offset;
+        this.order = order;
+        order.validate(this);
+        this.keyed = keyed;
+        this.minDocCount = minDocCount;
+        this.minBound = minBound;
+        this.maxBound = maxBound;
+        this.formatter = formatter;
+        bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), collectsFromSingleBucket);
+    }
+
+    @Override
+    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
+        return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds,
+            (bucketValue, docCount, subAggregationResults) -> {
+                double roundKey = Double.longBitsToDouble(bucketValue);
+                double key = roundKey * interval + offset;
+                return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults);
+            }, buckets -> {
+                // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order
+                CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
+
+                EmptyBucketInfo emptyBucketInfo = null;
+                if (minDocCount == 0) {
+                    emptyBucketInfo = new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
+                }
+                return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
+            });
+    }
+
+    @Override
+    public InternalAggregation buildEmptyAggregation() {
+        InternalHistogram.EmptyBucketInfo emptyBucketInfo = null;
+        if (minDocCount == 0) {
+            emptyBucketInfo = new InternalHistogram.EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
+        }
+        return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
+    }
+
+    @Override
+    public void doClose() {
+        Releasables.close(bucketOrds);
+    }
+
+    @Override
+    public void collectDebugInfo(BiConsumer<String, Object> add) {
+        add.accept("total_buckets", bucketOrds.size());
+        super.collectDebugInfo(add);
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java
index b28baa38091..6b9263a7af2 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregatorFactory.java
@@ -19,8 +19,8 @@
 
 package org.elasticsearch.search.aggregations.bucket.histogram;
 
+import org.elasticsearch.common.collect.List;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -36,8 +36,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.Map;
 
 /**
@@ -54,40 +52,11 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
 
     static void registerAggregators(ValuesSourceRegistry.Builder builder) {
         builder.register(HistogramAggregationBuilder.NAME, CoreValuesSourceType.RANGE,
-            new HistogramAggregatorSupplier() {
-                @Override
-                public Aggregator build(String name, AggregatorFactories factories, double interval, double offset,
-                                        BucketOrder order, boolean keyed, long minDocCount, double minBound, double maxBound,
-                                        ValuesSource valuesSource, DocValueFormat formatter, SearchContext context,
-                                        Aggregator parent,
-                                        Map<String, Object> metadata) throws IOException {
-                    ValuesSource.Range rangeValueSource = (ValuesSource.Range) valuesSource;
-                    if (rangeValueSource.rangeType().isNumeric() == false) {
-                        throw new IllegalArgumentException("Expected numeric range type but found non-numeric range ["
-                            + rangeValueSource.rangeType().name + "]");
-                    }
-                    return new RangeHistogramAggregator(name, factories, interval, offset, order, keyed, minDocCount, minBound,
-                        maxBound, rangeValueSource, formatter, context, parent, metadata);
-                }
-            }
-        );
+            (HistogramAggregatorSupplier) RangeHistogramAggregator::new);
 
         builder.register(HistogramAggregationBuilder.NAME,
-            Collections.unmodifiableList(Arrays.asList(CoreValuesSourceType.NUMERIC,
-                CoreValuesSourceType.DATE,
-                CoreValuesSourceType.BOOLEAN)),
-            new HistogramAggregatorSupplier() {
-                @Override
-                public Aggregator build(String name, AggregatorFactories factories, double interval, double offset,
-                                        BucketOrder order, boolean keyed, long minDocCount, double minBound, double maxBound,
-                                        ValuesSource valuesSource, DocValueFormat formatter, SearchContext context,
-                                        Aggregator parent,
-                                        Map<String, Object> metadata) throws IOException {
-                    return new NumericHistogramAggregator(name, factories, interval, offset, order, keyed, minDocCount, minBound,
-                        maxBound, (ValuesSource.Numeric) valuesSource, formatter, context, parent, metadata);
-                }
-            }
-        );
+            List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.DATE, CoreValuesSourceType.BOOLEAN),
+            (HistogramAggregatorSupplier) NumericHistogramAggregator::new);
     }
 
     public HistogramAggregatorFactory(String name,
@@ -123,10 +92,6 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
                                           Aggregator parent,
                                           boolean collectsFromSingleBucket,
                                           Map<String, Object> metadata) throws IOException {
-        if (collectsFromSingleBucket == false) {
-            return asMultiBucketAggregator(this, searchContext, parent);
-        }
-
         AggregatorSupplier aggregatorSupplier = queryShardContext.getValuesSourceRegistry().getAggregator(config.valueSourceType(),
             HistogramAggregationBuilder.NAME);
         if (aggregatorSupplier instanceof HistogramAggregatorSupplier == false) {
@@ -135,7 +100,7 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
         }
         HistogramAggregatorSupplier histogramAggregatorSupplier = (HistogramAggregatorSupplier) aggregatorSupplier;
         return histogramAggregatorSupplier.build(name, factories, interval, offset, order, keyed, minDocCount, minBound, maxBound,
-            valuesSource, config.format(), searchContext, parent, metadata);
+            valuesSource, config.format(), searchContext, parent, collectsFromSingleBucket, metadata);
     }
 
     @Override
@@ -143,6 +108,6 @@ public final class HistogramAggregatorFactory extends ValuesSourceAggregatorFact
                                         Aggregator parent,
                                         Map<String, Object> metadata) throws IOException {
         return new NumericHistogramAggregator(name, factories, interval, offset, order, keyed, minDocCount, minBound, maxBound,
-            null, config.format(), searchContext, parent, metadata);
+            null, config.format(), searchContext, parent, false, metadata);
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java
index 04fcf2a19b2..b4d0b233b8f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregator.java
@@ -21,27 +21,19 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ScoreMode;
-import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.BucketOrder;
-import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
-import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.EmptyBucketInfo;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.Map;
-import java.util.function.BiConsumer;
 
 /**
  * An aggregator for numeric values. For a given {@code interval},
@@ -49,39 +41,43 @@ import java.util.function.BiConsumer;
  * written as {@code interval * x + offset} and yet is less than or equal to
  * {@code value}.
  */
-public class NumericHistogramAggregator extends BucketsAggregator {
-
+public class NumericHistogramAggregator extends AbstractHistogramAggregator {
     private final ValuesSource.Numeric valuesSource;
-    private final DocValueFormat formatter;
-    private final double interval, offset;
-    private final BucketOrder order;
-    private final boolean keyed;
-    private final long minDocCount;
-    private final double minBound, maxBound;
-    private final LongHash bucketOrds;
-
-    public NumericHistogramAggregator(String name, AggregatorFactories factories, double interval, double offset,
-                                      BucketOrder order, boolean keyed, long minDocCount, double minBound, double maxBound,
-                                      @Nullable ValuesSource.Numeric valuesSource, DocValueFormat formatter,
-                                      SearchContext context, Aggregator parent, Map<String, Object> metadata) throws IOException {
-
-        super(name, factories, context, parent, metadata);
-        if (interval <= 0) {
-            throw new IllegalArgumentException("interval must be positive, got: " + interval);
-        }
-        this.interval = interval;
-        this.offset = offset;
-        this.order = order;
-        order.validate(this);
-        this.keyed = keyed;
-        this.minDocCount = minDocCount;
-        this.minBound = minBound;
-        this.maxBound = maxBound;
-        this.valuesSource = valuesSource;
-        this.formatter = formatter;
-
-        bucketOrds = new LongHash(1, context.bigArrays());
+    public NumericHistogramAggregator(
+        String name,
+        AggregatorFactories factories,
+        double interval,
+        double offset,
+        BucketOrder order,
+        boolean keyed,
+        long minDocCount,
+        double minBound,
+        double maxBound,
+        @Nullable ValuesSource valuesSource,
+        DocValueFormat formatter,
+        SearchContext context,
+        Aggregator parent,
+        boolean collectsFromSingleBucket,
+        Map<String, Object> metadata
+    ) throws IOException {
+        super(
+            name,
+            factories,
+            interval,
+            offset,
+            order,
+            keyed,
+            minDocCount,
+            minBound,
+            maxBound,
+            formatter,
+            context,
+            parent,
+            collectsFromSingleBucket,
+            metadata
+        );
+        this.valuesSource = (ValuesSource.Numeric) valuesSource;
     }
 
     @Override
@@ -102,8 +98,7 @@ public class NumericHistogramAggregator extends BucketsAggregator {
         final SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
         return new LeafBucketCollectorBase(sub, values) {
             @Override
-            public void collect(int doc, long bucket) throws IOException {
-                assert bucket == 0;
+            public void collect(int doc, long owningBucketOrd) throws IOException {
                 if (values.advanceExact(doc)) {
                     final int valuesCount = values.docValueCount();
 
@@ -115,7 +110,7 @@ public class NumericHistogramAggregator extends BucketsAggregator {
                         if (key == previousKey) {
                             continue;
                         }
-                        long bucketOrd = bucketOrds.add(Double.doubleToLongBits(key));
+                        long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
                         if (bucketOrd < 0) { // already seen
                             bucketOrd = -1 - bucketOrd;
                             collectExistingBucket(sub, doc, bucketOrd);
@@ -128,43 +123,4 @@ public class NumericHistogramAggregator extends BucketsAggregator {
             }
         };
     }
-
-    @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-        return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds,
-            (bucketValue, docCount, subAggregationResults) -> {
-                double roundKey = Double.longBitsToDouble(bucketValue);
-                double key = roundKey * interval + offset;
-                return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults);
-            }, buckets -> {
-                // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order
-                CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
-
-                EmptyBucketInfo emptyBucketInfo = null;
-                if (minDocCount == 0) {
-                    emptyBucketInfo = new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
-                }
-                return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
-            });
-    }
-
-    @Override
-    public InternalAggregation buildEmptyAggregation() {
-        EmptyBucketInfo emptyBucketInfo = null;
-        if (minDocCount == 0) {
-            emptyBucketInfo = new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
-        }
-        return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
-    }
-
-    @Override
-    public void doClose() {
-        Releasables.close(bucketOrds);
-    }
-
-    @Override
-    public void collectDebugInfo(BiConsumer<String, Object> add) {
-        add.accept("total_buckets", bucketOrds.size());
-        super.collectDebugInfo(add);
-    }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java
index a121c50af75..c51e1ce0d3d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregator.java
@@ -21,10 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.lease.Releasables;
-import org.elasticsearch.common.util.LongHash;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.mapper.RangeFieldMapper;
 import org.elasticsearch.index.mapper.RangeType;
@@ -32,51 +29,57 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.BucketOrder;
-import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.bucket.BucketsAggregator;
-import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.EmptyBucketInfo;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-public class RangeHistogramAggregator extends BucketsAggregator {
+public class RangeHistogramAggregator extends AbstractHistogramAggregator {
     private final ValuesSource.Range valuesSource;
-    private final DocValueFormat formatter;
-    private final double interval, offset;
-    private final BucketOrder order;
-    private final boolean keyed;
-    private final long minDocCount;
-    private final double minBound, maxBound;
-    private final LongHash bucketOrds;
-
-    public RangeHistogramAggregator(String name, AggregatorFactories factories, double interval, double offset,
-                                    BucketOrder order, boolean keyed, long minDocCount, double minBound, double maxBound,
-                                    @Nullable ValuesSource.Range valuesSource, DocValueFormat formatter,
-                                    SearchContext context, Aggregator parent, Map<String, Object> metadata) throws IOException {
-
-        super(name, factories, context, parent, metadata);
-        if (interval <= 0) {
-            throw new IllegalArgumentException("interval must be positive, got: " + interval);
+    public RangeHistogramAggregator(
+        String name,
+        AggregatorFactories factories,
+        double interval,
+        double offset,
+        BucketOrder order,
+        boolean keyed,
+        long minDocCount,
+        double minBound,
+        double maxBound,
+        @Nullable ValuesSource valuesSource,
+        DocValueFormat formatter,
+        SearchContext context,
+        Aggregator parent,
+        boolean collectsFromSingleBucket,
+        Map<String, Object> metadata
+    ) throws IOException {
+        super(
+            name,
+            factories,
+            interval,
+            offset,
+            order,
+            keyed,
+            minDocCount,
+            minBound,
+            maxBound,
+            formatter,
+            context,
+            parent,
+            collectsFromSingleBucket,
+            metadata
+        );
+        this.valuesSource = (ValuesSource.Range) valuesSource;
+        if (this.valuesSource.rangeType().isNumeric() == false) {
+            throw new IllegalArgumentException(
+                "Expected numeric range type but found non-numeric range [" + this.valuesSource.rangeType().name + "]"
+            );
         }
-        this.interval = interval;
-        this.offset = offset;
-        this.order = order;
-        order.validate(this);
-        this.keyed = keyed;
-        this.minDocCount = minDocCount;
-        this.minBound = minBound;
-        this.maxBound = maxBound;
-        this.valuesSource = valuesSource;
-        this.formatter = formatter;
-
-        bucketOrds = new LongHash(1, context.bigArrays());
     }
 
     @Override
@@ -88,8 +91,7 @@ public class RangeHistogramAggregator extends BucketsAggregator {
         final RangeType rangeType = valuesSource.rangeType();
         return new LeafBucketCollectorBase(sub, values) {
             @Override
-            public void collect(int doc, long bucket) throws IOException {
-                assert bucket == 0;
+            public void collect(int doc, long owningBucketOrd) throws IOException {
                 if (values.advanceExact(doc)) {
                     // Is it possible for valuesCount to be > 1 here? Multiple ranges are encoded into the same BytesRef in the binary doc
                     // values, so it isn't clear what we'd be iterating over.
@@ -113,7 +115,7 @@ public class RangeHistogramAggregator extends BucketsAggregator {
                             continue;
                         }
                         // Bucket collection identical to NumericHistogramAggregator, could be refactored
-                        long bucketOrd = bucketOrds.add(Double.doubleToLongBits(key));
+                        long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
                         if (bucketOrd < 0) { // already seen
                             bucketOrd = -1 - bucketOrd;
                             collectExistingBucket(sub, doc, bucketOrd);
@@ -131,39 +133,4 @@ public class RangeHistogramAggregator extends BucketsAggregator {
             }
         };
     }
-
-    // TODO: buildAggregations and buildEmptyAggregation are literally just copied out of NumericHistogramAggregator. We could refactor
-    // this to an abstract super class, if we wanted to.  Might be overkill.
-    @Override
-    public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException {
-        return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds,
-            (bucketValue, docCount, subAggregationResults) -> {
-                double roundKey = Double.longBitsToDouble(bucketValue);
-                double key = roundKey * interval + offset;
-                return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults);
-            }, buckets -> {
-                // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order
-                CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator());
-
-                EmptyBucketInfo emptyBucketInfo = null;
-                if (minDocCount == 0) {
-                    emptyBucketInfo = new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
-                }
-                return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
-            });
-    }
-
-    @Override
-    public InternalAggregation buildEmptyAggregation() {
-        InternalHistogram.EmptyBucketInfo emptyBucketInfo = null;
-        if (minDocCount == 0) {
-            emptyBucketInfo = new InternalHistogram.EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
-        }
-        return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, metadata());
-    }
-
-    @Override
-    public void doClose() {
-        Releasables.close(bucketOrds);
-    }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/HistogramAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/HistogramAggregatorSupplier.java
index 83bfa8abf70..c349c50230d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/HistogramAggregatorSupplier.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/HistogramAggregatorSupplier.java
@@ -29,9 +29,21 @@ import java.io.IOException;
 import java.util.Map;
 
 public interface HistogramAggregatorSupplier extends AggregatorSupplier {
-    Aggregator build(String name, AggregatorFactories factories, double interval, double offset,
-                     BucketOrder order, boolean keyed, long minDocCount, double minBound, double maxBound,
-                     @Nullable ValuesSource valuesSource, DocValueFormat formatter,
-                     SearchContext context, Aggregator parent,
-                     Map<String, Object> metadata) throws IOException;
+    Aggregator build(
+        String name,
+        AggregatorFactories factories,
+        double interval,
+        double offset,
+        BucketOrder order,
+        boolean keyed,
+        long minDocCount,
+        double minBound,
+        double maxBound,
+        @Nullable ValuesSource valuesSource,
+        DocValueFormat formatter,
+        SearchContext context,
+        Aggregator parent,
+        boolean collectsFromSingleBucket,
+        Map<String, Object> metadata
+    ) throws IOException;
 }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java
index 1d49f871d28..7c289f0d8b1 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java
@@ -23,23 +23,32 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.index.mapper.DateFieldMapper;
-import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.metrics.InternalMin;
+import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
 
+import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.function.Consumer;
 
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
 
 public class NumericHistogramAggregatorTests extends AggregatorTestCase {
 
@@ -55,11 +64,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
                 assertEquals(4, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
@@ -86,11 +93,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
                 assertEquals(4, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
@@ -119,10 +124,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             "2019-11-10T22:55:46");
 
         String fieldName = "date_field";
-        DateFieldMapper.Builder builder = new DateFieldMapper.Builder(fieldName);
-        DateFieldMapper.DateFieldType fieldType = builder.fieldType();
-        fieldType.setName(fieldName);
-        fieldType.setHasDocValues(true);
+        DateFieldMapper.DateFieldType fieldType = dateField(fieldName, DateFieldMapper.Resolution.MILLISECONDS);
 
         try (Directory dir = newDirectory();
              RandomIndexWriter indexWriter = new RandomIndexWriter(random(), dir)) {
@@ -161,11 +163,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(Math.PI);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
                 assertEquals(4, histogram.getBuckets().size());
                 assertEquals(-4 * Math.PI, histogram.getBuckets().get(0).getKey());
                 assertEquals(1, histogram.getBuckets().get(0).getDocCount());
@@ -193,11 +193,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(10)
                 .minDocCount(2);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
                 assertEquals(2, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
@@ -222,11 +220,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(5)
                 .missing(2d);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field"));
                 assertEquals(4, histogram.getBuckets().size());
                 assertEquals(-10d, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
@@ -304,8 +300,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
-            fieldType.setName("field");
+            MappedFieldType fieldType = keywordField("field");
             fieldType.setHasDocValues(true);
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
@@ -331,11 +326,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(5)
                 .offset(Math.PI);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
                 assertEquals(3, histogram.getBuckets().size());
                 assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey());
                 assertEquals(2, histogram.getBuckets().get(0).getDocCount());
@@ -365,11 +358,9 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(interval)
                 .offset(offset);
-            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
 
                 assertEquals(3, histogram.getBuckets().size());
                 assertEquals(-10 + expectedOffset, histogram.getBuckets().get(0).getKey());
@@ -403,7 +394,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
         fieldType.setName("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field"));
             assertEquals(6, histogram.getBuckets().size());
             assertEquals(-15d, histogram.getBuckets().get(0).getKey());
             assertEquals(0, histogram.getBuckets().get(0).getDocCount());
@@ -421,4 +412,37 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase {
             }
         }
     }
+
+    public void testAsSubAgg() throws IOException {
+        AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer").interval(5).subAggregation(
+            new HistogramAggregationBuilder("inner").field("inner").interval(5).subAggregation(
+                new MinAggregationBuilder("min").field("n")));
+        CheckedConsumer<RandomIndexWriter, IOException> buildIndex = iw -> {
+            List<List<IndexableField>> docs = new ArrayList<>();
+            for (int n = 0; n < 10000; n++) {
+                docs.add(org.elasticsearch.common.collect.List.of(
+                    new SortedNumericDocValuesField("outer", n % 100),
+                    new SortedNumericDocValuesField("inner", n / 100),
+                    new SortedNumericDocValuesField("n", n)
+                ));
+            }
+            iw.addDocuments(docs);
+        };
+        Consumer<InternalHistogram> verify = outer -> {
+            assertThat(outer.getBuckets(), hasSize(20));
+            for (int outerIdx = 0; outerIdx < 20; outerIdx++) {
+                InternalHistogram.Bucket outerBucket = outer.getBuckets().get(outerIdx);
+                assertThat(outerBucket.getKey(), equalTo(5.0 * outerIdx));
+                InternalHistogram inner = outerBucket.getAggregations().get("inner");
+                assertThat(inner.getBuckets(), hasSize(20));
+                for (int innerIdx = 0; innerIdx < 20; innerIdx++) {
+                    InternalHistogram.Bucket innerBucket = inner.getBuckets().get(innerIdx);
+                    assertThat(innerBucket.getKey(), equalTo(5.0 * innerIdx));
+                    InternalMin min = innerBucket.getAggregations().get("min");
+                    assertThat(min.getValue(), equalTo(outerIdx * 5.0 + innerIdx * 500.0));
+                }
+            }
+        };
+        testCase(request, new MatchAllDocsQuery(), buildIndex, verify, longField("outer"), longField("inner"), longField("n"));
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java
index 7f36cb38f83..bcd35d0d3c2 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java
@@ -21,29 +21,35 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
 
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.CheckedConsumer;
 import org.elasticsearch.common.network.InetAddresses;
-import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.RangeFieldMapper;
 import org.elasticsearch.index.mapper.RangeType;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.junit.Rule;
-import org.junit.rules.ExpectedException;
+import org.elasticsearch.search.aggregations.metrics.InternalMin;
+import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
 
+import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
-import java.util.Set;
 import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.function.Consumer;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasSize;
 
 public class RangeHistogramAggregatorTests extends AggregatorTestCase {
-
-    @Rule
-    public final ExpectedException expectedException = ExpectedException.none();
-
     public void testDoubles() throws Exception {
         RangeType rangeType = RangeType.DOUBLE;
         try (Directory dir = newDirectory();
@@ -63,12 +69,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-5d, histogram.getBuckets().get(0).getKey());
@@ -111,12 +114,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-5d, histogram.getBuckets().get(0).getKey());
@@ -157,12 +157,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
            try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-5d, histogram.getBuckets().get(0).getKey());
@@ -204,12 +201,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 assertEquals(3, histogram.getBuckets().size());
                 assertEquals(0d, histogram.getBuckets().get(0).getKey());
@@ -243,12 +237,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(Math.PI);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-1 * Math.PI, histogram.getBuckets().get(0).getKey());
@@ -292,12 +283,14 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(5)
                 .minDocCount(2);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = searchAndReduce(
+                    searcher,
+                    new MatchAllDocsQuery(),
+                    aggBuilder,
+                    rangeField("field", rangeType)
+                );
 
                 assertEquals(2, histogram.getBuckets().size());
                 assertEquals(5d, histogram.getBuckets().get(0).getKey());
@@ -329,12 +322,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(5)
                 .offset(4);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 //assertEquals(7, histogram.getBuckets().size());
                 assertEquals(-6d, histogram.getBuckets().get(0).getKey());
@@ -387,12 +377,9 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
                 .field("field")
                 .interval(interval)
                 .offset(offset);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
-
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType));
 
                 assertEquals(6, histogram.getBuckets().size());
                 assertEquals(-5d + expectedOffset, histogram.getBuckets().get(0).getKey());
@@ -431,16 +418,64 @@ public class RangeHistogramAggregatorTests extends AggregatorTestCase {
             HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
                 .field("field")
                 .interval(5);
-            MappedFieldType fieldType = new RangeFieldMapper.Builder("field", rangeType).fieldType();
-            fieldType.setName("field");
             try (IndexReader reader = w.getReader()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
-                expectedException.expect(IllegalArgumentException.class);
-                search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                Exception e = expectThrows(IllegalArgumentException.class, () ->
+                    search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)));
+                assertThat(e.getMessage(), equalTo("Expected numeric range type but found non-numeric range [ip_range]"));
             }
         }
-
     }
 
+    public void testAsSubAgg() throws IOException {
+        AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer").interval(5).subAggregation(
+            new HistogramAggregationBuilder("inner").field("inner").interval(5).subAggregation(
+                new MinAggregationBuilder("min").field("n")));
+        CheckedConsumer<RandomIndexWriter, IOException> buildIndex = iw -> {
+            List<List<IndexableField>> docs = new ArrayList<>();
+            for (int n = 0; n < 10000; n++) {
+                BytesRef outerRange = RangeType.LONG.encodeRanges(org.elasticsearch.common.collect.Set.of(
+                    new RangeFieldMapper.Range(RangeType.LONG, n % 100, n % 100 + 10, true, true)
+                ));
+                BytesRef innerRange = RangeType.LONG.encodeRanges(org.elasticsearch.common.collect.Set.of(
+                    new RangeFieldMapper.Range(RangeType.LONG, n / 100, n / 100 + 10, true, true)
+                ));
+
+                docs.add(org.elasticsearch.common.collect.List.of(
+                    new BinaryDocValuesField("outer", outerRange),
+                    new BinaryDocValuesField("inner", innerRange),
+                    new SortedNumericDocValuesField("n", n)
+                ));
+            }
+            iw.addDocuments(docs);
+        };
+        Consumer<InternalHistogram> verify = outer -> {
+            assertThat(outer.getBuckets(), hasSize(22));
+            for (int outerIdx = 0; outerIdx < 22; outerIdx++) {
+                InternalHistogram.Bucket outerBucket = outer.getBuckets().get(outerIdx);
+                assertThat(outerBucket.getKey(), equalTo(5.0 * outerIdx));
+                InternalHistogram inner = outerBucket.getAggregations().get("inner");
+                assertThat(inner.getBuckets(), hasSize(22));
+                for (int innerIdx = 0; innerIdx < 22; innerIdx++) {
+                    InternalHistogram.Bucket innerBucket = inner.getBuckets().get(innerIdx);
+                    assertThat(innerBucket.getKey(), equalTo(5.0 * innerIdx));
+                    InternalMin min = innerBucket.getAggregations().get("min");
+                    int minOuterIdxWithOverlappingRange = Math.max(0, outerIdx - 2);
+                    int minInnerIdxWithOverlappingRange = Math.max(0, innerIdx - 2);
+                    assertThat(min.getValue(),
+                        equalTo(minOuterIdxWithOverlappingRange * 5.0 + minInnerIdxWithOverlappingRange * 500.0));
+                }
+            }
+        };
+        testCase(
+            request,
+            new MatchAllDocsQuery(),
+            buildIndex,
+            verify,
+            rangeField("outer", RangeType.LONG),
+            rangeField("inner", RangeType.LONG),
+            longField("n")
+        );
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
index 1ea1a840030..25b8b148e50 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
@@ -416,12 +416,6 @@ public class SignificantTermsAggregatorTests extends AggregatorTestCase {
         }
     }
 
-    private NumberFieldMapper.NumberFieldType longField(String name) {
-        NumberFieldMapper.NumberFieldType type = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
-        type.setName(name);
-        return type;
-    }
-
     private void addMixedTextDocs(TextFieldType textFieldType, IndexWriter w) throws IOException {
         for (int i = 0; i < 10; i++) {
             Document doc = new Document();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
index be4996beacb..d7235c08f28 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
@@ -1347,12 +1347,6 @@ public class TermsAggregatorTests extends AggregatorTestCase {
         }
     }
 
-    private NumberFieldMapper.NumberFieldType longField(String name) {
-        NumberFieldMapper.NumberFieldType type = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
-        type.setName(name);
-        return type;
-    }
-
     private void assertNestedTopHitsScore(InternalMultiBucketAggregation<?, ?> terms, boolean withScore) {
         assertThat(terms.getBuckets().size(), equalTo(9));
         int ptr = 9;
diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
index 9ef37ee374a..4e29b1418cf 100644
--- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java
@@ -73,9 +73,11 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.mapper.BinaryFieldMapper;
 import org.elasticsearch.index.mapper.CompletionFieldMapper;
 import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.FieldAliasMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.Mapper.BuilderContext;
@@ -865,4 +867,50 @@ public abstract class AggregatorTestCase extends ESTestCase {
         Releasables.close(releasables);
         releasables.clear();
     }
+
+    /**
+     * Make a {@linkplain DateFieldMapper.DateFieldType} for a {@code date}.
+     */
+    protected DateFieldMapper.DateFieldType dateField(String name, DateFieldMapper.Resolution resolution) {
+        DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name);
+        builder.withResolution(resolution);
+        Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
+        return builder.build(new BuilderContext(settings, new ContentPath())).fieldType();
+    }
+
+    /**
+     * Make a {@linkplain NumberFieldMapper.NumberFieldType} for a {@code double}.
+     */
+    protected NumberFieldMapper.NumberFieldType doubleField(String name) {
+        NumberFieldMapper.NumberFieldType result = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
+        result.setName(name);
+        return result;
+    }
+
+    /**
+     * Make a {@linkplain KeywordFieldMapper.KeywordFieldType} for a {@code keyword}.
+     */
+    protected KeywordFieldMapper.KeywordFieldType keywordField(String name) {
+        KeywordFieldMapper.KeywordFieldType result = new KeywordFieldMapper.KeywordFieldType();
+        result.setName(name);
+        return result;
+    }
+
+    /**
+     * Make a {@linkplain NumberFieldMapper.NumberFieldType} for a {@code long}.
+     */
+    protected NumberFieldMapper.NumberFieldType longField(String name) {
+        NumberFieldMapper.NumberFieldType result = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+        result.setName(name);
+        return result;
+    }
+
+    /**
+     * Make a {@linkplain RangeFieldMapper.RangeFieldType} for a {@code range}.
+     */
+    protected RangeFieldMapper.RangeFieldType rangeField(String name, RangeType rangeType) {
+        RangeFieldMapper.RangeFieldType result = new RangeFieldMapper.Builder(name, rangeType).fieldType();
+        result.setName(name);
+        return result;
+    }
 }