Add unit tests to histogram aggregations. (#22961)
parent f09c4e1cdb · commit eb26e1a292
InternalAggregations.java
@@ -33,6 +33,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.stream.Collectors;

 import static java.util.Collections.emptyMap;
@@ -208,4 +209,16 @@ public class InternalAggregations implements Aggregations, ToXContent, Streamable
         out.writeNamedWriteableList(aggregations);
     }

+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        return aggregations.equals(((InternalAggregations) obj).aggregations);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(getClass(), aggregations);
+    }
 }
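
Why these overrides matter for the tests added below: InternalAggregationTestCase-style tests compare a deserialized copy against the original, which only works once equals()/hashCode() are structural. A minimal sketch of that round trip (illustrative, not code from this commit; assumes Elasticsearch's BytesStreamOutput and an instance with empty sub-aggregations, so no NamedWriteableRegistry lookup is needed):

    // Sketch only: round-trip through the Writeable machinery and compare
    // using the structural equals()/hashCode() added above.
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);
    InternalHistogram copy = new InternalHistogram(out.bytes().streamInput());
    assertEquals(original, copy);                       // structural equality
    assertEquals(original.hashCode(), copy.hashCode()); // consistent hashing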

InternalHistogram.java
@@ -38,6 +38,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Map;
+import java.util.Objects;

 /**
  * Implementation of {@link Histogram}.
@@ -72,6 +73,24 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<InternalHistogram, InternalHistogram.Bucket>
             aggregations = InternalAggregations.readAggregations(in);
         }

+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null || obj.getClass() != Bucket.class) {
+                return false;
+            }
+            Bucket that = (Bucket) obj;
+            // No need to take the keyed and format parameters into account,
+            // they are already stored and tested on the InternalHistogram object
+            return key == that.key
+                    && docCount == that.docCount
+                    && Objects.equals(aggregations, that.aggregations);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(getClass(), key, docCount, aggregations);
+        }
+
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             out.writeDouble(key);
@@ -162,6 +181,23 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<InternalHistogram, InternalHistogram.Bucket>
             subAggregations.writeTo(out);
         }

+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null || getClass() != obj.getClass()) {
+                return false;
+            }
+            EmptyBucketInfo that = (EmptyBucketInfo) obj;
+            return interval == that.interval
+                    && offset == that.offset
+                    && minBound == that.minBound
+                    && maxBound == that.maxBound
+                    && Objects.equals(subAggregations, that.subAggregations);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(getClass(), interval, offset, minBound, maxBound, subAggregations);
+        }
     }

     private final List<Bucket> buckets;
@@ -429,4 +465,19 @@ public final class InternalHistogram extends InternalMultiBucketAggregation<InternalHistogram, InternalHistogram.Bucket>
         return new Bucket(key.doubleValue(), docCount, keyed, format, aggregations);
     }

+    @Override
+    protected boolean doEquals(Object obj) {
+        InternalHistogram that = (InternalHistogram) obj;
+        return Objects.equals(buckets, that.buckets)
+                && Objects.equals(emptyBucketInfo, that.emptyBucketInfo)
+                && Objects.equals(format, that.format)
+                && Objects.equals(keyed, that.keyed)
+                && Objects.equals(minDocCount, that.minDocCount)
+                && Objects.equals(order, that.order);
+    }
+
+    @Override
+    protected int doHashCode() {
+        return Objects.hash(buckets, emptyBucketInfo, format, keyed, minDocCount, order);
+    }
 }
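
For checking the expected keys in the aggregator tests below: the histogram rounds each value down to its bucket key, per the documented rule key = floor((value - offset) / interval) * interval + offset. A small sketch of that rule (not code from this commit):

    // Histogram bucket-key rounding, as documented for the histogram aggregation.
    static double bucketKey(double value, double interval, double offset) {
        return Math.floor((value - offset) / interval) * interval + offset;
    }
    // e.g. bucketKey(-6, 5, 0) == -10.0 and bucketKey(7, 5, 0) == 5.0, matching
    // testLongs() below; with interval Math.PI, -10 lands in bucket -4 * Math.PI.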

HistogramAggregatorTests.java (new file)
@@ -0,0 +1,248 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket.histogram;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregatorTestCase;
+
+public class HistogramAggregatorTests extends AggregatorTestCase {
+
+    public void testLongs() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (long value : new long[] {7, 3, -10, -6, 5, 50}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", value));
+                w.addDocument(doc);
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(5);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(4, histogram.getBuckets().size());
+                assertEquals(-10d, histogram.getBuckets().get(0).getKey());
+                assertEquals(2, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(1).getKey());
+                assertEquals(1, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(2).getKey());
+                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(50d, histogram.getBuckets().get(3).getKey());
+                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+            }
+        }
+    }
+
+    public void testDoubles() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 50.1}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
+                w.addDocument(doc);
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(5);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(4, histogram.getBuckets().size());
+                assertEquals(-10d, histogram.getBuckets().get(0).getKey());
+                assertEquals(2, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(1).getKey());
+                assertEquals(1, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(2).getKey());
+                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(50d, histogram.getBuckets().get(3).getKey());
+                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+            }
+        }
+    }
+
+    public void testIrrationalInterval() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (long value : new long[] {3, 2, -10, 5, -9}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", value));
+                w.addDocument(doc);
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(Math.PI);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(4, histogram.getBuckets().size());
+                assertEquals(-4 * Math.PI, histogram.getBuckets().get(0).getKey());
+                assertEquals(1, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(-3 * Math.PI, histogram.getBuckets().get(1).getKey());
+                assertEquals(1, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(2).getKey());
+                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(Math.PI, histogram.getBuckets().get(3).getKey());
+                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+            }
+        }
+    }
+
+    public void testMinDocCount() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (long value : new long[] {7, 3, -10, -6, 5, 50}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", value));
+                w.addDocument(doc);
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(10)
+                    .minDocCount(2);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(2, histogram.getBuckets().size());
+                assertEquals(-10d, histogram.getBuckets().get(0).getKey());
+                assertEquals(2, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(1).getKey());
+                assertEquals(3, histogram.getBuckets().get(1).getDocCount());
+            }
+        }
+    }
+
+    public void testMissing() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (long value : new long[] {7, 3, -10, -6, 5, 50}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", value));
+                w.addDocument(doc);
+                w.addDocument(new Document());
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(5)
+                    .missing(2d);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(4, histogram.getBuckets().size());
+                assertEquals(-10d, histogram.getBuckets().get(0).getKey());
+                assertEquals(2, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(1).getKey());
+                assertEquals(7, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(2).getKey());
+                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(50d, histogram.getBuckets().get(3).getKey());
+                assertEquals(1, histogram.getBuckets().get(3).getDocCount());
+            }
+        }
+    }
+
+    public void testOffset() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (double value : new double[] {9.3, 3.2, -5, -6.5, 5.3}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
+                w.addDocument(doc);
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(5)
+                    .offset(Math.PI);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(3, histogram.getBuckets().size());
+                assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey());
+                assertEquals(2, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(Math.PI, histogram.getBuckets().get(1).getKey());
+                assertEquals(2, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(5 + Math.PI, histogram.getBuckets().get(2).getKey());
+                assertEquals(1, histogram.getBuckets().get(2).getDocCount());
+            }
+        }
+    }
+
+    public void testExtendedBounds() throws Exception {
+        try (Directory dir = newDirectory();
+                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            for (double value : new double[] {3.2, -5, -4.5, 4.3}) {
+                Document doc = new Document();
+                doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
+                w.addDocument(doc);
+            }
+
+            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg")
+                    .field("field")
+                    .interval(5)
+                    .extendedBounds(-12, 13);
+            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
+            fieldType.setName("field");
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                Histogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertEquals(6, histogram.getBuckets().size());
+                assertEquals(-15d, histogram.getBuckets().get(0).getKey());
+                assertEquals(0, histogram.getBuckets().get(0).getDocCount());
+                assertEquals(-10d, histogram.getBuckets().get(1).getKey());
+                assertEquals(0, histogram.getBuckets().get(1).getDocCount());
+                assertEquals(-5d, histogram.getBuckets().get(2).getKey());
+                assertEquals(2, histogram.getBuckets().get(2).getDocCount());
+                assertEquals(0d, histogram.getBuckets().get(3).getKey());
+                assertEquals(2, histogram.getBuckets().get(3).getDocCount());
+                assertEquals(5d, histogram.getBuckets().get(4).getKey());
+                assertEquals(0, histogram.getBuckets().get(4).getDocCount());
+                assertEquals(10d, histogram.getBuckets().get(5).getKey());
+                assertEquals(0, histogram.getBuckets().get(5).getDocCount());
+            }
+        }
+    }
+}
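
Note which helpers the tests above use: search(...) runs the aggregator and returns its unreduced output, while searchAndReduce(...) also runs the reduce phase. That is presumably why testMinDocCount and testExtendedBounds are the two that call searchAndReduce: pruning buckets below minDocCount and materializing empty buckets for extendedBounds both happen at reduce time. Both helpers come from the AggregatorTestCase base class.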

InternalHistogramTests.java (new file)
@@ -0,0 +1,75 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket.histogram;
+
+import org.apache.lucene.util.TestUtil;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
+import org.elasticsearch.search.aggregations.InternalAggregations;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class InternalHistogramTests extends InternalAggregationTestCase<InternalHistogram> {
+
+    @Override
+    protected InternalHistogram createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) {
+        final boolean keyed = randomBoolean();
+        final DocValueFormat format = DocValueFormat.RAW;
+        final int base = randomInt(50) - 30;
+        final int numBuckets = randomInt(10);
+        final int interval = randomIntBetween(1, 3);
+        List<InternalHistogram.Bucket> buckets = new ArrayList<>();
+        for (int i = 0; i < numBuckets; ++i) {
+            final int docCount = TestUtil.nextInt(random(), 1, 50);
+            buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, InternalAggregations.EMPTY));
+        }
+        return new InternalHistogram(name, buckets, (InternalOrder) InternalHistogram.Order.KEY_ASC,
+                1, null, format, keyed, pipelineAggregators, metaData);
+    }
+
+    @Override
+    protected void assertReduced(InternalHistogram reduced, List<InternalHistogram> inputs) {
+        Map<Double, Long> expectedCounts = new TreeMap<>();
+        for (Histogram histogram : inputs) {
+            for (Histogram.Bucket bucket : histogram.getBuckets()) {
+                expectedCounts.compute((Double) bucket.getKey(),
+                        (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
+            }
+        }
+        Map<Double, Long> actualCounts = new TreeMap<>();
+        for (Histogram.Bucket bucket : reduced.getBuckets()) {
+            actualCounts.compute((Double) bucket.getKey(),
+                    (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount());
+        }
+        assertEquals(expectedCounts, actualCounts);
+    }
+
+    @Override
+    protected Reader<InternalHistogram> instanceReader() {
+        return InternalHistogram::new;
+    }
+
+}
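
For orientation (the harness itself is not part of this diff, so this is a hedged reading): InternalAggregationTestCase drives the three overrides above. Broadly, it creates random instances with createTestInstance, round-trips them through instanceReader() to exercise serialization together with the equals()/hashCode() added in this commit, and reduces a batch of random instances before handing the result to assertReduced. The assertReduced override here verifies the reduce step by summing doc counts per bucket key across the inputs and comparing against the reduced histogram's counts.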