Improve unit test coverage of aggs (#22668)
Add tests for `GlobalAggregator`, `MaxAggregator`, and `InternalMax`. Relates to #22278
parent 24e2847af2
commit e71b26f480
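The core of this change is the new `InternalAggregationTestCase` base class (added below), which builds a random `InternalAggregation`, round-trips it through wire serialization, and feeds a batch of random instances to `reduce()` so each subclass only has to supply `createTestInstance(...)`, `instanceReader()`, and `assertReduced(...)`. As a rough illustration of how the base class would be extended to cover yet another aggregation, here is a hypothetical `InternalSumTests` sketch. It is not part of this commit, and it assumes `InternalSum`'s constructor and stream reader mirror the `InternalMax`/`InternalMin` usages shown in the diff.

// Hypothetical sketch only: InternalSumTests is not part of this commit, and the
// InternalSum constructor shape is assumed to mirror InternalMax/InternalMin below.
package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.List;
import java.util.Map;

public class InternalSumTests extends InternalAggregationTestCase<InternalSum> {
    @Override
    protected InternalSum createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        // Random value plus the randomly generated pipeline aggregators and metadata supplied
        // by the base class; the base class serializes this instance and reduces lists of them.
        return new InternalSum(name, randomDouble(), DocValueFormat.RAW, pipelineAggregators, metaData);
    }

    @Override
    protected Reader<InternalSum> instanceReader() {
        return InternalSum::new;
    }

    @Override
    protected void assertReduced(InternalSum reduced, List<InternalSum> inputs) {
        // Reducing sums adds the per-shard values, so the reduced value is the sum of the inputs.
        // A small delta tolerates floating-point association differences during reduction.
        assertEquals(inputs.stream().mapToDouble(InternalSum::value).sum(), reduced.value(), 0.0001);
    }
}
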
@@ -464,7 +464,6 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]filters[/\\]FiltersParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]filters[/\\]InternalFilters.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]geogrid[/\\]GeoHashGridAggregator.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]global[/\\]GlobalAggregator.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]histogram[/\\]HistogramAggregator.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]InternalMissing.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]MissingAggregator.java" checks="LineLength" />

@@ -33,8 +33,8 @@ import java.util.Map;

public class GlobalAggregator extends SingleBucketAggregator {

-    public GlobalAggregator(String name, AggregatorFactories subFactories, SearchContext aggregationContext, List<PipelineAggregator> pipelineAggregators,
-            Map<String, Object> metaData) throws IOException {
+    public GlobalAggregator(String name, AggregatorFactories subFactories, SearchContext aggregationContext,
+            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
        super(name, subFactories, aggregationContext, null, pipelineAggregators, metaData);
    }

@@ -59,6 +59,7 @@ public class GlobalAggregator extends SingleBucketAggregator {

    @Override
    public InternalAggregation buildEmptyAggregation() {
-        throw new UnsupportedOperationException("global aggregations cannot serve as sub-aggregations, hence should never be called on #buildEmptyAggregations");
+        throw new UnsupportedOperationException(
+                "global aggregations cannot serve as sub-aggregations, hence should never be called on #buildEmptyAggregations");
    }
}

@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.util.List;
import java.util.Map;
+import java.util.Objects;

public class InternalMax extends InternalNumericMetricsAggregation.SingleValue implements Max {
    private final double max;
@@ -88,4 +89,15 @@ public class InternalMax extends InternalNumericMetricsAggregation.SingleValue i
        }
        return builder;
    }
+
+    @Override
+    protected int doHashCode() {
+        return Objects.hash(max);
+    }
+
+    @Override
+    protected boolean doEquals(Object obj) {
+        InternalMax other = (InternalMax) obj;
+        return Objects.equals(max, other.max);
+    }
}

@@ -0,0 +1,74 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations;

import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyList;

public abstract class InternalAggregationTestCase<T extends InternalAggregation> extends AbstractWireSerializingTestCase<T> {
    private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(
            new SearchModule(Settings.EMPTY, false, emptyList()).getNamedWriteables());

    protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData);

    public final void testReduceRandom() {
        List<T> inputs = new ArrayList<>();
        List<InternalAggregation> toReduce = new ArrayList<>();
        int toReduceSize = between(1, 200);
        for (int i = 0; i < toReduceSize; i++) {
            T t = createTestInstance();
            inputs.add(t);
            toReduce.add(t);
        }
        @SuppressWarnings("unchecked")
        T reduced = (T) inputs.get(0).reduce(toReduce, null);
        assertReduced(reduced, inputs);
    }

    protected abstract void assertReduced(T reduced, List<T> inputs);

    @Override
    protected final T createTestInstance() {
        String name = randomAsciiOfLength(5);
        List<PipelineAggregator> pipelineAggregators = new ArrayList<>();
        // TODO populate pipelineAggregators
        Map<String, Object> metaData = new HashMap<>();
        int metaDataCount = randomBoolean() ? 0 : between(1, 10);
        while (metaData.size() < metaDataCount) {
            metaData.put(randomAsciiOfLength(5), randomAsciiOfLength(5));
        }
        return createTestInstance(name, pipelineAggregators, metaData);
    }

    @Override
    protected NamedWriteableRegistry getNamedWriteableRegistry() {
        return namedWriteableRegistry;
    }
}

@@ -0,0 +1,95 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket;

import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;

import java.io.IOException;
import java.util.function.BiConsumer;

import static java.util.Collections.singleton;

public class GlobalAggregatorTests extends AggregatorTestCase {
    public void testNoDocs() throws IOException {
        testCase(iw -> {
            // Intentionally not writing any docs
        }, (global, min) -> {
            assertEquals(0, global.getDocCount());
            assertEquals(Double.POSITIVE_INFINITY, min.getValue(), 0);
        });
    }

    public void testSomeDocs() throws IOException {
        testCase(iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1)));
        }, (global, min) -> {
            assertEquals(2, global.getDocCount());
            assertEquals(1, min.getValue(), 0);
        });
    }

    // Note that `global`'s fancy support for ignoring the query comes from special code in AggregationPhase. We don't test that here.

    private void testCase(CheckedConsumer<RandomIndexWriter, IOException> buildIndex, BiConsumer<InternalGlobal, InternalMin> verify)
            throws IOException {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        buildIndex.accept(indexWriter);
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        GlobalAggregationBuilder aggregationBuilder = new GlobalAggregationBuilder("_name");
        aggregationBuilder.subAggregation(new MinAggregationBuilder("in_global").field("number"));
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number");
        try (GlobalAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
            try {
                aggregator.preCollection();
                indexSearcher.search(new MatchAllDocsQuery(), aggregator);
                aggregator.postCollection();
                InternalGlobal result = (InternalGlobal) aggregator.buildAggregation(0L);
                verify.accept(result, (InternalMin) result.getAggregations().asMap().get("in_global"));
            } finally {
                IOUtils.close(aggregator.subAggregators());
            }
        }
        indexReader.close();
        directory.close();
    }
}

@@ -0,0 +1,48 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.List;
import java.util.Map;

public class InternalMaxTests extends InternalAggregationTestCase<InternalMax> {
    @Override
    protected InternalMax createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        return new InternalMax(name, randomDouble(),
                randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators,
                metaData);
    }

    @Override
    protected Reader<InternalMax> instanceReader() {
        return InternalMax::new;
    }

    @Override
    protected void assertReduced(InternalMax reduced, List<InternalMax> inputs) {
        assertEquals(inputs.stream().mapToDouble(InternalMax::value).max().getAsDouble(), reduced.value(), 0);
    }
}

@@ -0,0 +1,124 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics;

import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregator;

import java.io.IOException;
import java.util.Arrays;
import java.util.function.Consumer;

import static java.util.Collections.singleton;

public class MaxAggregatorTests extends AggregatorTestCase {
    public void testNoDocs() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            // Intentionally not writing any docs
        }, max -> {
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
        });
    }

    public void testNoMatchingField() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1)));
        }, max -> {
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
        });
    }

    public void testSomeMatchesSortedNumericDocValues() throws IOException {
        testCase(new FieldValueQuery("number"), iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(7, max.getValue(), 0);
        });
    }

    public void testSomeMatchesNumericDocValues() throws IOException {
        testCase(new FieldValueQuery("number"), iw -> {
            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new NumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(7, max.getValue(), 0);
        });
    }

    public void testQueryFiltering() throws IOException {
        testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> {
            iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(1, max.getValue(), 0);
        });
    }

    public void testQueryFiltersAll() throws IOException {
        testCase(IntPoint.newRangeQuery("number", -1, 0), iw -> {
            iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(Arrays.asList(new IntPoint("number", 1), new SortedNumericDocValuesField("number", 1)));
        }, max -> {
            assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), 0);
        });
    }

    private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex, Consumer<InternalMax> verify)
            throws IOException {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        buildIndex.accept(indexWriter);
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number");
        try (MaxAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
            aggregator.preCollection();
            indexSearcher.search(query, aggregator);
            aggregator.postCollection();
            verify.accept((InternalMax) aggregator.buildAggregation(0L));
        }
        indexReader.close();
        directory.close();
    }
}

@@ -19,24 +19,20 @@

package org.elasticsearch.search.aggregations.metrics.min;

-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
+import java.util.Map;

-public class InternalMinTests extends AbstractWireSerializingTestCase<InternalMin> {
-
+public class InternalMinTests extends InternalAggregationTestCase<InternalMin> {
    @Override
-    protected InternalMin createTestInstance() {
-        return new InternalMin(randomAsciiOfLengthBetween(1, 20), randomDouble(),
-                randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), Collections.emptyList(),
-                new HashMap<>());
+    protected InternalMin createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
+        return new InternalMin(name, randomDouble(),
+                randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators,
+                metaData);
    }

    @Override
@@ -45,18 +41,7 @@ public class InternalMinTests extends AbstractWireSerializingTestCase<InternalMi
    }

    @Override
-    protected NamedWriteableRegistry getNamedWriteableRegistry() {
-        List<Entry> entries = new ArrayList<>();
-        entries.add(new NamedWriteableRegistry.Entry(DocValueFormat.class, DocValueFormat.BOOLEAN.getWriteableName(),
-                in -> DocValueFormat.BOOLEAN));
-        entries.add(new NamedWriteableRegistry.Entry(DocValueFormat.class, DocValueFormat.DateTime.NAME, DocValueFormat.DateTime::new));
-        entries.add(new NamedWriteableRegistry.Entry(DocValueFormat.class, DocValueFormat.Decimal.NAME, DocValueFormat.Decimal::new));
-        entries.add(new NamedWriteableRegistry.Entry(DocValueFormat.class, DocValueFormat.GEOHASH.getWriteableName(),
-                in -> DocValueFormat.GEOHASH));
-        entries.add(new NamedWriteableRegistry.Entry(DocValueFormat.class, DocValueFormat.IP.getWriteableName(), in -> DocValueFormat.IP));
-        entries.add(
-                new NamedWriteableRegistry.Entry(DocValueFormat.class, DocValueFormat.RAW.getWriteableName(), in -> DocValueFormat.RAW));
-        return new NamedWriteableRegistry(entries);
+    protected void assertReduced(InternalMin reduced, List<InternalMin> inputs) {
+        assertEquals(inputs.stream().mapToDouble(InternalMin::value).min().getAsDouble(), reduced.value(), 0);
    }
-
}