[Test] Use appropriate DocValueFormats in Aggregations tests (#24155)
Some aggregations (like Min, Max, etc.) use an inappropriate DocValueFormat in their tests (such as IP or GeoHash). We should not test aggregations that expect a numeric value with a DocValueFormat like IP. Such a wrong DocValueFormat can also prevent the aggregation from being rendered as ToXContent, which is an issue for the High Level REST Client tests, which expect to be able to parse aggregations back.
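To make the point concrete, here is a minimal sketch (not part of this change; the wrapper class name is made up for illustration) contrasting a numeric-capable format with one that is not. DocValueFormat.RAW and DocValueFormat.Decimal can render the double values that Min/Max/Avg produce, whereas DocValueFormat.IP is meant for encoded IP address bytes, which is exactly the mismatch this commit removes from the tests:

import org.elasticsearch.search.DocValueFormat;

// Illustration only: numeric aggregation tests should use formats that can render numbers.
public class NumericFormatSketch {
    public static void main(String[] args) {
        // Decimal renders doubles through a pattern; "0.86" matches the assertion in testDecimalFormat below.
        DocValueFormat decimal = new DocValueFormat.Decimal("###,###.###");
        System.out.println(decimal.format(0.8598023539251286d)); // 0.86

        // RAW also handles plain numeric values.
        System.out.println(DocValueFormat.RAW.format(1d));

        // DocValueFormat.IP, by contrast, formats encoded IP bytes (see the
        // DocValueFormat.IP.format(new BytesRef(...)) usage in DocValueFormatTests),
        // so it is not an appropriate format for a numeric aggregation.
    }
}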
Commit: 829dd068d6 (parent: 0b15fde27a)
@@ -393,5 +393,22 @@ public interface DocValueFormat extends NamedWriteable {
         public BytesRef parseBytesRef(String value) {
             throw new UnsupportedOperationException();
         }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
+            Decimal that = (Decimal) o;
+            return Objects.equals(pattern, that.pattern);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(pattern);
+        }
     }
 }
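A brief note on the equals()/hashCode() addition above, as a hedged sketch (the wrapper class below is hypothetical, not part of the change): deriving equality from the decimal pattern lets tests that now carry a Decimal format compare aggregation instances by value, for example after a wire serialization round trip, instead of falling back to identity equality.

import org.elasticsearch.search.DocValueFormat;

// Illustration only: Decimal equality is now derived from the pattern string.
public class DecimalEqualitySketch {
    public static void main(String[] args) {
        DocValueFormat a = new DocValueFormat.Decimal("###.##");
        DocValueFormat b = new DocValueFormat.Decimal("###.##");
        System.out.println(a.equals(b));                  // true: same pattern
        System.out.println(a.hashCode() == b.hashCode()); // true: hash is Objects.hash(pattern)
        System.out.println(a.equals(new DocValueFormat.Decimal("000.000"))); // false: different pattern
    }
}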
@@ -19,9 +19,6 @@
 
 package org.elasticsearch.search;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
@@ -34,6 +31,9 @@ import org.elasticsearch.common.network.InetAddresses;
 import org.elasticsearch.test.ESTestCase;
 import org.joda.time.DateTimeZone;
 
+import java.util.ArrayList;
+import java.util.List;
+
 public class DocValueFormatTests extends ESTestCase {
 
     public void testSerialization() throws Exception {
@@ -108,6 +108,18 @@ public class DocValueFormatTests extends ESTestCase {
                 DocValueFormat.IP.format(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1")))));
     }
 
+    public void testDecimalFormat() {
+        DocValueFormat formatter = new DocValueFormat.Decimal("###.##");
+        assertEquals("0", formatter.format(0.0d));
+        assertEquals("1", formatter.format(1d));
+        formatter = new DocValueFormat.Decimal("000.000");
+        assertEquals("-000.500", formatter.format(-0.5));
+        formatter = new DocValueFormat.Decimal("###,###.###");
+        assertEquals("0.86", formatter.format(0.8598023539251286d));
+        formatter = new DocValueFormat.Decimal("###,###.###");
+        assertEquals("859,802.354", formatter.format(0.8598023539251286d * 1_000_000));
+    }
+
     public void testRawParse() {
         assertEquals(-1L, DocValueFormat.RAW.parseLong("-1", randomBoolean(), null));
         assertEquals(1L, DocValueFormat.RAW.parseLong("1", randomBoolean(), null));
@@ -145,4 +157,16 @@
         assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))),
                 DocValueFormat.IP.parseBytesRef("::1"));
     }
+
+    public void testDecimalParse() {
+        DocValueFormat parser = new DocValueFormat.Decimal("###.##");
+        assertEquals(0.0d, parser.parseDouble(randomFrom("0.0", "0", ".0", ".0000"), true, null), 0.0d);
+        assertEquals(-1.0d, parser.parseDouble(randomFrom("-1.0", "-1", "-1.0", "-1.0000"), true, null), 0.0d);
+        assertEquals(0.0d, parser.parseLong("0", true, null), 0.0d);
+        assertEquals(1.0d, parser.parseLong("1", true, null), 0.0d);
+        parser = new DocValueFormat.Decimal("###,###.###");
+        assertEquals(859802.354d, parser.parseDouble("859,802.354", true, null), 0.0d);
+        assertEquals(0.859d, parser.parseDouble("0.859", true, null), 0.0d);
+        assertEquals(0.8598023539251286d, parser.parseDouble("0.8598023539251286", true, null), 0.0d);
+    }
 }
@@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.MockBigArrays;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.AbstractWireSerializingTestCase;
@@ -33,6 +34,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Supplier;
 
 import static java.util.Collections.emptyList;
 
@@ -122,4 +124,15 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
     protected NamedWriteableRegistry getNamedWriteableRegistry() {
         return namedWriteableRegistry;
     }
+
+    /**
+     * @return a random {@link DocValueFormat} that can be used in aggregations which
+     * compute numbers.
+     */
+    protected static DocValueFormat randomNumericDocValueFormat() {
+        final List<Supplier<DocValueFormat>> formats = new ArrayList<>(3);
+        formats.add(() -> DocValueFormat.RAW);
+        formats.add(() -> new DocValueFormat.Decimal(randomFrom("###.##", "###,###.##")));
+        return randomFrom(formats).get();
+    }
 }
@@ -19,7 +19,6 @@
 
 package org.elasticsearch.search.aggregations.bucket;
 
-import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
 import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -47,14 +46,10 @@ public abstract class InternalSingleBucketAggregationTestCase<T extends Internal
     protected final T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         List<InternalAggregation> internal = new ArrayList<>();
         if (hasInternalMax) {
-            internal.add(new InternalMax("max", randomDouble(),
-                    randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), emptyList(),
-                    emptyMap()));
+            internal.add(new InternalMax("max", randomDouble(), randomNumericDocValueFormat(), emptyList(), emptyMap()));
         }
         if (hasInternalMin) {
-            internal.add(new InternalMin("min", randomDouble(),
-                    randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), emptyList(),
-                    emptyMap()));
+            internal.add(new InternalMin("min", randomDouble(), randomNumericDocValueFormat(), emptyList(), emptyMap()));
         }
         // we shouldn't use the full long range here since we sum doc count on reduce, and don't want to overflow the long range there
         long docCount = between(0, Integer.MAX_VALUE);
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.Writeable.Reader;
-import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
 import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -31,9 +30,7 @@ import java.util.Map;
 public class InternalMaxTests extends InternalAggregationTestCase<InternalMax> {
     @Override
     protected InternalMax createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-        return new InternalMax(name, randomDouble(),
-                randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators,
-                metaData);
+        return new InternalMax(name, randomDouble(), randomNumericDocValueFormat(), pipelineAggregators, metaData);
     }
 
     @Override
@@ -32,8 +32,7 @@ public class InternalAvgTests extends InternalAggregationTestCase<InternalAvg> {
     @Override
     protected InternalAvg createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         return new InternalAvg(name, randomDoubleBetween(0, 100000, true), randomNonNegativeLong() % 100000,
-                randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators,
-                metaData);
+                randomNumericDocValueFormat(), pipelineAggregators, metaData);
     }
 
     @Override
@@ -30,9 +30,7 @@ import java.util.Map;
 public class InternalMinTests extends InternalAggregationTestCase<InternalMin> {
     @Override
     protected InternalMin createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-        return new InternalMin(name, randomDouble(),
-                randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators,
-                metaData);
+        return new InternalMin(name, randomDouble(), randomNumericDocValueFormat(), pipelineAggregators, metaData);
     }
 
     @Override
@@ -32,8 +32,7 @@ public class InternalSimpleValueTests extends InternalAggregationTestCase<InternalSimpleValue> {
     @Override
     protected InternalSimpleValue createTestInstance(String name,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-        DocValueFormat formatter = randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH,
-                DocValueFormat.IP, DocValueFormat.RAW);
+        DocValueFormat formatter = randomNumericDocValueFormat();
         double value = randomDoubleBetween(0, 100000, true);
         return new InternalSimpleValue(name, value, formatter, pipelineAggregators, metaData);
     }
@@ -33,12 +33,10 @@ public class InternalDerivativeTests extends InternalAggregationTestCase<InternalDerivative> {
     @Override
     protected InternalDerivative createTestInstance(String name,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
-        DocValueFormat formatter = randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH,
-                DocValueFormat.IP, DocValueFormat.RAW);
+        DocValueFormat formatter = randomNumericDocValueFormat();
         double value = randomDoubleBetween(0, 100000, true);
        double normalizationFactor = randomDoubleBetween(0, 100000, true);
-        return new InternalDerivative(name, value, normalizationFactor, formatter,
-                pipelineAggregators, metaData);
+        return new InternalDerivative(name, value, normalizationFactor, formatter, pipelineAggregators, metaData);
     }
 
     @Override