Add parsing for percentiles ranks (#23974)

This commit adds the logic for parsing percentile ranks aggregations.
Tanguy Leroux 2017-04-18 10:19:30 +02:00 committed by GitHub
parent 5ccb4a0bbd
commit c0036d8516
10 changed files with 502 additions and 68 deletions


@@ -393,5 +393,22 @@ public interface DocValueFormat extends NamedWriteable {
        public BytesRef parseBytesRef(String value) {
            throw new UnsupportedOperationException();
        }

        @Override
        public int hashCode() {
            return Objects.hash(pattern);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            Decimal that = (Decimal) o;
            return Objects.equals(pattern, that.pattern);
        }
    }
}


@@ -0,0 +1,177 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.ParsedAggregation;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public abstract class AbstractParsedPercentiles extends ParsedAggregation implements Iterable<Percentile> {

    private final Map<Double, Double> percentiles = new LinkedHashMap<>();
    private final Map<Double, String> percentilesAsString = new HashMap<>();

    private boolean keyed;

    void addPercentile(Double key, Double value) {
        percentiles.put(key, value);
    }

    void addPercentileAsString(Double key, String valueAsString) {
        percentilesAsString.put(key, valueAsString);
    }

    Double getPercentile(double percent) {
        if (percentiles.isEmpty()) {
            return Double.NaN;
        }
        return percentiles.get(percent);
    }

    String getPercentileAsString(double percent) {
        String valueAsString = percentilesAsString.get(percent);
        if (valueAsString != null) {
            return valueAsString;
        }
        Double value = getPercentile(percent);
        if (value != null) {
            return DocValueFormat.RAW.format(value);
        }
        return null;
    }

    void setKeyed(boolean keyed) {
        this.keyed = keyed;
    }

    @Override
    public Iterator<Percentile> iterator() {
        return new Iterator<Percentile>() {
            final Iterator<Map.Entry<Double, Double>> iterator = percentiles.entrySet().iterator();

            @Override
            public boolean hasNext() {
                return iterator.hasNext();
            }

            @Override
            public Percentile next() {
                Map.Entry<Double, Double> next = iterator.next();
                return new InternalPercentile(next.getKey(), next.getValue());
            }
        };
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        final boolean valuesAsString = (percentilesAsString.isEmpty() == false);
        if (keyed) {
            builder.startObject(CommonFields.VALUES.getPreferredName());
            for (Map.Entry<Double, Double> percentile : percentiles.entrySet()) {
                Double key = percentile.getKey();
                builder.field(String.valueOf(key), percentile.getValue());
                if (valuesAsString) {
                    builder.field(key + "_as_string", getPercentileAsString(key));
                }
            }
            builder.endObject();
        } else {
            builder.startArray(CommonFields.VALUES.getPreferredName());
            for (Map.Entry<Double, Double> percentile : percentiles.entrySet()) {
                Double key = percentile.getKey();
                builder.startObject();
                {
                    builder.field(CommonFields.KEY.getPreferredName(), key);
                    builder.field(CommonFields.VALUE.getPreferredName(), percentile.getValue());
                    if (valuesAsString) {
                        builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), getPercentileAsString(key));
                    }
                }
                builder.endObject();
            }
            builder.endArray();
        }
        return builder;
    }

    protected static void declarePercentilesFields(ObjectParser<? extends AbstractParsedPercentiles, Void> objectParser) {
        ParsedAggregation.declareCommonFields(objectParser);

        objectParser.declareField((parser, aggregation, context) -> {
            XContentParser.Token token = parser.currentToken();
            if (token == XContentParser.Token.START_OBJECT) {
                aggregation.setKeyed(true);
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token.isValue()) {
                        if (token == XContentParser.Token.VALUE_NUMBER) {
                            aggregation.addPercentile(Double.valueOf(parser.currentName()), parser.doubleValue());
                        } else if (token == XContentParser.Token.VALUE_STRING) {
                            int i = parser.currentName().indexOf("_as_string");
                            if (i > 0) {
                                double key = Double.valueOf(parser.currentName().substring(0, i));
                                aggregation.addPercentileAsString(key, parser.text());
                            } else {
                                aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.valueOf(parser.text()));
                            }
                        }
                    }
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                aggregation.setKeyed(false);

                String currentFieldName = null;
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    Double key = null;
                    Double value = null;
                    String valueAsString = null;

                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                        } else if (token.isValue()) {
                            if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                                key = parser.doubleValue();
                            } else if (CommonFields.VALUE.getPreferredName().equals(currentFieldName)) {
                                value = parser.doubleValue();
                            } else if (CommonFields.VALUE_AS_STRING.getPreferredName().equals(currentFieldName)) {
                                valueAsString = parser.text();
                            }
                        }
                    }
                    if (key != null) {
                        aggregation.addPercentile(key, value);
                        if (valueAsString != null) {
                            aggregation.addPercentileAsString(key, valueAsString);
                        }
                    }
                }
            }
        }, CommonFields.VALUES, ObjectParser.ValueType.OBJECT_ARRAY);
    }
}
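Editor's note: a minimal sketch (not part of this commit) of the two "values" shapes that declarePercentilesFields(...) accepts, built with XContentBuilder purely for illustration. The field names mirror the CommonFields constants used above; the rank value 1.0 with percentile 5.0 is a made-up example, and org.elasticsearch.common.xcontent.XContentFactory is assumed to be imported.

    // Illustration only: builds the keyed and non-keyed "values" shapes that the
    // parser above understands. The concrete numbers are hypothetical.
    static void buildExampleShapes() throws IOException {
        XContentBuilder keyed = XContentFactory.jsonBuilder();
        keyed.startObject().startObject("values")
                .field("1.0", 5.0)                 // percentile keyed by rank value
                .field("1.0_as_string", "5.0")     // optional formatted variant
            .endObject().endObject();
        // -> {"values":{"1.0":5.0,"1.0_as_string":"5.0"}}

        XContentBuilder notKeyed = XContentFactory.jsonBuilder();
        notKeyed.startObject().startArray("values")
                .startObject()
                    .field("key", 1.0)
                    .field("value", 5.0)
                    .field("value_as_string", "5.0")
                .endObject()
            .endArray().endObject();
        // -> {"values":[{"key":1.0,"value":5.0,"value_as_string":"5.0"}]}
    }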


@@ -0,0 +1,33 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles;

public abstract class ParsedPercentileRanks extends AbstractParsedPercentiles implements PercentileRanks {

    @Override
    public double percent(double value) {
        return getPercentile(value);
    }

    @Override
    public String percentAsString(double value) {
        return getPercentileAsString(value);
    }
}
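Editor's note: a short usage sketch (not part of this commit). Once a response has been parsed, the result is consumed through the existing PercentileRanks API, exactly as the test case further down does; the helper method and its parameter below are hypothetical.

    // Hypothetical helper: prints every parsed rank/percentile pair. The calls
    // used here (iteration over Percentile, percent(), percentAsString()) are the
    // ones implemented by AbstractParsedPercentiles and ParsedPercentileRanks above.
    static void printRanks(PercentileRanks ranks) {
        for (Percentile entry : ranks) {
            double value = entry.getValue();                  // the rank value
            double percent = ranks.percent(value);            // parsed percentile
            String asString = ranks.percentAsString(value);   // formatted value, if present
            System.out.println(value + " => " + percent + " (" + asString + ")");
        }
    }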


@@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;

import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks;

import java.io.IOException;

public class ParsedHDRPercentileRanks extends ParsedPercentileRanks {

    @Override
    protected String getType() {
        return InternalHDRPercentileRanks.NAME;
    }

    private static ObjectParser<ParsedHDRPercentileRanks, Void> PARSER =
            new ObjectParser<>(ParsedHDRPercentileRanks.class.getSimpleName(), true, ParsedHDRPercentileRanks::new);
    static {
        AbstractParsedPercentiles.declarePercentilesFields(PARSER);
    }

    public static ParsedHDRPercentileRanks fromXContent(XContentParser parser, String name) throws IOException {
        ParsedHDRPercentileRanks aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}


@@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;

import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks;

import java.io.IOException;

public class ParsedTDigestPercentileRanks extends ParsedPercentileRanks {

    @Override
    protected String getType() {
        return InternalTDigestPercentileRanks.NAME;
    }

    private static ObjectParser<ParsedTDigestPercentileRanks, Void> PARSER =
            new ObjectParser<>(ParsedTDigestPercentileRanks.class.getSimpleName(), true, ParsedTDigestPercentileRanks::new);
    static {
        AbstractParsedPercentiles.declarePercentilesFields(PARSER);
    }

    public static ParsedTDigestPercentileRanks fromXContent(XContentParser parser, String name) throws IOException {
        ParsedTDigestPercentileRanks aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }
}
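Editor's note: both concrete parsers are looked up by their aggregation type name. A minimal registration sketch (not part of this commit, and redundant with the InternalAggregationTestCase changes below) showing how they could be wired into a NamedXContentRegistry, following the pattern already used for ParsedCardinality; the variable names are hypothetical.

    // Sketch only: mirrors the entries added to getNamedXContents() in the test case below.
    List<NamedXContentRegistry.Entry> entries = new ArrayList<>();
    entries.add(new NamedXContentRegistry.Entry(Aggregation.class,
            new ParseField(InternalHDRPercentileRanks.NAME),
            (parser, name) -> ParsedHDRPercentileRanks.fromXContent(parser, (String) name)));
    entries.add(new NamedXContentRegistry.Entry(Aggregation.class,
            new ParseField(InternalTDigestPercentileRanks.NAME),
            (parser, name) -> ParsedTDigestPercentileRanks.fromXContent(parser, (String) name)));
    NamedXContentRegistry registry = new NamedXContentRegistry(entries);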


@@ -19,27 +19,61 @@
package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.ParsedCardinality;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.containsString;

public abstract class InternalAggregationTestCase<T extends InternalAggregation> extends AbstractWireSerializingTestCase<T> {

    private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(
            new SearchModule(Settings.EMPTY, false, emptyList()).getNamedWriteables());

    private final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(getNamedXContents());

    static List<NamedXContentRegistry.Entry> getNamedXContents() {
        Map<String, ContextParser<Object, ? extends Aggregation>> namedXContents = new HashMap<>();
        namedXContents.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c));
        namedXContents.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c));
        namedXContents.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c));
        return namedXContents.entrySet().stream()
                .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
                .collect(Collectors.toList());
    }

    protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData);

    /** Return an instance on an unmapped field. */
@@ -125,4 +159,54 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
    protected NamedWriteableRegistry getNamedWriteableRegistry() {
        return namedWriteableRegistry;
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return namedXContentRegistry;
    }

    public final void testFromXContent() throws IOException {
        final NamedXContentRegistry xContentRegistry = xContentRegistry();
        final T aggregation = createTestInstance();

        final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
        final boolean humanReadable = randomBoolean();
        final XContentType xContentType = randomFrom(XContentType.values());
        final BytesReference originalBytes = toShuffledXContent(aggregation, xContentType, params, humanReadable);

        Aggregation parsedAggregation;
        try (XContentParser parser = xContentType.xContent().createParser(xContentRegistry, originalBytes)) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());

            String currentName = parser.currentName();
            int i = currentName.indexOf(InternalAggregation.TYPED_KEYS_DELIMITER);
            String aggType = currentName.substring(0, i);
            String aggName = currentName.substring(i + 1);

            parsedAggregation = parser.namedObject(Aggregation.class, aggType, aggName);

            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
            assertNull(parser.nextToken());

            assertEquals(aggregation.getName(), parsedAggregation.getName());
            assertEquals(aggregation.getMetaData(), parsedAggregation.getMetaData());

            assertTrue(parsedAggregation instanceof ParsedAggregation);
            assertEquals(aggregation.getType(), ((ParsedAggregation) parsedAggregation).getType());

            final BytesReference parsedBytes = toXContent((ToXContent) parsedAggregation, xContentType, params, humanReadable);
            assertToXContentEquivalent(originalBytes, parsedBytes, xContentType);

            assertFromXContent(aggregation, (ParsedAggregation) parsedAggregation);
        } catch (NamedXContentRegistry.UnknownNamedObjectException e) {
            //norelease Remove this catch block when all aggregations can be parsed back.
            assertThat(e.getMessage(), containsString("Unknown Aggregation"));
        }
    }

    //norelease TODO make abstract
    protected void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) {
    }
}


@@ -19,33 +19,21 @@
package org.elasticsearch.search.aggregations.metrics.cardinality;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;

public class InternalCardinalityTests extends InternalAggregationTestCase<InternalCardinality> {
    private static List<HyperLogLogPlusPlus> algos;
    private static int p;
@@ -86,39 +74,13 @@ public class InternalCardinalityTests extends InternalAggregationTestCase<Intern
        }
    }

    public void testFromXContent() throws IOException {
        InternalCardinality cardinality = createTestInstance();
        String type = cardinality.getWriteableName();
        String name = cardinality.getName();
        ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true"));
        boolean humanReadable = randomBoolean();
        XContentType xContentType = randomFrom(XContentType.values());
        BytesReference originalBytes = toShuffledXContent(cardinality, xContentType, params, humanReadable);

        ParsedCardinality parsed;
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
            assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
            assertEquals(type + "#" + name, parser.currentName());
            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());

            parsed = (ParsedCardinality) parser.namedObject(Aggregation.class, type, name);
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
            assertNull(parser.nextToken());
        }

        assertEquals(cardinality.getName(), parsed.getName());
        assertEquals(cardinality.getValue(), parsed.getValue(), Double.MIN_VALUE);
        assertEquals(cardinality.getValueAsString(), parsed.getValueAsString());
        assertEquals(cardinality.getMetaData(), parsed.getMetaData());

        BytesReference finalAgg = XContentHelper.toXContent(parsed, xContentType, params, humanReadable);
        assertToXContentEquivalent(originalBytes, finalAgg, xContentType);
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        NamedXContentRegistry.Entry entry = new NamedXContentRegistry.Entry(Aggregation.class,
                new ParseField(CardinalityAggregationBuilder.NAME),
                (parser, name) -> ParsedCardinality.fromXContent(parser, (String) name));
        return new NamedXContentRegistry(Collections.singletonList(entry));
    protected void assertFromXContent(InternalCardinality aggregation, ParsedAggregation parsedAggregation) {
        assertTrue(parsedAggregation instanceof ParsedCardinality);
        ParsedCardinality parsed = (ParsedCardinality) parsedAggregation;

        assertEquals(aggregation.getValue(), parsed.getValue(), Double.MIN_VALUE);
        assertEquals(aggregation.getValueAsString(), parsed.getValueAsString());
    }

    @After


@@ -0,0 +1,68 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.util.List;
import java.util.Map;

public abstract class InternalPercentilesRanksTestCase<T extends InternalAggregation> extends InternalAggregationTestCase<T> {

    @Override
    protected final T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
        final boolean keyed = randomBoolean();
        final DocValueFormat format = randomFrom(DocValueFormat.RAW, new DocValueFormat.Decimal("###.##"));

        List<Double> randomCdfValues = randomSubsetOf(randomIntBetween(1, 5), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d);
        double[] cdfValues = new double[randomCdfValues.size()];
        for (int i = 0; i < randomCdfValues.size(); i++) {
            cdfValues[i] = randomCdfValues.get(i);
        }
        return createTestInstance(name, pipelineAggregators, metaData, cdfValues, keyed, format);
    }

    protected abstract T createTestInstance(String name, List<PipelineAggregator> aggregators, Map<String, Object> metadata,
                                            double[] cdfValues, boolean keyed, DocValueFormat format);

    @Override
    protected final void assertFromXContent(T aggregation, ParsedAggregation parsedAggregation) {
        assertTrue(aggregation instanceof PercentileRanks);
        PercentileRanks percentileRanks = (PercentileRanks) aggregation;

        assertTrue(parsedAggregation instanceof PercentileRanks);
        PercentileRanks parsedPercentileRanks = (PercentileRanks) parsedAggregation;

        for (Percentile percentile : percentileRanks) {
            Double value = percentile.getValue();
            assertEquals(percentileRanks.percent(value), parsedPercentileRanks.percent(value), 0);
            assertEquals(percentileRanks.percentAsString(value), parsedPercentileRanks.percentAsString(value));
        }

        Class<? extends ParsedPercentileRanks> parsedClass = parsedParsedPercentileRanksClass();
        assertNotNull(parsedClass);
        assertTrue(parsedClass.isInstance(parsedAggregation));
    }

    protected abstract Class<? extends ParsedPercentileRanks> parsedParsedPercentileRanksClass();
}


@@ -22,28 +22,20 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.hdr
import org.HdrHistogram.DoubleHistogram;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
import java.util.Map;

public class InternalHDRPercentilesRanksTests extends InternalAggregationTestCase<InternalHDRPercentileRanks> {
public class InternalHDRPercentilesRanksTests extends InternalPercentilesRanksTestCase<InternalHDRPercentileRanks> {

    @Override
    protected InternalHDRPercentileRanks createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
            Map<String, Object> metaData) {
        double[] cdfValues = new double[] { 0.5 };
        int numberOfSignificantValueDigits = 3;
        DoubleHistogram state = new DoubleHistogram(numberOfSignificantValueDigits);
        int numValues = randomInt(100);
        for (int i = 0; i < numValues; ++i) {
            state.recordValue(randomDouble());
        }
        boolean keyed = false;
        DocValueFormat format = DocValueFormat.RAW;
        return new InternalHDRPercentileRanks(name, cdfValues, state, keyed, format, pipelineAggregators, metaData);
    protected InternalHDRPercentileRanks createTestInstance(String name, List<PipelineAggregator> aggregators, Map<String, Object> metadata,
                                                            double[] cdfValues, boolean keyed, DocValueFormat format) {
        DoubleHistogram state = new DoubleHistogram(3);
        return new InternalHDRPercentileRanks(name, cdfValues, state, keyed, format, aggregators, metadata);
    }

    @Override
@@ -61,4 +53,8 @@ public class InternalHDRPercentilesRanksTests extends InternalAggregationTestCas
        return InternalHDRPercentileRanks::new;
    }

    @Override
    protected Class<? extends ParsedPercentileRanks> parsedParsedPercentileRanksClass() {
        return ParsedHDRPercentileRanks.class;
    }
}


@@ -21,26 +21,25 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.List;
import java.util.Map;

public class InternalTDigestPercentilesRanksTests extends InternalAggregationTestCase<InternalTDigestPercentileRanks> {
public class InternalTDigestPercentilesRanksTests extends InternalPercentilesRanksTestCase<InternalTDigestPercentileRanks> {

    @Override
    protected InternalTDigestPercentileRanks createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
            Map<String, Object> metaData) {
        double[] cdfValues = new double[] { 0.5 };
    protected InternalTDigestPercentileRanks createTestInstance(String name, List<PipelineAggregator> aggregators,
                                                                Map<String, Object> metadata,
                                                                double[] cdfValues, boolean keyed, DocValueFormat format) {
        TDigestState state = new TDigestState(100);
        int numValues = randomInt(100);
        for (int i = 0; i < numValues; ++i) {
            state.add(randomDouble());
        }
        boolean keyed = false;
        DocValueFormat format = DocValueFormat.RAW;
        return new InternalTDigestPercentileRanks(name, cdfValues, state, keyed, format, pipelineAggregators, metaData);
        return new InternalTDigestPercentileRanks(name, cdfValues, state, keyed, format, aggregators, metadata);
    }

    @Override
@@ -71,4 +70,8 @@ public class InternalTDigestPercentilesRanksTests extends InternalAggregationTes
        return InternalTDigestPercentileRanks::new;
    }

    @Override
    protected Class<? extends ParsedPercentileRanks> parsedParsedPercentileRanksClass() {
        return ParsedTDigestPercentileRanks.class;
    }
}