Add parsing for InternalPercentilesBucket (#24330)
commit 93415cdfb4
parent b46c39ea2e
ParsedPercentiles.java
@@ -33,8 +33,8 @@ import java.util.Map;
 
 public abstract class ParsedPercentiles extends ParsedAggregation implements Iterable<Percentile> {
 
-    private final Map<Double, Double> percentiles = new LinkedHashMap<>();
-    private final Map<Double, String> percentilesAsString = new HashMap<>();
+    protected final Map<Double, Double> percentiles = new LinkedHashMap<>();
+    protected final Map<Double, String> percentilesAsString = new HashMap<>();
 
     private boolean keyed;
@@ -130,7 +130,6 @@ public abstract class ParsedPercentiles extends ParsedAggregation implements Ite
                 if (token.isValue()) {
                     if (token == XContentParser.Token.VALUE_NUMBER) {
                         aggregation.addPercentile(Double.valueOf(parser.currentName()), parser.doubleValue());
-
                     } else if (token == XContentParser.Token.VALUE_STRING) {
                         int i = parser.currentName().indexOf("_as_string");
                         if (i > 0) {
@@ -140,6 +139,8 @@ public abstract class ParsedPercentiles extends ParsedAggregation implements Ite
                             aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.valueOf(parser.text()));
                         }
                     }
+                } else if (token == XContentParser.Token.VALUE_NULL) {
+                    aggregation.addPercentile(Double.valueOf(parser.currentName()), Double.NaN);
                 }
             }
         } else if (token == XContentParser.Token.START_ARRAY) {
@@ -162,6 +163,8 @@ public abstract class ParsedPercentiles extends ParsedAggregation implements Ite
                     } else if (CommonFields.VALUE_AS_STRING.getPreferredName().equals(currentFieldName)) {
                         valueAsString = parser.text();
                     }
+                } else if (token == XContentParser.Token.VALUE_NULL) {
+                    value = Double.NaN;
                 }
             }
             if (key != null) {
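Note, for illustration only and not part of the diff: the two VALUE_NULL branches above mean that a percentile rendered as JSON null in the rest output (for example "99.0": null inside the keyed "values" object) is parsed back as Double.NaN. A minimal sketch of what the parsed aggregation effectively stores, assuming a response fragment like {"values": {"50.0": 36.0, "99.0": null}}; the class name and numbers below are made up.

import java.util.LinkedHashMap;
import java.util.Map;

class NullPercentileSketch {
    static Map<Double, Double> parsedValues() {
        // mirrors the LinkedHashMap that ParsedPercentiles fills via addPercentile
        Map<Double, Double> percentiles = new LinkedHashMap<>();
        percentiles.put(50.0, 36.0);
        percentiles.put(99.0, Double.NaN); // a JSON null percentile is kept as NaN internally
        // ParsedPercentilesBucket.doXContentBody (added below) renders the NaN entry as null again
        return percentiles;
    }
}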
ParsedPercentilesBucket.java (new file)
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
+
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
+
+import java.io.IOException;
+import java.util.Map.Entry;
+
+public class ParsedPercentilesBucket extends ParsedPercentiles implements Percentiles {
+
+    @Override
+    public String getType() {
+        return PercentilesBucketPipelineAggregationBuilder.NAME;
+    }
+
+    @Override
+    public double percentile(double percent) throws IllegalArgumentException {
+        Double value = percentiles.get(percent);
+        if (value == null) {
+            throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" +
+                    " one of the computed percentiles. Available keys are: " + percentiles.keySet());
+        }
+        return value;
+    }
+
+    @Override
+    public String percentileAsString(double percent) {
+        double value = percentile(percent); // check availability as unformatted value
+        String valueAsString = percentilesAsString.get(percent);
+        if (valueAsString != null) {
+            return valueAsString;
+        } else {
+            return Double.toString(value);
+        }
+    }
+
+    @Override
+    public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject("values");
+        for (Entry<Double, Double> percent : percentiles.entrySet()) {
+            double value = percent.getValue();
+            boolean hasValue = !(Double.isNaN(value));
+            Double key = percent.getKey();
+            builder.field(Double.toString(key), hasValue ? value : null);
+            String valueAsString = percentilesAsString.get(key);
+            if (hasValue && valueAsString != null) {
+                builder.field(key + "_as_string", valueAsString);
+            }
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    private static ObjectParser<ParsedPercentilesBucket, Void> PARSER =
+            new ObjectParser<>(ParsedPercentilesBucket.class.getSimpleName(), true, ParsedPercentilesBucket::new);
+
+    static {
+        ParsedPercentiles.declarePercentilesFields(PARSER);
+    }
+
+    public static ParsedPercentilesBucket fromXContent(XContentParser parser, String name) throws IOException {
+        ParsedPercentilesBucket aggregation = PARSER.parse(parser, null);
+        aggregation.setName(name);
+        return aggregation;
+    }
+}
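A hypothetical usage sketch, not part of the commit: it only exercises the factory and accessors defined in the new class above, and assumes an XContentParser already positioned on the aggregation body (how that parser is obtained from a search response is elided); the wrapper class and method names are made up.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket;

class ParsedPercentilesBucketUsage {
    static ParsedPercentilesBucket read(XContentParser parser, String aggName) throws IOException {
        ParsedPercentilesBucket parsed = ParsedPercentilesBucket.fromXContent(parser, aggName);
        double median = parsed.percentile(50.0);                 // IllegalArgumentException if 50.0 was not computed
        String medianAsString = parsed.percentileAsString(50.0); // falls back to Double.toString(median) if unformatted
        for (Percentile percentile : parsed) {                   // iteration follows the order the keys were parsed in
            System.out.println(percentile.getPercent() + " -> " + percentile.getValue());
        }
        return parsed;
    }
}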
InternalAggregationTestCase.java
@@ -63,6 +63,8 @@ import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.ParsedBucketMetricValue;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder;
@@ -99,6 +101,7 @@ public abstract class InternalAggregationTestCase<T extends InternalAggregation>
         namedXContents.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c));
         namedXContents.put(InternalTDigestPercentiles.NAME, (p, c) -> ParsedTDigestPercentiles.fromXContent(p, (String) c));
         namedXContents.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c));
+        namedXContents.put(PercentilesBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c));
         namedXContents.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c));
         namedXContents.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c));
         namedXContents.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c));
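For illustration only, not part of the commit: the entry added above follows the same ContextParser pattern as its neighbours, mapping the pipeline aggregation's type name to the new fromXContent factory, with the aggregation name passed through as the parsing context. A hedged, standalone restatement of that wiring (the class name below is made up).

import java.io.IOException;

import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket;

class PercentilesBucketEntrySketch {
    // same shape as the namedXContents entry above: the context object carries the aggregation name
    static final ContextParser<Object, Aggregation> PERCENTILES_BUCKET_PARSER =
            (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c);

    static Aggregation parse(XContentParser parser, String aggName) throws IOException {
        return PERCENTILES_BUCKET_PARSER.parse(parser, aggName);
    }
}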
AbstractPercentilesTestCase.java
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.junit.Before;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -39,7 +40,7 @@ public abstract class AbstractPercentilesTestCase<T extends InternalAggregation
 
     @Before
     public void init() {
-        percents = randomPercents();
+        percents = randomPercents(false);
         keyed = randomBoolean();
         docValueFormat = randomNumericDocValueFormat();
     }
@@ -70,12 +71,15 @@ public abstract class AbstractPercentilesTestCase<T extends InternalAggregation
         }
     }
 
-    protected static double[] randomPercents() {
+    public static double[] randomPercents(boolean sorted) {
         List<Double> randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d);
         double[] percents = new double[randomCdfValues.size()];
         for (int i = 0; i < randomCdfValues.size(); i++) {
             percents[i] = randomCdfValues.get(i);
         }
+        if (sorted) {
+            Arrays.sort(percents);
+        }
         return percents;
     }
 }
InternalHDRPercentilesTests.java
@@ -23,8 +23,8 @@ import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
 import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.util.Arrays;
@@ -70,7 +70,7 @@ public class InternalHDRPercentilesTests extends InternalPercentilesTestCase<Int
     }
 
     public void testIterator() {
-        final double[] percents = randomPercents();
+        final double[] percents = randomPercents(false);
         final double[] values = new double[frequently() ? randomIntBetween(1, 10) : 0];
         for (int i = 0; i < values.length; ++i) {
             values[i] = randomDouble();
InternalPercentilesBucketTests.java
@@ -22,9 +22,11 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
+import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
@@ -66,6 +68,22 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
         return InternalPercentilesBucket::new;
     }
 
+    @Override
+    protected final void assertFromXContent(InternalPercentilesBucket aggregation, ParsedAggregation parsedAggregation) {
+        assertTrue(parsedAggregation instanceof ParsedPercentilesBucket);
+        ParsedPercentilesBucket parsedPercentiles = (ParsedPercentilesBucket) parsedAggregation;
+
+        for (Percentile percentile : aggregation) {
+            Double percent = percentile.getPercent();
+            assertEquals(aggregation.percentile(percent), parsedPercentiles.percentile(percent), 0);
+            // we cannot ensure we get the same as_string output for Double.NaN values since they are rendered as
+            // null and we don't have a formatted string representation in the rest output
+            if (Double.isNaN(aggregation.percentile(percent)) == false) {
+                assertEquals(aggregation.percentileAsString(percent), parsedPercentiles.percentileAsString(percent));
+            }
+        }
+    }
+
     /**
      * check that we don't rely on the percent array order and that the iterator returns the values in the original order
      */
@@ -89,4 +107,14 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
         assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]",
                 e.getMessage());
     }
+
+    public void testParsedAggregationIteratorOrder() throws IOException {
+        final InternalPercentilesBucket aggregation = createTestInstance();
+        final Iterable<Percentile> parsedAggregation = parseAndAssert(aggregation, false);
+        Iterator<Percentile> it = aggregation.iterator();
+        Iterator<Percentile> parsedIt = parsedAggregation.iterator();
+        while (it.hasNext()) {
+            assertEquals(it.next(), parsedIt.next());
+        }
+    }
 }