InternalPercentilesBucket should not rely on ordered percents array (#24336)

Currently, InternalPercentilesBucket#percentile() relies on the percents array passed in being in sorted order. This change makes the aggregation store an internal lookup table, constructed from the percents/percentiles arrays passed in, that is used to look up the percentile values.

Closes #24331
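In short, the fix replaces a binary search over a (presumed sorted) percents array with a hash-map lookup built once at construction time. Below is a minimal standalone sketch of that idea; the class PercentileLookupSketch is hypothetical and only mirrors the relevant parts of InternalPercentilesBucket shown in the diff, not the full Elasticsearch class.

// Minimal sketch (not the actual Elasticsearch class): percents[i] maps to
// percentiles[i] via a HashMap, so lookups no longer require a sorted array.
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

class PercentileLookupSketch {
    private final double[] percents;
    private final Map<Double, Double> percentileLookups = new HashMap<>();

    PercentileLookupSketch(double[] percents, double[] percentiles) {
        if (percents.length != percentiles.length) {
            throw new IllegalArgumentException("The number of provided percents and percentiles didn't match. percents: "
                    + Arrays.toString(percents) + ", percentiles: " + Arrays.toString(percentiles));
        }
        this.percents = percents;
        // Build the lookup once; the order of the percents array is irrelevant.
        for (int i = 0; i < percents.length; i++) {
            percentileLookups.put(percents[i], percentiles[i]);
        }
    }

    double percentile(double percent) {
        Double percentile = percentileLookups.get(percent);
        if (percentile == null) {
            throw new IllegalArgumentException("Percent requested [" + percent + "] was not one of the computed percentiles. "
                    + "Available keys are: " + Arrays.toString(percents));
        }
        return percentile;
    }
}

With a lookup table like this, percentile(0.25) works just as well for percents passed as { 0.50, 0.25, 0.99 } as for a sorted array, which is exactly what the new testPercentOrder test below exercises.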
Christoph Büscher 2017-04-26 19:15:48 +02:00 committed by GitHub
parent 0c12d0ce37
commit db1b243343
3 changed files with 122 additions and 4 deletions


@@ -31,21 +31,35 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket {
    private double[] percentiles;
    private double[] percents;
    private final transient Map<Double, Double> percentileLookups = new HashMap<>();

    public InternalPercentilesBucket(String name, double[] percents, double[] percentiles,
                                     DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
                                     Map<String, Object> metaData) {
        super(name, pipelineAggregators, metaData);
        if ((percentiles.length == percents.length) == false) {
            throw new IllegalArgumentException("The number of provided percents and percentiles didn't match. percents: "
                    + Arrays.toString(percents) + ", percentiles: " + Arrays.toString(percentiles));
        }
        this.format = formatter;
        this.percentiles = percentiles;
        this.percents = percents;
        computeLookup();
    }

    private void computeLookup() {
        for (int i = 0; i < percents.length; i++) {
            percentileLookups.put(percents[i], percentiles[i]);
        }
    }

    /**
@@ -56,6 +70,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
        format = in.readNamedWriteable(DocValueFormat.class);
        percentiles = in.readDoubleArray();
        percents = in.readDoubleArray();
        computeLookup();
    }

    @Override
@@ -72,12 +87,12 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
    @Override
    public double percentile(double percent) throws IllegalArgumentException {
-       int index = Arrays.binarySearch(percents, percent);
-       if (index < 0) {
+       Double percentile = percentileLookups.get(percent);
+       if (percentile == null) {
            throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" +
                    " one of the computed percentiles. Available keys are: " + Arrays.toString(percents));
        }
-       return percentiles[index];
+       return percentile;
    }

    @Override
@@ -116,6 +131,17 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
        return builder;
    }

    @Override
    protected boolean doEquals(Object obj) {
        InternalPercentilesBucket that = (InternalPercentilesBucket) obj;
        return Arrays.equals(percents, that.percents) && Arrays.equals(percentiles, that.percentiles);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(Arrays.hashCode(percents), Arrays.hashCode(percentiles));
    }

    public static class Iter implements Iterator<Percentile> {

        private final double[] percents;


@@ -50,7 +50,7 @@ public abstract class InternalPercentilesTestCase<T extends InternalAggregation>
    protected abstract T createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData,
                                            boolean keyed, DocValueFormat format, double[] percents, double[] values);

-   protected static double[] randomPercents() {
+   public static double[] randomPercents() {
        List<Double> randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d);
        double[] percents = new double[randomCdfValues.size()];
        for (int i = 0; i < randomCdfValues.size(); i++) {


@@ -0,0 +1,92 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregationTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase.randomPercents;
public class InternalPercentilesBucketTests extends InternalAggregationTestCase<InternalPercentilesBucket> {

    @Override
    protected InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
                                                           Map<String, Object> metaData) {
        return createTestInstance(name, pipelineAggregators, metaData, randomPercents());
    }

    private static InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
                                                                Map<String, Object> metaData, double[] percents) {
        DocValueFormat format = randomNumericDocValueFormat();
        final double[] percentiles = new double[percents.length];
        for (int i = 0; i < percents.length; ++i) {
            percentiles[i] = frequently() ? randomDouble() : Double.NaN;
        }
        return new InternalPercentilesBucket(name, percents, percentiles, format, pipelineAggregators, metaData);
    }

    @Override
    public void testReduceRandom() {
        expectThrows(UnsupportedOperationException.class,
                () -> createTestInstance("name", Collections.emptyList(), null).reduce(null, null));
    }

    @Override
    protected void assertReduced(InternalPercentilesBucket reduced, List<InternalPercentilesBucket> inputs) {
        // no test since reduce operation is unsupported
    }

    @Override
    protected Writeable.Reader<InternalPercentilesBucket> instanceReader() {
        return InternalPercentilesBucket::new;
    }

    /**
     * check that we don't rely on the percent array order and that the iterator returns the values in the original order
     */
    public void testPercentOrder() {
        final double[] percents = new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 };
        InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), percents);
        Iterator<Percentile> iterator = aggregation.iterator();
        for (double percent : percents) {
            assertTrue(iterator.hasNext());
            Percentile percentile = iterator.next();
            assertEquals(percent, percentile.getPercent(), 0.0d);
            assertEquals(aggregation.percentile(percent), percentile.getValue(), 0.0d);
        }
    }

    public void testErrorOnDifferentArgumentSize() {
        final double[] percents = new double[]{ 0.1, 0.2, 0.3};
        final double[] percentiles = new double[]{ 0.10, 0.2};
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents,
                percentiles, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()));
        assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]",
                e.getMessage());
    }
}