Added keyed response to pipeline percentile aggregations 22302 (#36392)

Closes #22302
This commit is contained in:
João Barbosa 2018-12-14 21:22:54 +00:00 committed by Mark Tozzi
parent c7ac9ef826
commit 276726aea2
7 changed files with 173 additions and 41 deletions

View File

@ -27,6 +27,7 @@ A `percentiles_bucket` aggregation looks like this in isolation:
details)|Optional | `skip` details)|Optional | `skip`
|`format` |format to apply to the output value of this aggregation |Optional | `null` |`format` |format to apply to the output value of this aggregation |Optional | `null`
|`percents` |The list of percentiles to calculate |Optional | `[ 1, 5, 25, 50, 75, 95, 99 ]` |`percents` |The list of percentiles to calculate |Optional | `[ 1, 5, 25, 50, 75, 95, 99 ]`
|`keyed` |Flag which returns the percentiles as a hash keyed by percent instead of an array of key-value pairs |Optional | `true`
|=== |===
The following snippet calculates the percentiles for the total monthly `sales` buckets: The following snippet calculates the percentiles for the total monthly `sales` buckets:

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.pipeline; package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -39,9 +40,10 @@ import java.util.Objects;
public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket { public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket {
private double[] percentiles; private double[] percentiles;
private double[] percents; private double[] percents;
private boolean keyed = true;
private final transient Map<Double, Double> percentileLookups = new HashMap<>(); private final transient Map<Double, Double> percentileLookups = new HashMap<>();
InternalPercentilesBucket(String name, double[] percents, double[] percentiles, InternalPercentilesBucket(String name, double[] percents, double[] percentiles, boolean keyed,
DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) { Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData); super(name, pipelineAggregators, metaData);
@ -52,6 +54,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
this.format = formatter; this.format = formatter;
this.percentiles = percentiles; this.percentiles = percentiles;
this.percents = percents; this.percents = percents;
this.keyed = keyed;
computeLookup(); computeLookup();
} }
@ -69,6 +72,11 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
format = in.readNamedWriteable(DocValueFormat.class); format = in.readNamedWriteable(DocValueFormat.class);
percentiles = in.readDoubleArray(); percentiles = in.readDoubleArray();
percents = in.readDoubleArray(); percents = in.readDoubleArray();
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
keyed = in.readBoolean();
}
computeLookup(); computeLookup();
} }
@ -77,6 +85,10 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
out.writeNamedWriteable(format); out.writeNamedWriteable(format);
out.writeDoubleArray(percentiles); out.writeDoubleArray(percentiles);
out.writeDoubleArray(percents); out.writeDoubleArray(percents);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
out.writeBoolean(keyed);
}
} }
@Override @Override
@ -120,17 +132,33 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation
@Override @Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startObject("values"); if (keyed) {
for (double percent : percents) { builder.startObject("values");
double value = percentile(percent); for (double percent : percents) {
boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value)); double value = percentile(percent);
String key = String.valueOf(percent); boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
builder.field(key, hasValue ? value : null); String key = String.valueOf(percent);
if (hasValue && format != DocValueFormat.RAW) { builder.field(key, hasValue ? value : null);
builder.field(key + "_as_string", percentileAsString(percent)); if (hasValue && format != DocValueFormat.RAW) {
builder.field(key + "_as_string", percentileAsString(percent));
}
} }
builder.endObject();
} else {
builder.startArray("values");
for (double percent : percents) {
double value = percentile(percent);
boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
builder.startObject();
builder.field("key", percent);
builder.field("value", hasValue ? value : null);
if (hasValue && format != DocValueFormat.RAW) {
builder.field(String.valueOf(percent) + "_as_string", percentileAsString(percent));
}
builder.endObject();
}
builder.endArray();
} }
builder.endObject();
return builder; return builder;
} }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline;
import com.carrotsearch.hppc.DoubleArrayList; import com.carrotsearch.hppc.DoubleArrayList;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -40,8 +41,10 @@ public class PercentilesBucketPipelineAggregationBuilder
extends BucketMetricsPipelineAggregationBuilder<PercentilesBucketPipelineAggregationBuilder> { extends BucketMetricsPipelineAggregationBuilder<PercentilesBucketPipelineAggregationBuilder> {
public static final String NAME = "percentiles_bucket"; public static final String NAME = "percentiles_bucket";
static final ParseField PERCENTS_FIELD = new ParseField("percents"); static final ParseField PERCENTS_FIELD = new ParseField("percents");
static final ParseField KEYED_FIELD = new ParseField("keyed");
private double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; private double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 };
private boolean keyed = true;
public PercentilesBucketPipelineAggregationBuilder(String name, String bucketsPath) { public PercentilesBucketPipelineAggregationBuilder(String name, String bucketsPath) {
super(name, NAME, new String[] { bucketsPath }); super(name, NAME, new String[] { bucketsPath });
@ -54,24 +57,32 @@ public class PercentilesBucketPipelineAggregationBuilder
throws IOException { throws IOException {
super(in, NAME); super(in, NAME);
percents = in.readDoubleArray(); percents = in.readDoubleArray();
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
keyed = in.readBoolean();
}
} }
@Override @Override
protected void innerWriteTo(StreamOutput out) throws IOException { protected void innerWriteTo(StreamOutput out) throws IOException {
out.writeDoubleArray(percents); out.writeDoubleArray(percents);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
out.writeBoolean(keyed);
}
} }
/** /**
* Get the percentages to calculate percentiles for in this aggregation * Get the percentages to calculate percentiles for in this aggregation
*/ */
public double[] percents() { public double[] getPercents() {
return percents; return percents;
} }
/** /**
* Set the percentages to calculate percentiles for in this aggregation * Set the percentages to calculate percentiles for in this aggregation
*/ */
public PercentilesBucketPipelineAggregationBuilder percents(double[] percents) { public PercentilesBucketPipelineAggregationBuilder setPercents(double[] percents) {
if (percents == null) { if (percents == null) {
throw new IllegalArgumentException("[percents] must not be null: [" + name + "]"); throw new IllegalArgumentException("[percents] must not be null: [" + name + "]");
} }
@ -85,9 +96,24 @@ public class PercentilesBucketPipelineAggregationBuilder
return this; return this;
} }
/**
 * Set whether the XContent response of this aggregation should be keyed.
 * When {@code true} (the default) the percentile values are rendered as an
 * object keyed by percent; when {@code false} they are rendered as an array
 * of {@code key}/{@code value} entries.
 *
 * @param keyed whether the output should be keyed
 * @return this builder, to allow method chaining
 */
public PercentilesBucketPipelineAggregationBuilder setKeyed(boolean keyed) {
this.keyed = keyed;
return this;
}
/**
 * Get whether the XContent response of this aggregation should be keyed.
 *
 * @return {@code true} if percentiles are rendered as a keyed object,
 *         {@code false} if they are rendered as an array of key/value entries
 */
public boolean getKeyed() {
return keyed;
}
@Override @Override
protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException { protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
return new PercentilesBucketPipelineAggregator(name, percents, bucketsPaths, gapPolicy(), formatter(), metaData); return new PercentilesBucketPipelineAggregator(name, percents, keyed, bucketsPaths, gapPolicy(), formatter(), metaData);
} }
@Override @Override
@ -108,6 +134,7 @@ public class PercentilesBucketPipelineAggregationBuilder
if (percents != null) { if (percents != null) {
builder.array(PERCENTS_FIELD.getPreferredName(), percents); builder.array(PERCENTS_FIELD.getPreferredName(), percents);
} }
builder.field(KEYED_FIELD.getPreferredName(), keyed);
return builder; return builder;
} }
@ -122,7 +149,11 @@ public class PercentilesBucketPipelineAggregationBuilder
double[] percents = (double[]) params.get(PERCENTS_FIELD.getPreferredName()); double[] percents = (double[]) params.get(PERCENTS_FIELD.getPreferredName());
if (percents != null) { if (percents != null) {
factory.percents(percents); factory.setPercents(percents);
}
Boolean keyed = (Boolean) params.get(KEYED_FIELD.getPreferredName());
if (keyed != null) {
factory.setKeyed(keyed);
} }
return factory; return factory;
@ -139,6 +170,10 @@ public class PercentilesBucketPipelineAggregationBuilder
params.put(PERCENTS_FIELD.getPreferredName(), percents.toArray()); params.put(PERCENTS_FIELD.getPreferredName(), percents.toArray());
return true; return true;
} }
else if (KEYED_FIELD.match(field, parser.getDeprecationHandler()) && token == XContentParser.Token.VALUE_BOOLEAN){
params.put(KEYED_FIELD.getPreferredName(), parser.booleanValue());
return true;
}
return false; return false;
} }
@ -146,13 +181,13 @@ public class PercentilesBucketPipelineAggregationBuilder
@Override @Override
protected int innerHashCode() { protected int innerHashCode() {
return Arrays.hashCode(percents); return Objects.hash(Arrays.hashCode(percents), keyed);
} }
@Override @Override
protected boolean innerEquals(BucketMetricsPipelineAggregationBuilder<PercentilesBucketPipelineAggregationBuilder> obj) { protected boolean innerEquals(BucketMetricsPipelineAggregationBuilder<PercentilesBucketPipelineAggregationBuilder> obj) {
PercentilesBucketPipelineAggregationBuilder other = (PercentilesBucketPipelineAggregationBuilder) obj; PercentilesBucketPipelineAggregationBuilder other = (PercentilesBucketPipelineAggregationBuilder) obj;
return Objects.deepEquals(percents, other.percents); return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed);
} }
@Override @Override

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.pipeline; package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
@ -34,12 +35,14 @@ import java.util.Map;
public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator { public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator {
private final double[] percents; private final double[] percents;
private boolean keyed = true;
private List<Double> data; private List<Double> data;
PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy, PercentilesBucketPipelineAggregator(String name, double[] percents, boolean keyed, String[] bucketsPaths,
DocValueFormat formatter, Map<String, Object> metaData) { GapPolicy gapPolicy, DocValueFormat formatter, Map<String, Object> metaData) {
super(name, bucketsPaths, gapPolicy, formatter, metaData); super(name, bucketsPaths, gapPolicy, formatter, metaData);
this.percents = percents; this.percents = percents;
this.keyed = keyed;
} }
/** /**
@ -48,11 +51,19 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg
public PercentilesBucketPipelineAggregator(StreamInput in) throws IOException { public PercentilesBucketPipelineAggregator(StreamInput in) throws IOException {
super(in); super(in);
percents = in.readDoubleArray(); percents = in.readDoubleArray();
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
keyed = in.readBoolean();
}
} }
@Override @Override
public void innerWriteTo(StreamOutput out) throws IOException { public void innerWriteTo(StreamOutput out) throws IOException {
out.writeDoubleArray(percents); out.writeDoubleArray(percents);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
out.writeBoolean(keyed);
}
} }
@Override @Override
@ -91,6 +102,6 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg
// todo need postCollection() to clean up temp sorted data? // todo need postCollection() to clean up temp sorted data?
return new InternalPercentilesBucket(name(), percents, percentiles, format, pipelineAggregators, metadata); return new InternalPercentilesBucket(name(), percents, percentiles, keyed, format, pipelineAggregators, metadata);
} }
} }

View File

@ -19,7 +19,11 @@
package org.elasticsearch.search.aggregations.pipeline; package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation;
@ -40,23 +44,29 @@ import java.util.Map;
import java.util.function.Predicate; import java.util.function.Predicate;
import static org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase.randomPercents; import static org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase.randomPercents;
import static org.hamcrest.Matchers.equalTo;
public class InternalPercentilesBucketTests extends InternalAggregationTestCase<InternalPercentilesBucket> { public class InternalPercentilesBucketTests extends InternalAggregationTestCase<InternalPercentilesBucket> {
@Override @Override
protected InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, protected InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) { Map<String, Object> metaData) {
return createTestInstance(name, pipelineAggregators, metaData, randomPercents()); return createTestInstance(name, pipelineAggregators, metaData, randomPercents(), true);
} }
private static InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators, private static InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData, double[] percents) { Map<String, Object> metaData, double[] percents, boolean keyed) {
DocValueFormat format = randomNumericDocValueFormat();
final double[] percentiles = new double[percents.length]; final double[] percentiles = new double[percents.length];
for (int i = 0; i < percents.length; ++i) { for (int i = 0; i < percents.length; ++i) {
percentiles[i] = frequently() ? randomDouble() : Double.NaN; percentiles[i] = frequently() ? randomDouble() : Double.NaN;
} }
return new InternalPercentilesBucket(name, percents, percentiles, format, pipelineAggregators, metaData); return createTestInstance(name, pipelineAggregators, metaData, percents, percentiles, keyed);
}
private static InternalPercentilesBucket createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData, double[] percents, double[] percentiles, boolean keyed) {
DocValueFormat format = randomNumericDocValueFormat();
return new InternalPercentilesBucket(name, percents, percentiles, keyed, format, pipelineAggregators, metaData);
} }
@Override @Override
@ -96,7 +106,8 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
*/ */
public void testPercentOrder() { public void testPercentOrder() {
final double[] percents = new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 }; final double[] percents = new double[]{ 0.50, 0.25, 0.01, 0.99, 0.60 };
InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), percents); InternalPercentilesBucket aggregation = createTestInstance("test", Collections.emptyList(),
Collections.emptyMap(), percents, randomBoolean());
Iterator<Percentile> iterator = aggregation.iterator(); Iterator<Percentile> iterator = aggregation.iterator();
for (double percent : percents) { for (double percent : percents) {
assertTrue(iterator.hasNext()); assertTrue(iterator.hasNext());
@ -110,7 +121,7 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
final double[] percents = new double[]{ 0.1, 0.2, 0.3}; final double[] percents = new double[]{ 0.1, 0.2, 0.3};
final double[] percentiles = new double[]{ 0.10, 0.2}; final double[] percentiles = new double[]{ 0.10, 0.2};
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents, IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new InternalPercentilesBucket("test", percents,
percentiles, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap())); percentiles, randomBoolean(), DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()));
assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]", assertEquals("The number of provided percents and percentiles didn't match. percents: [0.1, 0.2, 0.3], percentiles: [0.1, 0.2]",
e.getMessage()); e.getMessage());
} }
@ -125,6 +136,52 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
} }
} }
/**
 * Checks the XContent rendering when every percentile is "empty"
 * (NaN or infinite): each value must be serialized as {@code null},
 * in both the keyed (object) and non-keyed (array) response formats.
 */
public void testEmptyRanksXContent() throws IOException {
    double[] percents = new double[]{1,2,3};
    double[] percentiles = new double[3];
    for (int i = 0; i < 3; ++i) {
        // NaN / +Infinity are both treated as "no value" by doXContentBody
        percentiles[i] = randomBoolean() ? Double.NaN : Double.POSITIVE_INFINITY;
    }
    boolean keyed = randomBoolean();
    InternalPercentilesBucket agg = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(),
        percents, percentiles, keyed);
    // Render the aggregation body into pretty-printed JSON for comparison
    XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
    builder.startObject();
    agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();
    String expected;
    if (keyed) {
        // keyed=true: "values" is an object mapping percent -> null
        expected = "{\n" +
            " \"values\" : {\n" +
            " \"1.0\" : null,\n" +
            " \"2.0\" : null,\n" +
            " \"3.0\" : null\n" +
            " }\n" +
            "}";
    } else {
        // keyed=false: "values" is an array of {key, value} objects
        expected = "{\n" +
            " \"values\" : [\n" +
            " {\n" +
            " \"key\" : 1.0,\n" +
            " \"value\" : null\n" +
            " },\n" +
            " {\n" +
            " \"key\" : 2.0,\n" +
            " \"value\" : null\n" +
            " },\n" +
            " {\n" +
            " \"key\" : 3.0,\n" +
            " \"value\" : null\n" +
            " }\n" +
            " ]\n" +
            "}";
    }
    assertThat(Strings.toString(builder), equalTo(expected));
}
@Override @Override
protected Predicate<String> excludePathsFromXContentInsertion() { protected Predicate<String> excludePathsFromXContentInsertion() {
return path -> path.endsWith(CommonFields.VALUES.getPreferredName()); return path -> path.endsWith(CommonFields.VALUES.getPreferredName());
@ -162,7 +219,7 @@ public class InternalPercentilesBucketTests extends InternalAggregationTestCase<
default: default:
throw new AssertionError("Illegal randomisation branch"); throw new AssertionError("Illegal randomisation branch");
} }
return new InternalPercentilesBucket(name, percents, percentiles, formatter, pipelineAggregators, metaData); return new InternalPercentilesBucket(name, percents, percentiles, randomBoolean(), formatter, pipelineAggregators, metaData);
} }
private double[] extractPercentiles(InternalPercentilesBucket instance) { private double[] extractPercentiles(InternalPercentilesBucket instance) {

View File

@ -100,7 +100,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)) .extendedBounds(minRandomValue, maxRandomValue))
.addAggregation(percentilesBucket("percentiles_bucket", "histo>_count") .addAggregation(percentilesBucket("percentiles_bucket", "histo>_count")
.percents(PERCENTS)).get(); .setPercents(PERCENTS)).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -138,7 +138,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)) .extendedBounds(minRandomValue, maxRandomValue))
.subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count")
.percents(PERCENTS))).get(); .setPercents(PERCENTS))).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -180,7 +180,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.prepareSearch("idx") .prepareSearch("idx")
.addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")
.percents(PERCENTS)).get(); .setPercents(PERCENTS)).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -254,7 +254,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.extendedBounds(minRandomValue, maxRandomValue) .extendedBounds(minRandomValue, maxRandomValue)
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum")
.percents(PERCENTS))).get(); .setPercents(PERCENTS))).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -308,7 +308,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum")
.gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) .gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS)
.percents(PERCENTS))) .setPercents(PERCENTS)))
.get(); .get();
assertSearchResponse(response); assertSearchResponse(response);
@ -354,7 +354,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*"))
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")
.percents(PERCENTS)).get(); .setPercents(PERCENTS)).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -377,7 +377,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*"))
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")
.percents(PERCENTS)).get(); .setPercents(PERCENTS)).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -406,7 +406,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
client().prepareSearch("idx") client().prepareSearch("idx")
.addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")
.percents(badPercents)).get(); .setPercents(badPercents)).get();
fail("Illegal percent's were provided but no exception was thrown."); fail("Illegal percent's were provided but no exception was thrown.");
} catch (Exception e) { } catch (Exception e) {
@ -440,7 +440,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)) .extendedBounds(minRandomValue, maxRandomValue))
.subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count")
.percents(badPercents))).get(); .setPercents(badPercents))).get();
fail("Illegal percent's were provided but no exception was thrown."); fail("Illegal percent's were provided but no exception was thrown.");
} catch (Exception e) { } catch (Exception e) {
@ -470,9 +470,9 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.subAggregation( .subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)) .extendedBounds(minRandomValue, maxRandomValue))
.subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").percents(PERCENTS))) .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count").setPercents(PERCENTS)))
.addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket.50") .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket.50")
.percents(PERCENTS)).get(); .setPercents(PERCENTS)).get();
assertSearchResponse(response); assertSearchResponse(response);
@ -530,9 +530,9 @@ public class PercentilesBucketIT extends ESIntegTestCase {
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)) .extendedBounds(minRandomValue, maxRandomValue))
.subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count") .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count")
.percents(percent))) .setPercents(percent)))
.addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket[99.9]") .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket[99.9]")
.percents(percent)).get(); .setPercents(percent)).get();
assertSearchResponse(response); assertSearchResponse(response);

View File

@ -43,7 +43,7 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<Percen
for (int i = 0; i < numPercents; i++) { for (int i = 0; i < numPercents; i++) {
percents[i] = randomDoubleBetween(0.0, 100.0, false); percents[i] = randomDoubleBetween(0.0, 100.0, false);
} }
factory.percents(percents); factory.setPercents(percents);
} }
return factory; return factory;
} }
@ -61,7 +61,7 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<Percen
PercentilesBucketPipelineAggregationBuilder builder = (PercentilesBucketPipelineAggregationBuilder) parse(createParser(content)); PercentilesBucketPipelineAggregationBuilder builder = (PercentilesBucketPipelineAggregationBuilder) parse(createParser(content));
assertThat(builder.percents(), equalTo(new double[]{0.0, 20.0, 50.0, 75.99})); assertThat(builder.getPercents(), equalTo(new double[]{0.0, 20.0, 50.0, 75.99}));
} }
public void testValidate() { public void testValidate() {