Move pipeline agg validation to coordinating node (backport of #53669) (#54019)

This moves the pipeline aggregation validation from the data node to the
coordinating node so that we, eventually, can stop sending pipeline
aggregations to the data nodes entirely. In fact, it moves it into the
"request validation" stage so multiple errors can be accumulated and
sent back to the requester for the entire request. We can't always take
advantage of that, but it'll be nice for folks not to have to play
whack-a-mole with validation.

This is implemented by replacing `PipelineAggregationBuilder#validate`
with:
```
protected abstract void validate(ValidationContext context);
```

The `ValidationContext` handles the accumulation of validation failures,
provides access to the aggregation's siblings, and implements a few
validation utility methods.
This commit is contained in:
Nik Everett 2020-03-23 17:22:56 -04:00 committed by GitHub
parent 43199a8c82
commit b9bfba2c8b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
39 changed files with 608 additions and 602 deletions

View File

@ -277,6 +277,11 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
addValidationError("[request_cache] cannot be used in a scroll context", validationException);
}
}
if (source != null) {
if (source.aggregations() != null) {
validationException = source.aggregations().validate(validationException);
}
}
return validationException;
}

View File

@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -283,8 +284,6 @@ public class AggregatorFactories {
return false;
}
public Builder addAggregator(AggregationBuilder factory) {
if (!names.add(factory.name)) {
throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
@ -298,15 +297,51 @@ public class AggregatorFactories {
return this;
}
/**
* Validates the entire aggregation tree starting from its root, folding any
* failures into the supplied exception (which may be {@code null} when no
* earlier request-validation errors exist).
*/
public ActionRequestValidationException validate(ActionRequestValidationException validationException) {
PipelineAggregationBuilder.ValidationContext rootContext = PipelineAggregationBuilder.ValidationContext
.forTreeRoot(aggregationBuilders, pipelineAggregatorBuilders, validationException);
validatePipelines(rootContext);
return validateChildren(rootContext.getValidationException());
}
/**
* Validate the pipeline aggregations in this factory, accumulating any
* failures into the given context instead of throwing.
*/
private void validatePipelines(PipelineAggregationBuilder.ValidationContext context) {
List<PipelineAggregationBuilder> orderedPipelineAggregators;
try {
// Resolving the order may throw IllegalArgumentException when it cannot be resolved.
orderedPipelineAggregators = resolvePipelineAggregatorOrder(pipelineAggregatorBuilders, aggregationBuilders);
} catch (IllegalArgumentException iae) {
// Record the failure and stop: without a resolved order there is nothing to validate.
context.addValidationError(iae.getMessage());
return;
}
for (PipelineAggregationBuilder builder : orderedPipelineAggregators) {
builder.validate(context);
}
}
/**
* Validate the children of this factory, recursing into each sub-aggregation
* and threading the accumulated validation exception through every level.
*/
private ActionRequestValidationException validateChildren(ActionRequestValidationException e) {
for (AggregationBuilder agg : aggregationBuilders) {
// Each child gets an "inside tree" context so pipeline aggs can inspect their parent.
PipelineAggregationBuilder.ValidationContext context =
PipelineAggregationBuilder.ValidationContext.forInsideTree(agg, e);
agg.factoriesBuilder.validatePipelines(context);
e = agg.factoriesBuilder.validateChildren(context.getValidationException());
}
return e;
}
public AggregatorFactories build(QueryShardContext queryShardContext, AggregatorFactory parent) throws IOException {
if (aggregationBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
return EMPTY;
}
List<PipelineAggregationBuilder> orderedpipelineAggregators = null;
orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregationBuilders);
for (PipelineAggregationBuilder builder : orderedpipelineAggregators) {
builder.validate(parent, aggregationBuilders, pipelineAggregatorBuilders);
}
List<PipelineAggregationBuilder> orderedPipelineAggregators =
resolvePipelineAggregatorOrder(pipelineAggregatorBuilders, aggregationBuilders);
AggregatorFactory[] aggFactories = new AggregatorFactory[aggregationBuilders.size()];
int i = 0;
@ -314,7 +349,7 @@ public class AggregatorFactories {
aggFactories[i] = agg.build(queryShardContext, parent);
++i;
}
return new AggregatorFactories(aggFactories, orderedpipelineAggregators);
return new AggregatorFactories(aggFactories, orderedPipelineAggregators);
}
private List<PipelineAggregationBuilder> resolvePipelineAggregatorOrder(

View File

@ -18,14 +18,20 @@
*/
package org.elasticsearch.search.aggregations;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
/**
* A factory that knows how to create an {@link PipelineAggregator} of a
@ -64,11 +70,145 @@ public abstract class PipelineAggregationBuilder implements NamedWriteable, Base
}
/**
* Internal: Validates the state of this factory (makes sure the factory is properly
* configured)
* Makes sure this builder is properly configured.
*/
protected abstract void validate(AggregatorFactory parent, Collection<AggregationBuilder> aggregationBuilders,
Collection<PipelineAggregationBuilder> pipelineAggregatorBuilders);
protected abstract void validate(ValidationContext context);
/**
* Context used while validating a {@link PipelineAggregationBuilder}. It
* accumulates validation failures and exposes the aggregation's siblings so
* builders can check their targets without throwing.
*/
public abstract static class ValidationContext {
/**
* Build the context for the root of the aggregation tree.
*/
public static ValidationContext forTreeRoot(Collection<AggregationBuilder> siblingAggregations,
Collection<PipelineAggregationBuilder> siblingPipelineAggregations,
ActionRequestValidationException validationFailuresSoFar) {
return new ForTreeRoot(siblingAggregations, siblingPipelineAggregations, validationFailuresSoFar);
}
/**
* Build the context for a node inside the aggregation tree.
*/
public static ValidationContext forInsideTree(AggregationBuilder parent,
ActionRequestValidationException validationFailuresSoFar) {
return new ForInsideTree(parent, validationFailuresSoFar);
}
// Accumulated validation failures; stays null until the first error is added.
private ActionRequestValidationException e;
private ValidationContext(ActionRequestValidationException validationFailuresSoFar) {
this.e = validationFailuresSoFar;
}
// Context for pipeline aggregations declared at the root of the tree:
// siblings are passed in explicitly because there is no parent to ask.
private static class ForTreeRoot extends ValidationContext {
private final Collection<AggregationBuilder> siblingAggregations;
private final Collection<PipelineAggregationBuilder> siblingPipelineAggregations;
ForTreeRoot(Collection<AggregationBuilder> siblingAggregations,
Collection<PipelineAggregationBuilder> siblingPipelineAggregations,
ActionRequestValidationException validationFailuresSoFar) {
super(validationFailuresSoFar);
this.siblingAggregations = Objects.requireNonNull(siblingAggregations);
this.siblingPipelineAggregations = Objects.requireNonNull(siblingPipelineAggregations);
}
@Override
public Collection<AggregationBuilder> getSiblingAggregations() {
return siblingAggregations;
}
@Override
public Collection<PipelineAggregationBuilder> getSiblingPipelineAggregations() {
return siblingPipelineAggregations;
}
@Override
public void validateParentAggSequentiallyOrdered(String type, String name) {
// The tree root has no parent aggregation, so this requirement can never be met.
addValidationError(type + " aggregation [" + name
+ "] must have a histogram, date_histogram or auto_date_histogram as parent but doesn't have a parent");
}
}
// Context for pipeline aggregations nested under another aggregation:
// siblings and parent checks are derived from that parent builder.
private static class ForInsideTree extends ValidationContext {
private final AggregationBuilder parent;
ForInsideTree(AggregationBuilder parent, ActionRequestValidationException validationFailuresSoFar) {
super(validationFailuresSoFar);
this.parent = Objects.requireNonNull(parent);
}
@Override
public Collection<AggregationBuilder> getSiblingAggregations() {
return parent.getSubAggregations();
}
@Override
public Collection<PipelineAggregationBuilder> getSiblingPipelineAggregations() {
return parent.getPipelineAggregations();
}
@Override
public void validateParentAggSequentiallyOrdered(String type, String name) {
// histogram and date_histogram parents are acceptable only with min_doc_count == 0.
if (parent instanceof HistogramAggregationBuilder) {
HistogramAggregationBuilder histoParent = (HistogramAggregationBuilder) parent;
if (histoParent.minDocCount() != 0) {
addValidationError(
"parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0");
}
} else if (parent instanceof DateHistogramAggregationBuilder) {
DateHistogramAggregationBuilder histoParent = (DateHistogramAggregationBuilder) parent;
if (histoParent.minDocCount() != 0) {
addValidationError(
"parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0");
}
} else if (parent instanceof AutoDateHistogramAggregationBuilder) {
// Nothing to check
} else {
addValidationError(
type + " aggregation [" + name + "] must have a histogram, date_histogram or auto_date_histogram as parent");
}
}
}
/**
* Aggregations that are siblings to the aggregation being validated.
*/
public abstract Collection<AggregationBuilder> getSiblingAggregations();
/**
* Pipeline aggregations that are siblings to the aggregation being validated.
*/
public abstract Collection<PipelineAggregationBuilder> getSiblingPipelineAggregations();
/**
* Add a validation error to this context. All validation errors
* are accumulated in a list and, if there are any, the request
* is not executed and the entire list is returned as the error
* response.
*/
public void addValidationError(String error) {
e = ValidateActions.addValidationError(error, e);
}
/**
* Add a validation error about the {@code buckets_path}, prefixed with
* the {@code buckets_path} field name.
*/
public void addBucketPathValidationError(String error) {
addValidationError(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + ' ' + error);
}
/**
* Validates that the parent is sequentially ordered.
*/
public abstract void validateParentAggSequentiallyOrdered(String type, String name);
/**
* The validation exception, if there is one. It'll be {@code null}
* if the context wasn't provided with any exception on creation
* and none were added.
*/
public ActionRequestValidationException getValidationException() {
return e;
}
}
/**
* Creates the pipeline aggregator

View File

@ -22,16 +22,10 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
@ -79,16 +73,6 @@ public abstract class AbstractPipelineAggregationBuilder<PAB extends AbstractPip
return type;
}
/**
* Validates the state of this factory (makes sure the factory is properly
* configured)
*/
@Override
public final void validate(AggregatorFactory parent, Collection<AggregationBuilder> factories,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
doValidate(parent, factories, pipelineAggregatorFactories);
}
protected abstract PipelineAggregator createInternal(Map<String, Object> metaData);
/**
@ -102,32 +86,6 @@ public abstract class AbstractPipelineAggregationBuilder<PAB extends AbstractPip
return aggregator;
}
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> factories,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
}
/**
* Validates pipeline aggregations that need sequentially ordered data.
*/
public static void validateSequentiallyOrderedParentAggs(AggregatorFactory parent, String type, String name) {
if ((parent instanceof HistogramAggregatorFactory || parent instanceof DateHistogramAggregatorFactory
|| parent instanceof AutoDateHistogramAggregatorFactory) == false) {
throw new IllegalStateException(
type + " aggregation [" + name + "] must have a histogram, date_histogram or auto_date_histogram as parent");
}
if (parent instanceof HistogramAggregatorFactory) {
HistogramAggregatorFactory histoParent = (HistogramAggregatorFactory) parent;
if (histoParent.minDocCount() != 0) {
throw new IllegalStateException("parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0");
}
} else if (parent instanceof DateHistogramAggregatorFactory) {
DateHistogramAggregatorFactory histoParent = (DateHistogramAggregatorFactory) parent;
if (histoParent.minDocCount() != 0) {
throw new IllegalStateException("parent histogram of " + type + " aggregation [" + name + "] must have min_doc_count of 0");
}
}
}
@SuppressWarnings("unchecked")
@Override
public PAB setMetaData(Map<String, Object> metaData) {

View File

@ -24,13 +24,10 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
@ -107,28 +104,27 @@ public abstract class BucketMetricsPipelineAggregationBuilder<AF extends BucketM
protected abstract PipelineAggregator createInternal(Map<String, Object> metaData);
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggBuilders,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
protected void validate(ValidationContext context) {
if (bucketsPaths.length != 1) {
throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must contain a single entry for aggregation [" + name + "]");
context.addBucketPathValidationError("must contain a single entry for aggregation [" + name + "]");
return;
}
// Need to find the first agg name in the buckets path to check its a
// multi bucket agg: aggs are split with '>' and can optionally have a
// metric name after them by using '.' so need to split on both to get
// just the agg name
final String firstAgg = bucketsPaths[0].split("[>\\.]")[0];
Optional<AggregationBuilder> aggBuilder = aggBuilders.stream().filter((builder) -> builder.getName().equals(firstAgg))
Optional<AggregationBuilder> aggBuilder = context.getSiblingAggregations().stream()
.filter(builder -> builder.getName().equals(firstAgg))
.findAny();
if (aggBuilder.isPresent()) {
if ((aggBuilder.get() instanceof MultiBucketAggregationBuilder) == false) {
throw new IllegalArgumentException("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [" + name + "] found :"
+ aggBuilder.get().getClass().getName() + " for buckets path: " + bucketsPaths[0]);
}
} else {
throw new IllegalArgumentException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [" + name + "]: " + bucketsPaths[0]);
if (false == aggBuilder.isPresent()) {
context.addBucketPathValidationError("aggregation does not exist for aggregation [" + name + "]: " + bucketsPaths[0]);
return;
}
if ((aggBuilder.get() instanceof MultiBucketAggregationBuilder) == false) {
context.addValidationError("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [" + name + "] found :"
+ aggBuilder.get().getClass().getName() + " for buckets path: " + bucketsPaths[0]);
}
}

View File

@ -198,6 +198,11 @@ public class BucketScriptPipelineAggregationBuilder extends AbstractPipelineAggr
return builder;
}
@Override
protected void validate(ValidationContext context) {
// Intentionally a no-op: this builder adds no validation constraints of its own.
}
@Override
protected boolean overrideBucketsPath() {
return true;

View File

@ -193,6 +193,11 @@ public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAg
return factory;
}
@Override
protected void validate(ValidationContext context) {
// Intentionally a no-op: this builder adds no validation constraints of its own.
}
@Override
protected boolean overrideBucketsPath() {
return true;

View File

@ -25,9 +25,6 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
@ -36,7 +33,6 @@ import org.elasticsearch.search.sort.SortBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
@ -144,10 +140,9 @@ public class BucketSortPipelineAggregationBuilder extends AbstractPipelineAggreg
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
protected void validate(ValidationContext context) {
if (sorts.isEmpty() && size == null && from == 0) {
throw new IllegalStateException("[" + name + "] is configured to perform nothing. Please set either of "
context.addValidationError("[" + name + "] is configured to perform nothing. Please set either of "
+ Arrays.asList(SearchSourceBuilder.SORT_FIELD.getPreferredName(), SIZE.getPreferredName(), FROM.getPreferredName())
+ " to use " + NAME);
}

View File

@ -25,13 +25,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -93,14 +89,11 @@ public class CumulativeSumPipelineAggregationBuilder extends AbstractPipelineAgg
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
protected void validate(ValidationContext context) {
if (bucketsPaths.length != 1) {
throw new IllegalStateException(BUCKETS_PATH.getPreferredName()
+ " must contain a single entry for aggregation [" + name + "]");
context.addBucketPathValidationError("must contain a single entry for aggregation [" + name + "]");
}
validateSequentiallyOrderedParentAggs(parent, NAME, name);
context.validateParentAggSequentiallyOrdered(NAME, name);
}
@Override

View File

@ -28,16 +28,12 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -152,14 +148,13 @@ public class DerivativePipelineAggregationBuilder extends AbstractPipelineAggreg
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
protected void validate(ValidationContext context) {
if (bucketsPaths.length != 1) {
throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
context.addValidationError(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must contain a single entry for aggregation [" + name + "]");
}
validateSequentiallyOrderedParentAggs(parent, NAME, name);
context.validateParentAggSequentiallyOrdered(NAME, name);
}
@Override

View File

@ -22,12 +22,8 @@ package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
@ -80,13 +76,10 @@ public class ExtendedStatsBucketPipelineAggregationBuilder
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggBuilders,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
super.doValidate(parent, aggBuilders, pipelineAggregatorFactories);
if (sigma < 0.0 ) {
throw new IllegalStateException(ExtendedStatsBucketParser.SIGMA.getPreferredName()
+ " must be a non-negative double");
protected void validate(ValidationContext context) {
super.validate(context);
if (sigma < 0.0) {
context.addValidationError(ExtendedStatsBucketParser.SIGMA.getPreferredName() + " must be a non-negative double");
}
}

View File

@ -19,6 +19,17 @@
package org.elasticsearch.search.aggregations.pipeline;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.GAP_POLICY;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
@ -29,23 +40,8 @@ import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.GAP_POLICY;
public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder<MovAvgPipelineAggregationBuilder> {
public static final String NAME = "moving_avg";
@ -258,20 +254,19 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
protected void validate(ValidationContext context) {
if (minimize != null && minimize && !model.canBeMinimized()) {
// If the user asks to minimize, but this model doesn't support
// it, throw exception
throw new IllegalStateException("The [" + model + "] model cannot be minimized for aggregation [" + name + "]");
context.addValidationError("The [" + model + "] model cannot be minimized for aggregation [" + name + "]");
}
if (bucketsPaths.length != 1) {
throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must contain a single entry for aggregation [" + name + "]");
context.addBucketPathValidationError("must contain a single entry for aggregation [" + name + "]");
return;
}
// Validate any model-specific window requirements
model.validate(window, name);
validateSequentiallyOrderedParentAggs(parent, NAME, name);
context.validateParentAggSequentiallyOrdered(NAME, name);
}
@Override

View File

@ -30,13 +30,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.util.Collection;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
@ -179,13 +175,11 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
protected void validate(ValidationContext context) {
if (window <= 0) {
throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer.");
context.addValidationError("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer.");
}
validateSequentiallyOrderedParentAggs(parent, NAME, name);
context.validateParentAggSequentiallyOrdered(NAME, name);
}
@Override

View File

@ -27,13 +27,9 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
@ -117,14 +113,13 @@ public class PercentilesBucketPipelineAggregationBuilder
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
super.doValidate(parent, aggFactories, pipelineAggregatorFactories);
protected void validate(ValidationContext context) {
super.validate(context);
for (Double p : percents) {
if (p == null || p < 0.0 || p > 100.0) {
throw new IllegalStateException(PERCENTS_FIELD.getPreferredName()
context.addValidationError(PERCENTS_FIELD.getPreferredName()
+ " must only contain non-null doubles from 0.0-100.0 inclusive");
return;
}
}
}

View File

@ -26,14 +26,10 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -138,11 +134,10 @@ public class SerialDiffPipelineAggregationBuilder extends AbstractPipelineAggreg
protected PipelineAggregator createInternal(Map<String, Object> metaData) {
return new SerialDiffPipelineAggregator(name, bucketsPaths, formatter(), gapPolicy, lag, metaData);
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
validateSequentiallyOrderedParentAggs(parent, NAME, name);
protected void validate(ValidationContext context) {
context.validateParentAggSequentiallyOrdered(NAME, name);
}
@Override

View File

@ -447,6 +447,9 @@ public class SearchModuleTests extends ESTestCase {
private static TestPipelineAggregationBuilder fromXContent(String name, XContentParser p) {
return null;
}
@Override
// Test stub: no validation constraints to enforce.
protected void validate(ValidationContext context) {}
}
/**

View File

@ -40,5 +40,4 @@ public abstract class AbstractBucketMetricsTestCase<PAF extends BucketMetricsPip
}
protected abstract PAF doCreateTestAggregatorFactory(String name, String bucketsPath);
}

View File

@ -24,10 +24,12 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class AvgBucketTests extends AbstractBucketMetricsTestCase<AvgBucketPipelineAggregationBuilder> {
@Override
@ -43,23 +45,18 @@ public class AvgBucketTests extends AbstractBucketMetricsTestCase<AvgBucketPipel
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final AvgBucketPipelineAggregationBuilder builder = new AvgBucketPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
AvgBucketPipelineAggregationBuilder builder2 = new AvgBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
// Now try to point to a valid multi-bucket agg (no exception should be thrown)
AvgBucketPipelineAggregationBuilder builder3 = new AvgBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg which is valid
assertThat(validate(aggBuilders, new AvgBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -19,8 +19,8 @@
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -460,21 +460,21 @@ public class BucketSortIT extends ESIntegTestCase {
}
public void testInvalidPath() {
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class,
Exception e = expectThrows(ActionRequestValidationException.class,
() -> client().prepareSearch(INDEX)
.addAggregation(terms("foos").field(TERM_FIELD)
.subAggregation(bucketSort("bucketSort", Arrays.asList(new FieldSortBuilder("invalid")))))
.get());
assertThat(e.getCause().getMessage(), containsString("No aggregation found for path [invalid]"));
assertThat(e.getMessage(), containsString("No aggregation found for path [invalid]"));
}
public void testNeitherSortsNorSizeSpecifiedAndFromIsDefault_ShouldThrowValidation() {
SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class,
Exception e = expectThrows(ActionRequestValidationException.class,
() -> client().prepareSearch(INDEX)
.addAggregation(terms("foos").field(TERM_FIELD)
.subAggregation(bucketSort("bucketSort", Collections.emptyList())))
.get());
assertThat(e.getCause().getMessage(), containsString("[bucketSort] is configured to perform nothing." +
assertThat(e.getMessage(), containsString("[bucketSort] is configured to perform nothing." +
" Please set either of [sort, size, from] to use bucket_sort"));
}
}

View File

@ -38,8 +38,6 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.TestAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
@ -52,10 +50,7 @@ import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.equalTo;
@ -291,34 +286,6 @@ public class CumulativeSumAggregatorTests extends AggregatorTestCase {
}
});
}
/**
* The validation should verify the parent aggregation is allowed.
*/
public void testValidate() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(new CumulativeSumPipelineAggregationBuilder("cusum", "sum"));
final CumulativeSumPipelineAggregationBuilder builder = new CumulativeSumPipelineAggregationBuilder("name", "valid");
builder.validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), Collections.emptySet(), aggBuilders);
}
/**
* The validation should throw an IllegalArgumentException, since parent
* aggregation is not a type of HistogramAggregatorFactory,
* DateHistogramAggregatorFactory or AutoDateHistogramAggregatorFactory.
*/
public void testValidateException() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(new CumulativeSumPipelineAggregationBuilder("cusum", "sum"));
TestAggregatorFactory parentFactory = TestAggregatorFactory.createInstance();
final CumulativeSumPipelineAggregationBuilder builder = new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalStateException ex = expectThrows(IllegalStateException.class,
() -> builder.validate(parentFactory, Collections.emptySet(), aggBuilders));
assertEquals("cumulative_sum aggregation [name] must have a histogram, date_histogram or auto_date_histogram as parent",
ex.getMessage());
}
private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consumer<InternalAggregation> verify) throws IOException {
executeTestCase(query, aggBuilder, verify, indexWriter -> {

View File

@ -19,10 +19,18 @@
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
public class CumulativeSumTests extends BasePipelineAggregationTestCase<CumulativeSumPipelineAggregationBuilder> {
import java.io.IOException;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class CumulativeSumTests extends BasePipelineAggregationTestCase<CumulativeSumPipelineAggregationBuilder> {
@Override
protected CumulativeSumPipelineAggregationBuilder createTestAggregatorFactory() {
String name = randomAlphaOfLengthBetween(3, 20);
@ -34,4 +42,23 @@ public class CumulativeSumTests extends BasePipelineAggregationTestCase<Cumulati
return factory;
}
public void testValidate() throws IOException {
assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(),
new CumulativeSumPipelineAggregationBuilder("name", "valid")), nullValue());
}
public void testInvalidParent() throws IOException {
AggregationBuilder parent = mock(AggregationBuilder.class);
when(parent.getName()).thenReturn("name");
assertThat(validate(parent, new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: cumulative_sum aggregation [name] must have a histogram, date_histogram "
+ "or auto_date_histogram as parent;"));
}
public void testNoParent() throws IOException {
assertThat(validate(emptyList(), new CumulativeSumPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: cumulative_sum aggregation [name] must have a histogram, date_histogram "
+ "or auto_date_histogram as parent but doesn't have a parent;"));
}
}

View File

@ -19,16 +19,20 @@
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.TestAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class DerivativeTests extends BasePipelineAggregationTestCase<DerivativePipelineAggregationBuilder> {
@Override
@ -51,16 +55,13 @@ public class DerivativeTests extends BasePipelineAggregationTestCase<DerivativeP
}
return factory;
}
/**
* The validation should verify the parent aggregation is allowed.
*/
public void testValidate() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(new DerivativePipelineAggregationBuilder("deriv", "der"));
final DerivativePipelineAggregationBuilder builder = new DerivativePipelineAggregationBuilder("name", "valid");
builder.validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), Collections.emptySet(), aggBuilders);
assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(),
new DerivativePipelineAggregationBuilder("name", "valid")), nullValue());
}
/**
@ -71,12 +72,11 @@ public class DerivativeTests extends BasePipelineAggregationTestCase<DerivativeP
public void testValidateException() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(new DerivativePipelineAggregationBuilder("deriv", "der"));
TestAggregatorFactory parentFactory = TestAggregatorFactory.createInstance();
AggregationBuilder parent = mock(AggregationBuilder.class);
when(parent.getName()).thenReturn("name");
final DerivativePipelineAggregationBuilder builder = new DerivativePipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalStateException ex = expectThrows(IllegalStateException.class,
() -> builder.validate(parentFactory, Collections.emptySet(), aggBuilders));
assertEquals("derivative aggregation [name] must have a histogram, date_histogram or auto_date_histogram as parent",
ex.getMessage());
assertThat(validate(parent, new DerivativePipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: derivative aggregation [name] must have a histogram, "
+ "date_histogram or auto_date_histogram as parent;"));
}
}

View File

@ -26,11 +26,11 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase<ExtendedStatsBucketPipelineAggregationBuilder> {
@ -68,23 +68,17 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase<Exte
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final ExtendedStatsBucketPipelineAggregationBuilder builder = new ExtendedStatsBucketPipelineAggregationBuilder("name",
"invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
ExtendedStatsBucketPipelineAggregationBuilder builder2 = new ExtendedStatsBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg (no exception should be
// thrown)
ExtendedStatsBucketPipelineAggregationBuilder builder3 = new ExtendedStatsBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new ExtendedStatsBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -24,10 +24,12 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class MaxBucketTests extends AbstractBucketMetricsTestCase<MaxBucketPipelineAggregationBuilder> {
@Override
@ -43,23 +45,18 @@ public class MaxBucketTests extends AbstractBucketMetricsTestCase<MaxBucketPipel
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final MaxBucketPipelineAggregationBuilder builder = new MaxBucketPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
MaxBucketPipelineAggregationBuilder builder2 = new MaxBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg (no exception should be
// thrown)
MaxBucketPipelineAggregationBuilder builder3 = new MaxBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new MaxBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -24,10 +24,12 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class MinBucketTests extends AbstractBucketMetricsTestCase<MinBucketPipelineAggregationBuilder> {
@Override
@ -43,23 +45,18 @@ public class MinBucketTests extends AbstractBucketMetricsTestCase<MinBucketPipel
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final MinBucketPipelineAggregationBuilder builder = new MinBucketPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
MinBucketPipelineAggregationBuilder builder2 = new MinBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg (no exception should be
// thrown)
MinBucketPipelineAggregationBuilder builder3 = new MinBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -19,6 +19,29 @@
package org.elasticsearch.search.aggregations.pipeline;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.avg;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.min;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.derivative;
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.movingAvg;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
@ -35,28 +58,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuil
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.avg;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.min;
import static org.elasticsearch.search.aggregations.AggregationBuilders.range;
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.derivative;
import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.movingAvg;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
@ESIntegTestCase.SuiteScopeTestCase
public class MovAvgIT extends ESIntegTestCase {
private static final String INTERVAL_FIELD = "l_value";
@ -737,7 +738,7 @@ public class MovAvgIT extends ESIntegTestCase {
).get();
fail("MovingAvg should not accept non-histogram as parent");
} catch (SearchPhaseExecutionException exception) {
} catch (ActionRequestValidationException exception) {
// All good
}
}
@ -850,7 +851,7 @@ public class MovAvgIT extends ESIntegTestCase {
public void testHoltWintersNotEnoughData() {
Client client = client();
expectThrows(SearchPhaseExecutionException.class, () -> client.prepareSearch("idx")
expectThrows(IllegalArgumentException.class, () -> client.prepareSearch("idx")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD).interval(interval)
.extendedBounds(0L, interval * (numBuckets - 1))
@ -1144,7 +1145,7 @@ public class MovAvgIT extends ESIntegTestCase {
.minimize(true))
).get();
fail("Simple Model cannot be minimized, but an exception was not thrown");
} catch (SearchPhaseExecutionException e) {
} catch (ActionRequestValidationException e) {
// All good
}
@ -1162,7 +1163,7 @@ public class MovAvgIT extends ESIntegTestCase {
.minimize(true))
).get();
fail("Linear Model cannot be minimized, but an exception was not thrown");
} catch (SearchPhaseExecutionException e) {
} catch (ActionRequestValidationException e) {
// all good
}
}

View File

@ -19,18 +19,18 @@
package org.elasticsearch.search.aggregations.pipeline;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import java.io.IOException;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.TestAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.HoltWintersModel.SeasonalityType;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
public class MovAvgTests extends BasePipelineAggregationTestCase<MovAvgPipelineAggregationBuilder> {
@Override
@ -120,11 +120,8 @@ public class MovAvgTests extends BasePipelineAggregationTestCase<MovAvgPipelineA
* The validation should verify the parent aggregation is allowed.
*/
public void testValidate() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(createTestAggregatorFactory());
final MovAvgPipelineAggregationBuilder builder = new MovAvgPipelineAggregationBuilder("name", "valid");
builder.validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), Collections.emptySet(), aggBuilders);
assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(),
new MovAvgPipelineAggregationBuilder("name", "valid")), nullValue());
}
/**
@ -133,14 +130,8 @@ public class MovAvgTests extends BasePipelineAggregationTestCase<MovAvgPipelineA
* DateHistogramAggregatorFactory or AutoDateHistogramAggregatorFactory.
*/
public void testValidateException() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(createTestAggregatorFactory());
TestAggregatorFactory parentFactory = TestAggregatorFactory.createInstance();
final MovAvgPipelineAggregationBuilder builder = new MovAvgPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalStateException ex = expectThrows(IllegalStateException.class,
() -> builder.validate(parentFactory, Collections.emptySet(), aggBuilders));
assertEquals("moving_avg aggregation [name] must have a histogram, date_histogram or auto_date_histogram as parent",
ex.getMessage());
assertThat(validate(emptyList(), new MovAvgPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: moving_avg aggregation [name] must have a histogram, date_histogram "
+ "or auto_date_histogram as parent but doesn't have a parent;"));
}
}

View File

@ -42,8 +42,6 @@ import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.TestAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
@ -53,16 +51,14 @@ import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
public class MovFnUnitTests extends AggregatorTestCase {
public class MovFnAggrgatorTests extends AggregatorTestCase {
private static final String DATE_FIELD = "date";
private static final String INSTANT_FIELD = "instant";
@ -172,34 +168,4 @@ public class MovFnUnitTests extends AggregatorTestCase {
private static long asLong(String dateTime) {
return DateFormatters.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(dateTime)).toInstant().toEpochMilli();
}
/**
* The validation should verify the parent aggregation is allowed.
*/
public void testValidate() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap());
aggBuilders.add(new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3));
final MovFnPipelineAggregationBuilder builder = new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1);
builder.validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), Collections.emptySet(), aggBuilders);
}
/**
* The validation should throw an IllegalArgumentException, since parent
* aggregation is not a type of HistogramAggregatorFactory,
* DateHistogramAggregatorFactory or AutoDateHistogramAggregatorFactory.
*/
public void testValidateException() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap());
aggBuilders.add(new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3));
TestAggregatorFactory parentFactory = TestAggregatorFactory.createInstance();
final MovFnPipelineAggregationBuilder builder = new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1);
IllegalStateException ex = expectThrows(IllegalStateException.class,
() -> builder.validate(parentFactory, Collections.emptySet(), aggBuilders));
assertEquals("moving_fn aggregation [name] must have a histogram, date_histogram or auto_date_histogram as parent",
ex.getMessage());
}
}

View File

@ -19,17 +19,23 @@
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import java.io.IOException;
import java.util.Collections;
public class MovFnPipelineAggregationBuilderSerializationTests extends AbstractSerializingTestCase<MovFnPipelineAggregationBuilder> {
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class MovFnPipelineAggregationBuilderSerializationTests extends BasePipelineAggregationTestCase<MovFnPipelineAggregationBuilder> {
@Override
protected MovFnPipelineAggregationBuilder createTestInstance() {
protected MovFnPipelineAggregationBuilder createTestAggregatorFactory() {
MovFnPipelineAggregationBuilder builder = new MovFnPipelineAggregationBuilder(
randomAlphaOfLength(10),
"foo",
@ -40,19 +46,27 @@ public class MovFnPipelineAggregationBuilderSerializationTests extends AbstractS
return builder;
}
@Override
protected Writeable.Reader<MovFnPipelineAggregationBuilder> instanceReader() {
return MovFnPipelineAggregationBuilder::new;
public void testValidParent() throws IOException {
Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", emptyMap());
assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(),
new MovFnPipelineAggregationBuilder("mov_fn", "avg", script, 3)), nullValue());
}
@Override
protected MovFnPipelineAggregationBuilder doParseInstance(XContentParser parser) throws IOException {
return MovFnPipelineAggregationBuilder.parse(parser);
public void testInvalidParent() throws IOException {
Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap());
AggregationBuilder parent = mock(AggregationBuilder.class);
when(parent.getName()).thenReturn("name");
assertThat(validate(parent, new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1)), equalTo(
"Validation Failed: 1: moving_fn aggregation [name] must have a histogram, date_histogram"
+ " or auto_date_histogram as parent;"));
}
@Override
protected boolean supportsUnknownFields() {
return false;
public void testNoParent() throws IOException {
Script script = new Script(Script.DEFAULT_SCRIPT_TYPE, "painless", "test", Collections.emptyMap());
assertThat(validate(emptyList(), new MovFnPipelineAggregationBuilder("name", "invalid_agg>metric", script, 1)), equalTo(
"Validation Failed: 1: moving_fn aggregation [name] must have a histogram, date_histogram"
+ " or auto_date_histogram as parent but doesn't have a parent;"));
}
}

View File

@ -26,11 +26,11 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<PercentilesBucketPipelineAggregationBuilder> {
@ -72,23 +72,17 @@ public class PercentilesBucketTests extends AbstractBucketMetricsTestCase<Percen
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final PercentilesBucketPipelineAggregationBuilder builder = new PercentilesBucketPipelineAggregationBuilder("name",
"invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
PercentilesBucketPipelineAggregationBuilder builder2 = new PercentilesBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg (no exception should be
// thrown)
PercentilesBucketPipelineAggregationBuilder builder3 = new PercentilesBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new PercentilesBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -20,29 +20,20 @@
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalOrder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import static org.mockito.Mockito.mock;
import java.util.function.Function;
/**
* Provides helper methods and classes for use in PipelineAggregation tests,
@ -155,28 +146,12 @@ public class PipelineAggregationHelperTests extends ESTestCase {
return 0.0;
}
static AggregatorFactory getRandomSequentiallyOrderedParentAgg() throws IOException {
AggregatorFactory factory = null;
switch (randomIntBetween(0, 2)) {
case 0:
factory = new HistogramAggregatorFactory("name", mock(ValuesSourceConfig.class), 0.0d, 0.0d,
mock(InternalOrder.class), false, 0L, 0.0d, 1.0d, mock(QueryShardContext.class), null,
new AggregatorFactories.Builder(), Collections.emptyMap());
break;
case 1:
factory = new DateHistogramAggregatorFactory("name", mock(ValuesSourceConfig.class),
mock(InternalOrder.class), false, 0L, mock(Rounding.class), mock(Rounding.class),
mock(ExtendedBounds.class), mock(QueryShardContext.class), mock(AggregatorFactory.class),
new AggregatorFactories.Builder(), Collections.emptyMap());
break;
case 2:
default:
AutoDateHistogramAggregationBuilder.RoundingInfo[] roundings = new AutoDateHistogramAggregationBuilder.RoundingInfo[1];
factory = new AutoDateHistogramAggregatorFactory("name", mock(ValuesSourceConfig.class),
1, roundings,
mock(QueryShardContext.class), null, new AggregatorFactories.Builder(), Collections.emptyMap());
}
return factory;
/**
 * Builds a random aggregation whose buckets are sequentially ordered — a
 * histogram, date_histogram, or auto_date_histogram — always named
 * {@code "name"}. Used as a valid parent for pipeline aggregations that
 * require an ordered parent.
 */
static AggregationBuilder getRandomSequentiallyOrderedParentAgg() throws IOException {
// unchecked: randomFrom's generic varargs creates a generic array
@SuppressWarnings("unchecked")
Function<String, AggregationBuilder> builder = randomFrom(
HistogramAggregationBuilder::new,
DateHistogramAggregationBuilder::new,
AutoDateHistogramAggregationBuilder::new);
return builder.apply("name");
}
}

View File

@ -19,16 +19,21 @@
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.TestAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SerialDifferenceTests extends BasePipelineAggregationTestCase<SerialDiffPipelineAggregationBuilder> {
@Override
@ -47,32 +52,28 @@ public class SerialDifferenceTests extends BasePipelineAggregationTestCase<Seria
}
return factory;
}
/**
* The validation should verify the parent aggregation is allowed.
*/
public void testValidate() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(createTestAggregatorFactory());
final SerialDiffPipelineAggregationBuilder builder = new SerialDiffPipelineAggregationBuilder("name", "valid");
builder.validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(), Collections.emptySet(), aggBuilders);
assertThat(validate(PipelineAggregationHelperTests.getRandomSequentiallyOrderedParentAgg(),
new SerialDiffPipelineAggregationBuilder("name", "valid")), nullValue());
}
/**
* The validation should throw an IllegalArgumentException, since parent
* aggregation is not a type of HistogramAggregatorFactory,
* DateHistogramAggregatorFactory or AutoDateHistogramAggregatorFactory.
*/
public void testValidateException() throws IOException {
public void testInvalidParent() throws IOException {
final Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(createTestAggregatorFactory());
TestAggregatorFactory parentFactory = TestAggregatorFactory.createInstance();
AggregationBuilder parent = mock(AggregationBuilder.class);
when(parent.getName()).thenReturn("name");
assertThat(validate(parent, new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: serial_diff aggregation [name] must have a histogram, "
+ "date_histogram or auto_date_histogram as parent;"));
}
final SerialDiffPipelineAggregationBuilder builder = new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalStateException ex = expectThrows(IllegalStateException.class,
() -> builder.validate(parentFactory, Collections.emptySet(), aggBuilders));
assertEquals("serial_diff aggregation [name] must have a histogram, date_histogram or auto_date_histogram as parent",
ex.getMessage());
/**
 * A serial_diff pipeline aggregation at the top level of the tree (no
 * parent) fails validation, since it requires a sequentially ordered
 * parent aggregation.
 */
public void testNoParent() {
assertThat(validate(emptyList(), new SerialDiffPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: serial_diff aggregation [name] must have a histogram, "
+ "date_histogram or auto_date_histogram as parent but doesn't have a parent;"));
}
}

View File

@ -24,10 +24,12 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class StatsBucketTests extends AbstractBucketMetricsTestCase<StatsBucketPipelineAggregationBuilder> {
@Override
@ -44,23 +46,18 @@ public class StatsBucketTests extends AbstractBucketMetricsTestCase<StatsBucketP
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final StatsBucketPipelineAggregationBuilder builder = new StatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
StatsBucketPipelineAggregationBuilder builder2 = new StatsBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg (no exception should be
// thrown)
StatsBucketPipelineAggregationBuilder builder3 = new StatsBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new StatsBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -24,10 +24,12 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class SumBucketTests extends AbstractBucketMetricsTestCase<SumBucketPipelineAggregationBuilder> {
@Override
@ -43,23 +45,18 @@ public class SumBucketTests extends AbstractBucketMetricsTestCase<SumBucketPipel
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
final SumBucketPipelineAggregationBuilder builder = new SumBucketPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> builder.validate(null, aggBuilders, Collections.emptySet()));
assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage());
assertThat(validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "invalid_agg>metric")), equalTo(
"Validation Failed: 1: " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"));
// Now try to point to a single bucket agg
SumBucketPipelineAggregationBuilder builder2 = new SumBucketPipelineAggregationBuilder("name", "global>metric");
ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet()));
assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
assertThat(validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "global>metric")), equalTo(
"Validation Failed: 1: The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric", ex.getMessage());
+ " for buckets path: global>metric;"));
// Now try to point to a valid multi-bucket agg (no exception should be
// thrown)
SumBucketPipelineAggregationBuilder builder3 = new SumBucketPipelineAggregationBuilder("name", "terms>metric");
builder3.validate(null, aggBuilders, Collections.emptySet());
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new SumBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}

View File

@ -19,6 +19,16 @@
package org.elasticsearch.search.aggregations;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.Matchers.hasSize;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
@ -33,20 +43,13 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder.ValidationContext;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.hamcrest.Matchers.hasSize;
public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelineAggregationBuilder<AF>> extends ESTestCase {
protected static final String STRING_FIELD_NAME = "mapped_string";
@ -77,7 +80,7 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
IndicesModule indicesModule = new IndicesModule(Collections.emptyList());
SearchModule searchModule = new SearchModule(settings, false, emptyList());
SearchModule searchModule = new SearchModule(settings, false, plugins());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
@ -91,6 +94,13 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
}
}
/**
 * Plugins to add to the test. Subclasses override this to register extra
 * aggregations with the test's {@code SearchModule}; by default no plugins
 * are added.
 */
protected List<SearchPlugin> plugins() {
return emptyList();
}
/**
* Generic test that creates new AggregatorFactory from the test
* AggregatorFactory and checks both for equality and asserts equality on
@ -198,4 +208,34 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
protected NamedXContentRegistry xContentRegistry() {
return xContentRegistry;
}
/**
 * Helper for testing validation: validates {@code builder} as though it
 * were a sub-aggregation of {@code parent}, returning the accumulated
 * failure message, or {@code null} if validation passed.
 */
protected String validate(AggregationBuilder parent, AF builder) {
return validate(ValidationContext.forInsideTree(parent, null), builder);
}
/**
 * Helper for testing validation: validates {@code builder} at the root of
 * the aggregation tree alongside the given sibling aggregations (and no
 * sibling pipeline aggregations), returning the failure message or
 * {@code null} if validation passed.
 */
protected String validate(Collection<AggregationBuilder> siblingAggregations, AF builder) {
return validate(siblingAggregations, emptyList(), builder);
}
/**
 * Helper for testing validation: validates {@code builder} at the root of
 * the aggregation tree alongside the given sibling aggregations and
 * sibling pipeline aggregations, returning the failure message or
 * {@code null} if validation passed.
 */
protected String validate(Collection<AggregationBuilder> siblingAggregations,
Collection<PipelineAggregationBuilder> siblingPipelineAggregations, AF builder) {
return validate(ValidationContext.forTreeRoot(siblingAggregations, siblingPipelineAggregations, null), builder);
}
/**
 * Helper for testing validation: runs {@code builder}'s validation in the
 * given context and returns the message of the accumulated validation
 * exception, or {@code null} when there were no failures.
 */
protected String validate(ValidationContext context, AF builder) {
builder.validate(context);
return context.getValidationException() == null ? null : context.getValidationException().getMessage();
}
}

View File

@ -10,20 +10,15 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketMetricsParser;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT;
public class CumulativeCardinalityPipelineAggregationBuilder
@ -90,14 +85,12 @@ public class CumulativeCardinalityPipelineAggregationBuilder
}
@Override
public void doValidate(AggregatorFactory parent, Collection<AggregationBuilder> aggFactories,
Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
protected void validate(ValidationContext context) {
if (bucketsPaths.length != 1) {
throw new IllegalStateException(BUCKETS_PATH.getPreferredName()
+ " must contain a single entry for aggregation [" + name + "]");
context.addBucketPathValidationError("must contain a single entry for aggregation [" + name + "]");
}
validateSequentiallyOrderedParentAggs(parent, NAME, name);
context.validateParentAggSequentiallyOrdered(NAME, name);
}
@Override
@ -105,6 +98,7 @@ public class CumulativeCardinalityPipelineAggregationBuilder
if (format != null) {
builder.field(BucketMetricsParser.FORMAT.getPreferredName(), format);
}
builder.field(BUCKETS_PATH_FIELD.getPreferredName(), bucketsPaths[0]);
return builder;
}
@ -126,4 +120,9 @@ public class CumulativeCardinalityPipelineAggregationBuilder
public String getWriteableName() {
return NAME;
}
@Override
protected boolean overrideBucketsPath() {
// Tell the base class not to emit buckets_path itself — presumably
// because this builder's doXContent writes the single path; verify
// against AbstractPipelineAggregationBuilder.
return true;
}
}

View File

@ -1,56 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Test implementation for AggregatorFactory.
*/
public class StubAggregatorFactory extends AggregatorFactory {
// Canned aggregator handed back by createInternal regardless of arguments.
private final Aggregator aggregator;
private StubAggregatorFactory(QueryShardContext queryShardContext, Aggregator aggregator) throws IOException {
super("_name", queryShardContext, null, new AggregatorFactories.Builder(), Collections.emptyMap());
this.aggregator = aggregator;
}
// Always returns the stubbed aggregator; all parameters are ignored.
@Override
protected Aggregator createInternal(SearchContext searchContext,
Aggregator parent,
boolean collectsFromSingleBucket,
List list, Map metaData) throws IOException {
return aggregator;
}
// Builds a stub backed by mocked QueryShardContext/Aggregator; the mocked
// context only answers bigArrays(), which is all the stub requires.
public static StubAggregatorFactory createInstance() throws IOException {
BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
QueryShardContext queryShardContext = mock(QueryShardContext.class);
when(queryShardContext.bigArrays()).thenReturn(bigArrays);
Aggregator aggregator = mock(Aggregator.class);
return new StubAggregatorFactory(queryShardContext, aggregator);
}
}

View File

@ -16,46 +16,27 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Rounding;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalOrder;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.xpack.analytics.StubAggregatorFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class CumulativeCardinalityAggregatorTests extends AggregatorTestCase {
@ -115,53 +96,6 @@ public class CumulativeCardinalityAggregatorTests extends AggregatorTestCase {
});
}
public void testParentValidations() throws IOException {
ValuesSourceConfig<ValuesSource> valuesSourceConfig = new ValuesSourceConfig<>(CoreValuesSourceType.NUMERIC);
// Histogram
Set<PipelineAggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "sum"));
AggregatorFactory parent = new HistogramAggregatorFactory("name", valuesSourceConfig, 0.0d, 0.0d,
mock(InternalOrder.class), false, 0L, 0.0d, 1.0d, mock(QueryShardContext.class), null,
new AggregatorFactories.Builder(), Collections.emptyMap());
CumulativeCardinalityPipelineAggregationBuilder builder
= new CumulativeCardinalityPipelineAggregationBuilder("name", "valid");
builder.validate(parent, Collections.emptySet(), aggBuilders);
// Date Histogram
aggBuilders.clear();
aggBuilders.add(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "sum"));
parent = new DateHistogramAggregatorFactory("name", valuesSourceConfig,
mock(InternalOrder.class), false, 0L, mock(Rounding.class), mock(Rounding.class),
mock(ExtendedBounds.class), mock(QueryShardContext.class), mock(AggregatorFactory.class),
new AggregatorFactories.Builder(), Collections.emptyMap());
builder = new CumulativeCardinalityPipelineAggregationBuilder("name", "valid");
builder.validate(parent, Collections.emptySet(), aggBuilders);
// Auto Date Histogram
ValuesSourceConfig<ValuesSource.Numeric> numericVS = new ValuesSourceConfig<>(CoreValuesSourceType.NUMERIC);
aggBuilders.clear();
aggBuilders.add(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "sum"));
AutoDateHistogramAggregationBuilder.RoundingInfo[] roundings = new AutoDateHistogramAggregationBuilder.RoundingInfo[1];
parent = new AutoDateHistogramAggregatorFactory("name", numericVS,
1, roundings,
mock(QueryShardContext.class), null, new AggregatorFactories.Builder(), Collections.emptyMap());
builder = new CumulativeCardinalityPipelineAggregationBuilder("name", "valid");
builder.validate(parent, Collections.emptySet(), aggBuilders);
// Mocked "test" agg, should fail validation
aggBuilders.clear();
aggBuilders.add(new CumulativeCardinalityPipelineAggregationBuilder("cumulative_card", "sum"));
StubAggregatorFactory parentFactory = StubAggregatorFactory.createInstance();
CumulativeCardinalityPipelineAggregationBuilder failBuilder
= new CumulativeCardinalityPipelineAggregationBuilder("name", "invalid_agg>metric");
IllegalStateException ex = expectThrows(IllegalStateException.class,
() -> failBuilder.validate(parentFactory, Collections.emptySet(), aggBuilders));
assertEquals("cumulative_cardinality aggregation [name] must have a histogram, date_histogram or auto_date_histogram as parent",
ex.getMessage());
}
public void testNonCardinalityAgg() {
Query query = new MatchAllDocsQuery();

View File

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.cumulativecardinality;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import java.io.IOException;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class CumulativeCardinalityTests extends BasePipelineAggregationTestCase<CumulativeCardinalityPipelineAggregationBuilder> {
@Override
protected List<SearchPlugin> plugins() {
// Register the cumulative_cardinality pipeline aggregation with the
// test's SearchModule so the builder can be parsed and (de)serialized.
return singletonList(new SearchPlugin() {
@Override
public List<PipelineAggregationSpec> getPipelineAggregations() {
return singletonList(new PipelineAggregationSpec(
CumulativeCardinalityPipelineAggregationBuilder.NAME,
CumulativeCardinalityPipelineAggregationBuilder::new,
CumulativeCardinalityPipelineAggregator::new,
CumulativeCardinalityPipelineAggregationBuilder.PARSER));
}
});
}
@Override
protected CumulativeCardinalityPipelineAggregationBuilder createTestAggregatorFactory() {
// Random name and buckets path; format is optional, so only set it
// half the time to cover both serialization shapes.
String name = randomAlphaOfLengthBetween(3, 20);
String bucketsPath = randomAlphaOfLengthBetween(3, 20);
CumulativeCardinalityPipelineAggregationBuilder builder =
new CumulativeCardinalityPipelineAggregationBuilder(name, bucketsPath);
if (randomBoolean()) {
builder.format(randomAlphaOfLengthBetween(1, 10));
}
return builder;
}
/**
 * cumulative_cardinality is valid under any of the three sequentially
 * ordered parents (histogram, date_histogram, auto_date_histogram), and
 * fails validation under any other parent or with no parent at all.
 */
public void testParentValidations() throws IOException {
CumulativeCardinalityPipelineAggregationBuilder builder =
new CumulativeCardinalityPipelineAggregationBuilder("name", randomAlphaOfLength(5));
assertThat(validate(new HistogramAggregationBuilder("name"), builder), nullValue());
assertThat(validate(new DateHistogramAggregationBuilder("name"), builder), nullValue());
assertThat(validate(new AutoDateHistogramAggregationBuilder("name"), builder), nullValue());
// Mocked "test" agg, should fail validation
AggregationBuilder stubParent = mock(AggregationBuilder.class);
when(stubParent.getName()).thenReturn("name");
assertThat(validate(stubParent, builder), equalTo(
"Validation Failed: 1: cumulative_cardinality aggregation [name] must have a histogram, "
+ "date_histogram or auto_date_histogram as parent;"));
// No parent at all: validated as a tree-root agg with no siblings
assertThat(validate(emptyList(), builder), equalTo(
"Validation Failed: 1: cumulative_cardinality aggregation [name] must have a histogram, "
+ "date_histogram or auto_date_histogram as parent but doesn't have a parent;"));
}
}