From 11bafa18e1b1c6d9a95da57295fc9e2271f12ba7 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Thu, 14 Jan 2016 10:52:28 +0000 Subject: [PATCH] Removes Aggregation Builders in place of AggregatorFactory implementations --- .../percolate/PercolateRequestBuilder.java | 21 +- .../percolate/PercolateSourceBuilder.java | 38 ++- .../action/search/SearchRequestBuilder.java | 8 +- .../elasticsearch/search/SearchModule.java | 1 - .../AbstractAggregationBuilder.java | 45 --- .../aggregations/AggregationBuilder.java | 194 ------------ .../aggregations/AggregationBuilders.java | 140 +++++---- .../search/aggregations/Aggregator.java | 2 +- .../aggregations/AggregatorFactories.java | 73 +++-- .../aggregations/AggregatorFactory.java | 58 ++-- .../aggregations/AggregatorParsers.java | 14 +- .../ValuesSourceAggregationBuilder.java | 112 ------- .../bucket/children/ChildrenBuilder.java | 58 ---- .../bucket/children/ChildrenParser.java | 4 +- .../filter/FilterAggregationBuilder.java | 60 ---- .../bucket/filter/FilterParser.java | 4 +- .../filters/FiltersAggregationBuilder.java | 126 -------- .../bucket/filters/FiltersAggregator.java | 7 +- .../bucket/filters/FiltersParser.java | 7 +- .../bucket/geogrid/GeoHashGridBuilder.java | 97 ------ .../bucket/geogrid/GeoHashGridParser.java | 4 +- .../bucket/global/GlobalBuilder.java | 43 --- .../bucket/global/GlobalParser.java | 4 +- .../histogram/DateHistogramBuilder.java | 186 ------------ .../histogram/DateHistogramInterval.java | 18 ++ .../bucket/histogram/DateHistogramParser.java | 11 +- .../bucket/histogram/ExtendedBounds.java | 13 + .../bucket/histogram/HistogramAggregator.java | 39 ++- .../bucket/histogram/HistogramBuilder.java | 134 --------- .../bucket/histogram/HistogramParser.java | 4 +- .../bucket/missing/MissingBuilder.java | 57 ---- .../bucket/missing/MissingParser.java | 4 +- .../bucket/nested/NestedBuilder.java | 61 ---- .../bucket/nested/NestedParser.java | 4 +- .../bucket/nested/ReverseNestedBuilder.java | 59 ---- .../bucket/nested/ReverseNestedParser.java | 4 +- .../bucket/range/AbstractRangeBuilder.java | 81 ----- .../bucket/range/RangeAggregator.java | 121 ++++++-- .../bucket/range/RangeBuilder.java | 115 -------- .../bucket/range/RangeParser.java | 12 +- .../date/DateRangeAggregatorFactory.java | 221 +++++++++++++- .../bucket/range/date/DateRangeBuilder.java | 114 ------- .../bucket/range/date/DateRangeParser.java | 10 +- .../range/geodistance/GeoDistanceBuilder.java | 260 ---------------- .../range/geodistance/GeoDistanceParser.java | 109 +++++-- .../ipv4/IPv4RangeAggregatorFactory.java | 89 +++++- .../bucket/range/ipv4/IPv4RangeBuilder.java | 110 ------- .../bucket/range/ipv4/IpRangeParser.java | 10 +- .../DiversifiedSamplerAggregationBuilder.java | 79 ----- .../sampler/DiversifiedSamplerParser.java | 7 +- .../sampler/SamplerAggregationBuilder.java | 59 ---- .../bucket/sampler/SamplerAggregator.java | 6 +- .../bucket/sampler/SamplerParser.java | 4 +- .../SignificantTermsAggregatorFactory.java | 44 ++- .../significant/SignificantTermsBuilder.java | 277 ------------------ .../significant/SignificantTermsParser.java | 7 +- .../significant/heuristics/JLHScore.java | 5 +- .../heuristics/PercentageScore.java | 3 +- .../GlobalOrdinalsStringTermsAggregator.java | 2 +- .../bucket/terms/InternalOrder.java | 19 +- .../bucket/terms/InternalTerms.java | 2 +- .../bucket/terms/TermsAggregatorFactory.java | 91 ++++-- .../bucket/terms/TermsBuilder.java | 276 ----------------- .../bucket/terms/TermsParser.java | 8 +- 
.../bucket/terms/support/IncludeExclude.java | 50 ++++ .../metrics/MetricsAggregationBuilder.java | 44 --- ...ValuesSourceMetricsAggregationBuilder.java | 88 ------ .../aggregations/metrics/avg/AvgParser.java | 4 +- .../cardinality/CardinalityParser.java | 4 +- .../metrics/geobounds/GeoBoundsParser.java | 4 +- .../geocentroid/GeoCentroidParser.java | 4 +- .../aggregations/metrics/max/MaxParser.java | 4 +- .../aggregations/metrics/min/MinParser.java | 4 +- .../AbstractPercentilesBuilder.java | 87 ------ .../PercentileRanksAggregatorFactory.java | 230 +++++++++++++++ .../percentiles/PercentileRanksBuilder.java | 54 ---- .../percentiles/PercentileRanksParser.java | 48 ++- .../PercentilesAggregatorFactory.java | 230 +++++++++++++++ .../percentiles/PercentilesBuilder.java | 60 ---- .../percentiles/PercentilesMethod.java | 21 +- .../percentiles/PercentilesParser.java | 48 ++- .../hdr/HDRPercentileRanksAggregator.java | 124 -------- .../hdr/HDRPercentilesAggregator.java | 124 -------- .../TDigestPercentileRanksAggregator.java | 126 -------- .../tdigest/TDigestPercentilesAggregator.java | 126 -------- .../scripted/ScriptedMetricParser.java | 4 +- .../metrics/stats/StatsParser.java | 4 +- .../stats/extended/ExtendedStatsParser.java | 4 +- .../aggregations/metrics/sum/SumParser.java | 4 +- .../metrics/tophits/TopHitsParser.java | 5 +- .../metrics/valuecount/ValueCountParser.java | 4 +- .../pipeline/PipelineAggregatorBuilder.java | 84 ------ .../pipeline/PipelineAggregatorBuilders.java | 91 +++--- .../pipeline/PipelineAggregatorFactory.java | 27 +- .../bucketmetrics/BucketMetricsBuilder.java | 67 ----- .../bucketmetrics/BucketMetricsParser.java | 4 +- .../bucketmetrics/avg/AvgBucketBuilder.java | 30 -- .../bucketmetrics/avg/AvgBucketParser.java | 4 +- .../avg/AvgBucketPipelineAggregator.java | 6 +- .../bucketmetrics/max/MaxBucketBuilder.java | 30 -- .../bucketmetrics/max/MaxBucketParser.java | 4 +- .../max/MaxBucketPipelineAggregator.java | 6 +- .../bucketmetrics/min/MinBucketBuilder.java | 31 -- .../bucketmetrics/min/MinBucketParser.java | 4 +- .../min/MinBucketPipelineAggregator.java | 6 +- .../percentile/PercentilesBucketBuilder.java | 49 ---- .../percentile/PercentilesBucketParser.java | 4 +- .../PercentilesBucketPipelineAggregator.java | 6 +- .../stats/StatsBucketBuilder.java | 30 -- .../stats/StatsBucketParser.java | 4 +- .../stats/StatsBucketPipelineAggregator.java | 6 +- .../extended/ExtendedStatsBucketBuilder.java | 47 --- .../extended/ExtendedStatsBucketParser.java | 4 +- ...ExtendedStatsBucketPipelineAggregator.java | 6 +- .../bucketmetrics/sum/SumBucketBuilder.java | 30 -- .../bucketmetrics/sum/SumBucketParser.java | 4 +- .../sum/SumBucketPipelineAggregator.java | 6 +- .../bucketscript/BucketScriptBuilder.java | 83 ------ .../BucketScriptPipelineAggregator.java | 12 + .../cumulativesum/CumulativeSumBuilder.java | 48 --- .../cumulativesum/CumulativeSumParser.java | 3 +- .../CumulativeSumPipelineAggregator.java | 6 +- .../derivative/DerivativeBuilder.java | 78 ----- .../pipeline/derivative/DerivativeParser.java | 6 +- .../DerivativePipelineAggregator.java | 16 +- .../having/BucketSelectorBuilder.java | 76 ----- .../BucketSelectorPipelineAggregator.java | 12 + .../pipeline/movavg/MovAvgBuilder.java | 148 ---------- .../pipeline/movavg/MovAvgParser.java | 2 +- .../movavg/MovAvgPipelineAggregator.java | 22 +- .../pipeline/movavg/models/EwmaModel.java | 11 +- .../movavg/models/HoltLinearModel.java | 21 +- .../movavg/models/HoltWintersModel.java | 46 ++- 
.../pipeline/movavg/models/LinearModel.java | 5 + .../movavg/models/MovAvgModelBuilder.java | 2 + .../pipeline/movavg/models/SimpleModel.java | 5 + .../serialdiff/SerialDiffBuilder.java | 67 ----- .../pipeline/serialdiff/SerialDiffParser.java | 2 +- .../SerialDiffPipelineAggregator.java | 6 +- .../aggregations/support/ValueType.java | 26 +- .../ValuesSourceAggregatorFactory.java | 58 +--- .../support/ValuesSourceParser.java | 216 -------------- .../search/builder/SearchSourceBuilder.java | 8 +- .../cache/query/IndicesRequestCacheIT.java | 11 +- .../percolator/PercolatorAggregationsIT.java | 4 +- .../aggregations/AggregationsBinaryIT.java | 142 --------- .../search/aggregations/MissingValueIT.java | 10 +- .../aggregations/bucket/ChildrenIT.java | 17 +- .../aggregations/bucket/DateHistogramIT.java | 73 ++--- .../bucket/DateHistogramOffsetIT.java | 6 +- .../bucket/DateHistogramTests.java | 110 +++++++ .../aggregations/bucket/DateRangeTests.java | 10 +- .../bucket/DiversifiedSamplerIT.java | 30 +- .../bucket/DiversifiedSamplerTests.java | 4 +- .../search/aggregations/bucket/FilterIT.java | 14 +- .../search/aggregations/bucket/FiltersIT.java | 66 ++--- .../aggregations/bucket/GeoDistanceIT.java | 21 +- .../bucket/GeoDistanceRangeTests.java | 10 +- .../aggregations/bucket/GeoHashGridIT.java | 2 +- .../aggregations/bucket/IPv4RangeTests.java | 12 +- .../search/aggregations/bucket/NestedIT.java | 25 +- .../aggregations/bucket/RangeTests.java | 10 +- .../aggregations/bucket/ReverseNestedIT.java | 26 +- .../search/aggregations/bucket/SamplerIT.java | 16 +- .../aggregations/bucket/ShardReduceIT.java | 36 +-- .../bucket/SignificantTermsIT.java | 57 ++-- .../SignificantTermsSignificanceScoreIT.java | 95 +++--- .../bucket/SignificantTermsTests.java | 4 +- .../bucket/TermsShardMinDocCountIT.java | 18 +- .../aggregations/bucket/TermsTests.java | 4 +- .../SignificanceHeuristicTests.java | 16 +- .../aggregations/metrics/FiltersTests.java | 9 +- ...ksTests.java => PercentileRanksTests.java} | 12 +- ...ntilesTests.java => PercentilesTests.java} | 14 +- .../metrics/TDigestPercentileRanksTests.java | 64 ---- .../metrics/TDigestPercentilesTests.java | 69 ----- .../aggregations/metrics/TopHitsIT.java | 13 +- .../percentiles/PercentilesMethodTests.java | 81 +++++ .../aggregations/pipeline/AvgBucketIT.java | 31 +- .../pipeline/CumulativeSumIT.java | 11 +- .../pipeline/CumulativeSumTests.java | 5 +- .../pipeline/DateDerivativeIT.java | 24 +- .../aggregations/pipeline/DerivativeIT.java | 54 ++-- .../pipeline/DerivativeTests.java | 9 +- .../pipeline/ExtendedStatsBucketIT.java | 37 +-- .../aggregations/pipeline/MaxBucketIT.java | 38 +-- .../aggregations/pipeline/MinBucketIT.java | 31 +- .../pipeline/PercentilesBucketIT.java | 73 ++--- .../pipeline/SerialDifferenceTests.java | 5 +- .../aggregations/pipeline/StatsBucketIT.java | 31 +- .../aggregations/pipeline/SumBucketIT.java | 31 +- .../AbstractBucketMetricsTestCase.java | 7 +- .../bucketmetrics/AvgBucketTests.java | 4 +- .../ExtendedStatsBucketTests.java | 4 +- .../bucketmetrics/MaxBucketTests.java | 4 +- .../bucketmetrics/MinBucketTests.java | 4 +- .../bucketmetrics/PercentilesBucketTests.java | 4 +- .../bucketmetrics/StatsBucketTests.java | 4 +- .../bucketmetrics/SumBucketTests.java | 4 +- .../pipeline/moving/avg/MovAvgIT.java | 222 +++++--------- .../pipeline/moving/avg/MovAvgTests.java | 23 +- .../pipeline/serialdiff/SerialDiffIT.java | 20 +- .../basic/TransportTwoNodesSearchIT.java | 4 +- .../search/child/ChildQuerySearchIT.java | 2 +- 
.../SharedSignificantTermsTestMethods.java | 6 +- .../expression/MoreExpressionTests.java | 7 +- .../messy/tests/BucketScriptTests.java | 34 +-- .../messy/tests/BucketSelectorTests.java | 60 ++-- .../messy/tests/DateRangeTests.java | 4 +- .../messy/tests/DoubleTermsTests.java | 13 +- .../messy/tests/EquivalenceTests.java | 8 +- .../messy/tests/HDRPercentileRanksTests.java | 118 ++++---- .../messy/tests/HistogramTests.java | 7 +- .../messy/tests/LongTermsTests.java | 13 +- .../messy/tests/MinDocCountTests.java | 15 +- .../elasticsearch/messy/tests/RangeTests.java | 2 +- .../messy/tests/StringTermsTests.java | 37 ++- .../tests/TDigestPercentileRanksTests.java | 124 ++++---- .../messy/tests/TDigestPercentilesTests.java | 5 +- 219 files changed, 2843 insertions(+), 6346 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/AbstractAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/ValuesSourceAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregationBuilder.java delete mode 100644 
core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesBuilder.java create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorFactory.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksBuilder.java create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorFactory.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffBuilder.java delete mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceParser.java delete mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java rename core/src/test/java/org/elasticsearch/search/aggregations/metrics/{HDRPercentileRanksTests.java => PercentileRanksTests.java} (82%) rename core/src/test/java/org/elasticsearch/search/aggregations/metrics/{HDRPercentilesTests.java => PercentilesTests.java} (82%) delete mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksTests.java delete mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java 
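The hunks below replace the removed *Builder classes with AggregatorFactory implementations returned directly from AggregationBuilders, and the factories become fully serializable prototypes (doReadFrom/doWriteTo/internalXContent are made abstract and the prototypes are registered in AggregatorParsers). What follows is a minimal sketch of client code against the new factory-style API, based only on signatures visible in this patch; the field("host") value-source setter is assumed to carry over from the removed ValuesSourceAggregationBuilder and does not appear in this excerpt.

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.search.aggregations.AggregationBuilders;

    public class FactoryStyleAggregationExample {
        public static SearchResponse run(Client client) {
            return client.prepareSearch("logs")
                    // filter(...) now takes its QueryBuilder up front instead of via a
                    // setter on the removed FilterAggregationBuilder
                    .addAggregation(AggregationBuilders.filter("errors", QueryBuilders.termQuery("level", "error"))
                            // sub-aggregations are added directly on the AggregatorFactory
                            .subAggregation(AggregationBuilders.terms("by_host").field("host")))
                    .setSize(0)
                    .get();
        }
    }

The same AggregatorFactory objects are what SearchRequestBuilder#addAggregation now accepts, and the percolate builders gain parallel overloads for AggregatorFactory and PipelineAggregatorFactory, so one object model serves both request building and stream serialization.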
diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java index e4b3a04fe1c..2a3b7ece886 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateRequestBuilder.java @@ -26,7 +26,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -152,16 +153,26 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder aggregationBuilder) { sourceBuilder().addAggregation(aggregationBuilder); return this; } /** - * Sets the percolate request definition directly on the request. - * This will overwrite any definitions set by any of the delegate methods. + * Delegates to + * {@link PercolateSourceBuilder#addAggregation(PipelineAggregatorFactory)} + */ + public PercolateRequestBuilder addAggregation(PipelineAggregatorFactory aggregationBuilder) { + sourceBuilder().addAggregation(aggregationBuilder); + return this; + } + + /** + * Sets the percolate request definition directly on the request. This will + * overwrite any definitions set by any of the delegate methods. */ public PercolateRequestBuilder setSource(PercolateSourceBuilder source) { sourceBuilder = source; diff --git a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java index b080039ed0d..fe625e6c913 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/PercolateSourceBuilder.java @@ -29,7 +29,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -51,7 +52,8 @@ public class PercolateSourceBuilder extends ToXContentToBytes { private List sorts; private Boolean trackScores; private HighlightBuilder highlightBuilder; - private List aggregations; + private List> aggregationFactorys; + private List pipelineAggregationFactorys; /** * Sets the document to run the percolate queries against. @@ -123,11 +125,22 @@ public class PercolateSourceBuilder extends ToXContentToBytes { /** * Add an aggregation definition. 
*/ - public PercolateSourceBuilder addAggregation(AbstractAggregationBuilder aggregationBuilder) { - if (aggregations == null) { - aggregations = new ArrayList<>(); + public PercolateSourceBuilder addAggregation(AggregatorFactory aggregationBuilder) { + if (aggregationFactorys == null) { + aggregationFactorys = new ArrayList<>(); } - aggregations.add(aggregationBuilder); + aggregationFactorys.add(aggregationBuilder); + return this; + } + + /** + * Add an aggregation definition. + */ + public PercolateSourceBuilder addAggregation(PipelineAggregatorFactory aggregationBuilder) { + if (pipelineAggregationFactorys == null) { + pipelineAggregationFactorys = new ArrayList<>(); + } + pipelineAggregationFactorys.add(aggregationBuilder); return this; } @@ -159,11 +172,18 @@ public class PercolateSourceBuilder extends ToXContentToBytes { if (highlightBuilder != null) { highlightBuilder.toXContent(builder, params); } - if (aggregations != null) { + if (aggregationFactorys != null || pipelineAggregationFactorys != null) { builder.field("aggregations"); builder.startObject(); - for (AbstractAggregationBuilder aggregation : aggregations) { - aggregation.toXContent(builder, params); + if (aggregationFactorys != null) { + for (AggregatorFactory aggregation : aggregationFactorys) { + aggregation.toXContent(builder, params); + } + } + if (pipelineAggregationFactorys != null) { + for (PipelineAggregatorFactory aggregation : pipelineAggregationFactorys) { + aggregation.toXContent(builder, params); + } } builder.endObject(); } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 1cfcc6ca362..9e2c0536b6f 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -28,8 +28,8 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.Template; import org.elasticsearch.search.Scroll; -import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -364,10 +364,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder aggregation) { sourceBuilder().aggregation(aggregation); return this; } @@ -375,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder> extends AbstractAggregationBuilder { - - private List aggregations; - private List aggregatorFactories; - private List pipelineAggregatorFactories; - private BytesReference aggregationsBinary; - private Map metaData; - - /** - * Sole constructor, typically used by sub-classes. - */ - protected AggregationBuilder(String name, String type) { - super(name, type); - } - - /** - * Add a sub aggregation to this aggregation. NORELEASE REMOVE THIS WHEN AGG - * REFACTOR IS COMPLETE - */ - @SuppressWarnings("unchecked") - public B subAggregation(AbstractAggregationBuilder aggregation) { - if (aggregations == null) { - aggregations = new ArrayList<>(); - } - aggregations.add(aggregation); - return (B) this; - } - - /** - * Add a sub aggregation to this aggregation. 
- */ - @SuppressWarnings("unchecked") - public B subAggregation(AggregatorFactory aggregation) { - if (aggregatorFactories == null) { - aggregatorFactories = new ArrayList<>(); - } - aggregatorFactories.add(aggregation); - return (B) this; - } - - /** - * Add a sub aggregation to this aggregation. - */ - @SuppressWarnings("unchecked") - public B subAggregation(PipelineAggregatorFactory aggregation) { - if (pipelineAggregatorFactories == null) { - pipelineAggregatorFactories = new ArrayList<>(); - } - pipelineAggregatorFactories.add(aggregation); - return (B) this; - } - - /** - * Sets a raw (xcontent / json) sub addAggregation. - */ - @Deprecated - public B subAggregation(byte[] aggregationsBinary) { - return subAggregation(aggregationsBinary, 0, aggregationsBinary.length); - } - - /** - * Sets a raw (xcontent / json) sub addAggregation. - */ - @Deprecated - public B subAggregation(byte[] aggregationsBinary, int aggregationsBinaryOffset, int aggregationsBinaryLength) { - return subAggregation(new BytesArray(aggregationsBinary, aggregationsBinaryOffset, aggregationsBinaryLength)); - } - - /** - * Sets a raw (xcontent / json) sub addAggregation. - */ - @Deprecated - @SuppressWarnings("unchecked") - public B subAggregation(BytesReference aggregationsBinary) { - this.aggregationsBinary = aggregationsBinary; - return (B) this; - } - - /** - * Sets a raw (xcontent / json) sub addAggregation. - */ - @Deprecated - public B subAggregation(XContentBuilder aggs) { - return subAggregation(aggs.bytes()); - } - - /** - * Sets a raw (xcontent / json) sub addAggregation. - */ - @Deprecated - public B subAggregation(Map aggs) { - try { - XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); - builder.map(aggs); - return subAggregation(builder); - } catch (IOException e) { - throw new ElasticsearchGenerationException("Failed to generate [" + aggs + "]", e); - } - } - - /** - * Sets the meta data to be included in the aggregation response - */ - public B setMetaData(Map metaData) { - this.metaData = metaData; - return (B)this; - } - - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(getName()); - - if (this.metaData != null) { - builder.field("meta", this.metaData); - } - builder.field(type); - internalXContent(builder, params); - - if (aggregations != null || aggregatorFactories != null || pipelineAggregatorFactories != null || aggregationsBinary != null) { - - if (aggregations != null || aggregatorFactories != null || pipelineAggregatorFactories != null) { - builder.startObject("aggregations"); - if (aggregations != null) { - for (AbstractAggregationBuilder subAgg : aggregations) { - subAgg.toXContent(builder, params); - } - } - if (aggregatorFactories != null) { - for (AggregatorFactory subAgg : aggregatorFactories) { - subAgg.toXContent(builder, params); - } - } - if (pipelineAggregatorFactories != null) { - for (PipelineAggregatorFactory subAgg : pipelineAggregatorFactories) { - subAgg.toXContent(builder, params); - } - } - builder.endObject(); - } - - if (aggregationsBinary != null) { - if (XContentFactory.xContentType(aggregationsBinary) == builder.contentType()) { - builder.rawField("aggregations", aggregationsBinary); - } else { - builder.field("aggregations_binary", aggregationsBinary); - } - } - - } - - return builder.endObject(); - } - - protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException; -} diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index 44878b9500b..347c2f8776a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -19,36 +19,39 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.children.Children; -import org.elasticsearch.search.aggregations.bucket.children.ChildrenBuilder; +import org.elasticsearch.search.aggregations.bucket.children.ParentToChildrenAggregator; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregator; import org.elasticsearch.search.aggregations.bucket.filters.Filters; -import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid; -import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridBuilder; +import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser; import org.elasticsearch.search.aggregations.bucket.global.Global; -import org.elasticsearch.search.aggregations.bucket.global.GlobalBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder; +import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.bucket.histogram.HistogramBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator.DateHistogramFactory; import org.elasticsearch.search.aggregations.bucket.missing.Missing; -import org.elasticsearch.search.aggregations.bucket.missing.MissingBuilder; +import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregator; import org.elasticsearch.search.aggregations.bucket.nested.Nested; -import org.elasticsearch.search.aggregations.bucket.nested.NestedBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregator; import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested; -import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedBuilder; +import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregator; import org.elasticsearch.search.aggregations.bucket.range.Range; -import org.elasticsearch.search.aggregations.bucket.range.RangeBuilder; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeBuilder; -import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceBuilder; -import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeBuilder; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorFactory; +import 
org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.GeoDistanceFactory; +import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator; import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; @@ -62,9 +65,9 @@ import org.elasticsearch.search.aggregations.metrics.max.MaxAggregator; import org.elasticsearch.search.aggregations.metrics.min.Min; import org.elasticsearch.search.aggregations.metrics.min.MinAggregator; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric; import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregator; import org.elasticsearch.search.aggregations.metrics.stats.Stats; @@ -138,134 +141,151 @@ public class AggregationBuilders { /** * Create a new {@link Filter} aggregation with the given name. */ - public static FilterAggregationBuilder filter(String name) { - return new FilterAggregationBuilder(name); + public static FilterAggregator.Factory filter(String name, QueryBuilder filter) { + return new FilterAggregator.Factory(name, filter); } /** * Create a new {@link Filters} aggregation with the given name. */ - public static FiltersAggregationBuilder filters(String name) { - return new FiltersAggregationBuilder(name); + public static FiltersAggregator.Factory filters(String name, KeyedFilter... filters) { + return new FiltersAggregator.Factory(name, filters); + } + + /** + * Create a new {@link Filters} aggregation with the given name. + */ + public static FiltersAggregator.Factory filters(String name, QueryBuilder... filters) { + return new FiltersAggregator.Factory(name, filters); } /** * Create a new {@link Sampler} aggregation with the given name. */ - public static SamplerAggregationBuilder sampler(String name) { - return new SamplerAggregationBuilder(name); + public static SamplerAggregator.Factory sampler(String name) { + return new SamplerAggregator.Factory(name); + } + + /** + * Create a new {@link Sampler} aggregation with the given name. 
+ */ + public static SamplerAggregator.DiversifiedFactory diversifiedSampler(String name) { + return new SamplerAggregator.DiversifiedFactory(name); } /** * Create a new {@link Global} aggregation with the given name. */ - public static GlobalBuilder global(String name) { - return new GlobalBuilder(name); + public static GlobalAggregator.Factory global(String name) { + return new GlobalAggregator.Factory(name); } /** * Create a new {@link Missing} aggregation with the given name. */ - public static MissingBuilder missing(String name) { - return new MissingBuilder(name); + public static MissingAggregator.Factory missing(String name) { + return new MissingAggregator.Factory(name, null); } /** * Create a new {@link Nested} aggregation with the given name. */ - public static NestedBuilder nested(String name) { - return new NestedBuilder(name); + public static NestedAggregator.Factory nested(String name, String path) { + return new NestedAggregator.Factory(name, path); } /** * Create a new {@link ReverseNested} aggregation with the given name. */ - public static ReverseNestedBuilder reverseNested(String name) { - return new ReverseNestedBuilder(name); + public static ReverseNestedAggregator.Factory reverseNested(String name) { + return new ReverseNestedAggregator.Factory(name); } /** * Create a new {@link Children} aggregation with the given name. */ - public static ChildrenBuilder children(String name) { - return new ChildrenBuilder(name); + public static ParentToChildrenAggregator.Factory children(String name, String childType) { + return new ParentToChildrenAggregator.Factory(name, childType); } /** * Create a new {@link GeoDistance} aggregation with the given name. */ - public static GeoDistanceBuilder geoDistance(String name) { - return new GeoDistanceBuilder(name); + public static GeoDistanceFactory geoDistance(String name, GeoPoint origin) { + return new GeoDistanceFactory(name, origin); } /** * Create a new {@link Histogram} aggregation with the given name. */ - public static HistogramBuilder histogram(String name) { - return new HistogramBuilder(name); + public static HistogramAggregator.Factory histogram(String name) { + return new HistogramAggregator.Factory<>(name); } /** * Create a new {@link GeoHashGrid} aggregation with the given name. */ - public static GeoHashGridBuilder geohashGrid(String name) { - return new GeoHashGridBuilder(name); + public static GeoHashGridParser.GeoGridFactory geohashGrid(String name) { + return new GeoHashGridParser.GeoGridFactory(name); } /** * Create a new {@link SignificantTerms} aggregation with the given name. */ - public static SignificantTermsBuilder significantTerms(String name) { - return new SignificantTermsBuilder(name); + public static SignificantTermsAggregatorFactory significantTerms(String name) { + return new SignificantTermsAggregatorFactory(name, null); } /** - * Create a new {@link DateHistogramBuilder} aggregation with the given name. + * Create a new {@link DateHistogramFactory} aggregation with the given + * name. */ - public static DateHistogramBuilder dateHistogram(String name) { - return new DateHistogramBuilder(name); + public static DateHistogramFactory dateHistogram(String name) { + return new DateHistogramFactory(name); } /** * Create a new {@link Range} aggregation with the given name. 
*/ - public static RangeBuilder range(String name) { - return new RangeBuilder(name); + public static RangeAggregator.Factory range(String name) { + return new RangeAggregator.Factory(name); } /** - * Create a new {@link DateRangeBuilder} aggregation with the given name. + * Create a new {@link DateRangeAggregatorFactory} aggregation with the + * given name. */ - public static DateRangeBuilder dateRange(String name) { - return new DateRangeBuilder(name); + public static DateRangeAggregatorFactory dateRange(String name) { + return new DateRangeAggregatorFactory(name); } /** - * Create a new {@link IPv4RangeBuilder} aggregation with the given name. + * Create a new {@link IPv4RangeAggregatorFactory} aggregation with the + * given name. */ - public static IPv4RangeBuilder ipRange(String name) { - return new IPv4RangeBuilder(name); + public static IPv4RangeAggregatorFactory ipRange(String name) { + return new IPv4RangeAggregatorFactory(name); } /** * Create a new {@link Terms} aggregation with the given name. */ - public static TermsBuilder terms(String name) { - return new TermsBuilder(name); + public static TermsAggregatorFactory terms(String name) { + return new TermsAggregatorFactory(name, null); } /** * Create a new {@link Percentiles} aggregation with the given name. */ - public static PercentilesBuilder percentiles(String name) { - return new PercentilesBuilder(name); + public static PercentilesAggregatorFactory percentiles(String name) { + return new PercentilesAggregatorFactory(name); } /** * Create a new {@link PercentileRanks} aggregation with the given name. */ - public static PercentileRanksBuilder percentileRanks(String name) { - return new PercentileRanksBuilder(name); + public static PercentileRanksAggregatorFactory percentileRanks(String name) { + return new PercentileRanksAggregatorFactory(name); } /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java index d82d5806942..f104399490d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/Aggregator.java @@ -68,7 +68,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable { * @return an empty {@link AggregatorFactory} instance for this parser * that can be used for deserialization */ - AggregatorFactory[] getFactoryPrototypes(); + AggregatorFactory getFactoryPrototypes(); } /** diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 66818a7cd76..c12494c43e9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -46,25 +46,25 @@ import java.util.Set; */ public class AggregatorFactories extends ToXContentToBytes implements Writeable { - public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory[0], + public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory[0], new ArrayList()); - private AggregatorFactory parent; - private AggregatorFactory[] factories; + private AggregatorFactory parent; + private AggregatorFactory[] factories; private List pipelineAggregatorFactories; public static Builder builder() { return new Builder(); } - private AggregatorFactories(AggregatorFactory[] 
factories, + private AggregatorFactories(AggregatorFactory[] factories, List pipelineAggregators) { this.factories = factories; this.pipelineAggregatorFactories = pipelineAggregators; } public void init(AggregationContext context) { - for (AggregatorFactory factory : factories) { + for (AggregatorFactory factory : factories) { factory.init(context); } } @@ -82,7 +82,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< * buckets. */ public Aggregator[] createSubAggregators(Aggregator parent) throws IOException { - Aggregator[] aggregators = new Aggregator[count()]; + Aggregator[] aggregators = new Aggregator[countAggregators()]; for (int i = 0; i < factories.length; ++i) { // TODO: sometimes even sub aggregations always get called with bucket 0, eg. if // you have a terms agg under a top-level filter agg. We should have a way to @@ -105,19 +105,30 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< return aggregators; } - public int count() { + /** + * @return the number of sub-aggregator factories not including pipeline + * aggregator factories + */ + public int countAggregators() { return factories.length; } - void setParent(AggregatorFactory parent) { + /** + * @return the number of pipeline aggregator factories + */ + public int countPipelineAggregators() { + return pipelineAggregatorFactories.size(); + } + + void setParent(AggregatorFactory parent) { this.parent = parent; - for (AggregatorFactory factory : factories) { + for (AggregatorFactory factory : factories) { factory.parent = parent; } } public void validate() { - for (AggregatorFactory factory : factories) { + for (AggregatorFactory factory : factories) { factory.validate(); } for (PipelineAggregatorFactory factory : pipelineAggregatorFactories) { @@ -128,11 +139,21 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< public static class Builder { private final Set names = new HashSet<>(); - private final List factories = new ArrayList<>(); + private final List> factories = new ArrayList<>(); private final List pipelineAggregatorFactories = new ArrayList<>(); private boolean skipResolveOrder; - public Builder addAggregator(AggregatorFactory factory) { + public Builder addAggregators(AggregatorFactories factories) { + for (AggregatorFactory factory : factories.factories) { + addAggregator(factory); + } + for (PipelineAggregatorFactory factory : factories.pipelineAggregatorFactories) { + addPipelineAggregator(factory); + } + return this; + } + + public Builder addAggregator(AggregatorFactory factory) { if (!names.add(factory.name)) { throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]"); } @@ -163,17 +184,17 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< } else { orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorFactories, this.factories); } - return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), orderedpipelineAggregators); + return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), orderedpipelineAggregators); } private List resolvePipelineAggregatorOrder(List pipelineAggregatorFactories, - List aggFactories) { + List> aggFactories) { Map pipelineAggregatorFactoriesMap = new HashMap<>(); for (PipelineAggregatorFactory factory : pipelineAggregatorFactories) { pipelineAggregatorFactoriesMap.put(factory.getName(), factory); } - Map 
aggFactoriesMap = new HashMap<>(); - for (AggregatorFactory aggFactory : aggFactories) { + Map> aggFactoriesMap = new HashMap<>(); + for (AggregatorFactory aggFactory : aggFactories) { aggFactoriesMap.put(aggFactory.name, aggFactory); } List orderedPipelineAggregatorrs = new LinkedList<>(); @@ -187,7 +208,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< return orderedPipelineAggregatorrs; } - private void resolvePipelineAggregatorOrder(Map aggFactoriesMap, + private void resolvePipelineAggregatorOrder(Map> aggFactoriesMap, Map pipelineAggregatorFactoriesMap, List orderedPipelineAggregators, List unmarkedFactories, Set temporarilyMarked, PipelineAggregatorFactory factory) { @@ -202,7 +223,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) { continue; } else if (aggFactoriesMap.containsKey(firstAggName)) { - AggregatorFactory aggFactory = aggFactoriesMap.get(firstAggName); + AggregatorFactory aggFactory = aggFactoriesMap.get(firstAggName); for (int i = 1; i < bucketsPathElements.size(); i++) { PathElement pathElement = bucketsPathElements.get(i); String aggName = pathElement.name; @@ -211,9 +232,9 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< } else { // Check the non-pipeline sub-aggregator // factories - AggregatorFactory[] subFactories = aggFactory.factories.factories; + AggregatorFactory[] subFactories = aggFactory.factories.factories; boolean foundSubFactory = false; - for (AggregatorFactory subFactory : subFactories) { + for (AggregatorFactory subFactory : subFactories) { if (aggName.equals(subFactory.name)) { aggFactory = subFactory; foundSubFactory = true; @@ -254,8 +275,8 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< } } - AggregatorFactory[] getAggregatorFactories() { - return this.factories.toArray(new AggregatorFactory[this.factories.size()]); + AggregatorFactory[] getAggregatorFactories() { + return this.factories.toArray(new AggregatorFactory[this.factories.size()]); } List getPipelineAggregatorFactories() { @@ -266,9 +287,9 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< @Override public AggregatorFactories readFrom(StreamInput in) throws IOException { int factoriesSize = in.readVInt(); - AggregatorFactory[] factoriesList = new AggregatorFactory[factoriesSize]; + AggregatorFactory[] factoriesList = new AggregatorFactory[factoriesSize]; for (int i = 0; i < factoriesSize; i++) { - AggregatorFactory factory = in.readAggregatorFactory(); + AggregatorFactory factory = in.readAggregatorFactory(); factoriesList[i] = factory; } int pipelineFactoriesSize = in.readVInt(); @@ -285,7 +306,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(this.factories.length); - for (AggregatorFactory factory : factories) { + for (AggregatorFactory factory : factories) { out.writeAggregatorFactory(factory); } out.writeVInt(this.pipelineAggregatorFactories.size()); @@ -298,7 +319,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable< public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (factories != null) { - for (AggregatorFactory subAgg : factories) { + for (AggregatorFactory subAgg : factories) { subAgg.toXContent(builder, params); } } diff 
--git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java index 8f51cc701b9..ef944c7b262 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.ObjectArray; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.internal.SearchContext.Lifetime; @@ -96,6 +97,30 @@ public abstract class AggregatorFactory> extend return (AF) this; } + /** + * Add a sub aggregation to this aggregation. + */ + @SuppressWarnings("unchecked") + public AF subAggregation(AggregatorFactory aggregation) { + AggregatorFactories.Builder builder = AggregatorFactories.builder(); + builder.addAggregators(factories); + builder.addAggregator(aggregation); + factories = builder.build(); + return (AF) this; + } + + /** + * Add a sub aggregation to this aggregation. + */ + @SuppressWarnings("unchecked") + public AF subAggregation(PipelineAggregatorFactory aggregation) { + AggregatorFactories.Builder builder = AggregatorFactories.builder(); + builder.addAggregators(factories); + builder.addPipelineAggregator(aggregation); + factories = builder.build(); + return (AF) this; + } + public String name() { return name; } @@ -134,8 +159,9 @@ public abstract class AggregatorFactory> extend public void doValidate() { } - public void setMetaData(Map metaData) { + public AF setMetaData(Map metaData) { this.metaData = metaData; + return (AF) this; } @Override @@ -148,10 +174,7 @@ public abstract class AggregatorFactory> extend return factory; } - // NORELEASE make this abstract when agg refactor complete - protected AggregatorFactory doReadFrom(String name, StreamInput in) throws IOException { - return null; - } + protected abstract AggregatorFactory doReadFrom(String name, StreamInput in) throws IOException; @Override public final void writeTo(StreamOutput out) throws IOException { @@ -161,9 +184,7 @@ public abstract class AggregatorFactory> extend out.writeMap(metaData); } - // NORELEASE make this abstract when agg refactor complete - protected void doWriteTo(StreamOutput out) throws IOException { - } + protected abstract void doWriteTo(StreamOutput out) throws IOException; @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { @@ -175,7 +196,7 @@ public abstract class AggregatorFactory> extend builder.field(type.name()); internalXContent(builder, params); - if (factories != null && factories.count() > 0) { + if (factories != null && (factories.countAggregators() + factories.countPipelineAggregators()) > 0) { builder.field("aggregations"); factories.toXContent(builder, params); @@ -184,10 +205,7 @@ public abstract class AggregatorFactory> extend return builder.endObject(); } - // NORELEASE make this method abstract when agg refactor complete - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - return builder; - } + protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws 
IOException; @Override public String getWriteableName() { @@ -327,12 +345,7 @@ public abstract class AggregatorFactory> extend return Objects.hash(factories, metaData, name, type, doHashCode()); } - // NORELEASE make this method abstract here when agg refactor complete (so - // that subclasses are forced to implement it) - protected int doHashCode() { - throw new UnsupportedOperationException( - "This method should be implemented by a sub-class and should not rely on this method. When agg re-factoring is complete this method will be made abstract."); - } + protected abstract int doHashCode(); @Override public boolean equals(Object obj) { @@ -352,11 +365,6 @@ public abstract class AggregatorFactory> extend return doEquals(obj); } - // NORELEASE make this method abstract here when agg refactor complete (so - // that subclasses are forced to implement it) - protected boolean doEquals(Object obj) { - throw new UnsupportedOperationException( - "This method should be implemented by a sub-class and should not rely on this method. When agg re-factoring is complete this method will be made abstract."); - } + protected abstract boolean doEquals(Object obj); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java index 9813be022b5..357fedc0baa 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java @@ -60,23 +60,15 @@ public class AggregatorParsers { Map aggParsersBuilder = new HashMap<>(aggParsers.size()); for (Aggregator.Parser parser : aggParsers) { aggParsersBuilder.put(parser.type(), parser); - AggregatorFactory[] factoryPrototypes = parser.getFactoryPrototypes(); - // NORELEASE remove this check when agg refactoring complete - if (factoryPrototypes != null) { - for (AggregatorFactory factoryPrototype : factoryPrototypes) { - namedWriteableRegistry.registerPrototype(AggregatorFactory.class, factoryPrototype); - } - } + AggregatorFactory factoryPrototype = parser.getFactoryPrototypes(); + namedWriteableRegistry.registerPrototype(AggregatorFactory.class, factoryPrototype); } this.aggParsers = unmodifiableMap(aggParsersBuilder); Map pipelineAggregatorParsersBuilder = new HashMap<>(pipelineAggregatorParsers.size()); for (PipelineAggregator.Parser parser : pipelineAggregatorParsers) { pipelineAggregatorParsersBuilder.put(parser.type(), parser); PipelineAggregatorFactory factoryPrototype = parser.getFactoryPrototype(); - // NORELEASE remove this check when agg refactoring complete - if (factoryPrototype != null) { - namedWriteableRegistry.registerPrototype(PipelineAggregatorFactory.class, factoryPrototype); - } + namedWriteableRegistry.registerPrototype(PipelineAggregatorFactory.class, factoryPrototype); } this.pipelineAggregatorParsers = unmodifiableMap(pipelineAggregatorParsersBuilder); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/ValuesSourceAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/ValuesSourceAggregationBuilder.java deleted file mode 100644 index 2d3c0cf804e..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/ValuesSourceAggregationBuilder.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptService.ScriptType; - -import java.io.IOException; -import java.util.Map; - -/** - * A base class for all bucket aggregation builders that are based on values (either script generated or field data values) - */ -public abstract class ValuesSourceAggregationBuilder> extends AggregationBuilder { - - private Script script; - private String field; - @Deprecated - private String scriptString; - @Deprecated - private String lang; - @Deprecated - private Map params; - private Object missing; - - /** - * Constructs a new builder. - * - * @param name The name of the aggregation. - * @param type The type of the aggregation. - */ - protected ValuesSourceAggregationBuilder(String name, String type) { - super(name, type); - } - - /** - * Sets the field from which the values will be extracted. - * - * @param field The name of the field - * @return This builder (fluent interface support) - */ - @SuppressWarnings("unchecked") - public B field(String field) { - this.field = field; - return (B) this; - } - - /** - * Sets the script which generates the values. If the script is configured along with the field (as in {@link #field(String)}), then - * this script will be treated as a {@code value script}. A value script will be applied on the values that are extracted from - * the field data (you can refer to that value in the script using the {@code _value} reserved variable). If only the script is configured - * (and the no field is configured next to it), then the script will be responsible to generate the values that will be aggregated. - * - * @param script The configured script. - * @return This builder (fluent interface support) - */ - @SuppressWarnings("unchecked") - public B script(Script script) { - this.script = script; - return (B) this; - } - - /** - * Configure the value to use when documents miss a value. 
- */ - public B missing(Object missingValue) { - this.missing = missingValue; - return (B) this; - } - - @Override - protected final XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException { - builder.startObject(); - if (field != null) { - builder.field("field", field); - } - - if (script == null) { - if (scriptString != null) { - builder.field("script", new Script(scriptString, ScriptType.INLINE, lang, params)); - } - } else { - builder.field("script", script); - } - if (missing != null) { - builder.field("missing", missing); - } - - doInternalXContent(builder, builderParams); - return builder.endObject(); - } - - protected abstract XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException; -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenBuilder.java deleted file mode 100644 index dc2670cd21e..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenBuilder.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.aggregations.bucket.children; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; - -/** - * Builder for the {@link Children} aggregation. - */ -public class ChildrenBuilder extends AggregationBuilder { - - private String childType; - - /** - * Sole constructor. - */ - public ChildrenBuilder(String name) { - super(name, InternalChildren.TYPE.name()); - } - - /** - * Set the type of children documents. This parameter is compulsory. 
- */ - public ChildrenBuilder childType(String childType) { - this.childType = childType; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (childType == null) { - throw new SearchSourceBuilderException("child_type must be set on children aggregation [" + getName() + "]"); - } - builder.field("type", childType); - return builder.endObject(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java index b4e2c88fd08..a5f71334485 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java @@ -67,7 +67,7 @@ public class ChildrenParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new ParentToChildrenAggregator.Factory(null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new ParentToChildrenAggregator.Factory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java deleted file mode 100644 index 45d40bd34ba..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregationBuilder.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.filter; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; - -/** - * Builder for the {@link Filter} aggregation. - */ -public class FilterAggregationBuilder extends AggregationBuilder { - - private QueryBuilder filter; - - /** - * Sole constructor. - */ - public FilterAggregationBuilder(String name) { - super(name, InternalFilter.TYPE.name()); - } - - /** - * Set the filter to use, only documents that match this filter will fall - * into the bucket defined by this {@link Filter} aggregation. 
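ChildrenParser above shows the shape every parser takes in this patch: getFactoryPrototypes() returns a single empty prototype instead of an array, so AggregatorParsers can register it unconditionally. The registration loop therefore reduces to:

    for (Aggregator.Parser parser : aggParsers) {
        aggParsersBuilder.put(parser.type(), parser);
        // exactly one prototype per parser, no null guard needed any more
        namedWriteableRegistry.registerPrototype(AggregatorFactory.class, parser.getFactoryPrototypes());
    }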
- */ - public FilterAggregationBuilder filter(QueryBuilder filter) { - this.filter = filter; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - if (filter == null) { - throw new SearchSourceBuilderException("filter must be set on filter aggregation [" + getName() + "]"); - } - filter.toXContent(builder, params); - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java index 00953512dbe..c79b4477f01 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterParser.java @@ -52,8 +52,8 @@ public class FilterParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new FilterAggregator.Factory(null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new FilterAggregator.Factory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java deleted file mode 100644 index 6f61a891648..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregationBuilder.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.filters; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -/** - * Builder for the {@link Filters} aggregation. - */ -public class FiltersAggregationBuilder extends AggregationBuilder { - - private Map keyedFilters = null; - private List nonKeyedFilters = null; - private Boolean otherBucket; - private String otherBucketKey; - - /** - * Sole constructor. - */ - public FiltersAggregationBuilder(String name) { - super(name, InternalFilters.TYPE.name()); - } - - /** - * Add a new filter with the given key. - * NOTE: if a filter was already defined for this key, then this filter will replace it. 
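With FilterAggregationBuilder removed, callers construct the factory directly. The prototype above is built with two arguments, which suggests a (name, filter) constructor; the sketch below assumes that signature and uses QueryBuilders.termQuery purely as an example query:

    // Assumed signature: FilterAggregator.Factory(String name, QueryBuilder filter)
    FilterAggregator.Factory errors =
            new FilterAggregator.Factory("errors", QueryBuilders.termQuery("level", "error"));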
- * NOTE: the same {@link FiltersAggregationBuilder} cannot have both keyed and non-keyed filters - */ - public FiltersAggregationBuilder filter(String key, QueryBuilder filter) { - if (keyedFilters == null) { - keyedFilters = new LinkedHashMap<>(); - } - keyedFilters.put(key, filter); - return this; - } - - /** - * Add a new filter with no key. - * NOTE: the same {@link FiltersAggregationBuilder} cannot have both keyed and non-keyed filters. - */ - public FiltersAggregationBuilder filter(QueryBuilder filter) { - if (nonKeyedFilters == null) { - nonKeyedFilters = new ArrayList<>(); - } - nonKeyedFilters.add(filter); - return this; - } - - /** - * Include a bucket for documents not matching any filter - */ - public FiltersAggregationBuilder otherBucket(boolean otherBucket) { - this.otherBucket = otherBucket; - return this; - } - - /** - * The key to use for the bucket for documents not matching any filter. Will - * implicitly enable the other bucket if set. - */ - public FiltersAggregationBuilder otherBucketKey(String otherBucketKey) { - this.otherBucketKey = otherBucketKey; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (keyedFilters == null && nonKeyedFilters == null) { - throw new SearchSourceBuilderException("At least one filter must be set on filter aggregation [" + getName() + "]"); - } - if (keyedFilters != null && nonKeyedFilters != null) { - throw new SearchSourceBuilderException("Cannot add both keyed and non-keyed filters to filters aggregation"); - } - - if (keyedFilters != null) { - builder.startObject(FiltersParser.FILTERS_FIELD.getPreferredName()); - for (Map.Entry entry : keyedFilters.entrySet()) { - builder.field(entry.getKey()); - entry.getValue().toXContent(builder, params); - } - builder.endObject(); - } - if (nonKeyedFilters != null) { - builder.startArray(FiltersParser.FILTERS_FIELD.getPreferredName()); - for (QueryBuilder filterBuilder : nonKeyedFilters) { - filterBuilder.toXContent(builder, params); - } - builder.endArray(); - - } - if (otherBucketKey != null) { - builder.field(FiltersParser.OTHER_BUCKET_KEY_FIELD.getPreferredName(), otherBucketKey); - } - if (otherBucket != null) { - builder.field(FiltersParser.OTHER_BUCKET_FIELD.getPreferredName(), otherBucket); - } - return builder.endObject(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java index 955802ec2ff..e0866cf8270 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java @@ -45,6 +45,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Objects; @@ -210,7 +211,11 @@ public class FiltersAggregator extends BucketsAggregator { * @param filters * the KeyedFilters to use with this aggregation. */ - public Factory(String name, List filters) { + public Factory(String name, KeyedFilter... 
filters) { + this(name, Arrays.asList(filters)); + } + + private Factory(String name, List filters) { super(name, InternalFilters.TYPE); this.filters = filters; this.keyed = true; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersParser.java index 8a9beecac7c..cd38f6d0e3e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersParser.java @@ -32,7 +32,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; /** @@ -128,7 +127,7 @@ public class FiltersParser implements Aggregator.Parser { FiltersAggregator.Factory factory; if (keyedFilters != null) { - factory = new FiltersAggregator.Factory(aggregationName, keyedFilters); + factory = new FiltersAggregator.Factory(aggregationName, keyedFilters.toArray(new FiltersAggregator.KeyedFilter[keyedFilters.size()])); } else { factory = new FiltersAggregator.Factory(aggregationName, nonKeyedFilters.toArray(new QueryBuilder[nonKeyedFilters.size()])); } @@ -142,8 +141,8 @@ public class FiltersParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new FiltersAggregator.Factory(null, Collections.emptyList()) }; + public AggregatorFactory getFactoryPrototypes() { + return new FiltersAggregator.Factory(null, new FiltersAggregator.KeyedFilter[0]); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridBuilder.java deleted file mode 100644 index a1f12f465ca..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridBuilder.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.aggregations.bucket.geogrid; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; - -import java.io.IOException; - -/** - * Creates an aggregation based on bucketing points into GeoHashes - */ -public class GeoHashGridBuilder extends AggregationBuilder { - - - private String field; - private int precision = GeoHashGridParams.DEFAULT_PRECISION; - private int requiredSize = GeoHashGridParams.DEFAULT_MAX_NUM_CELLS; - private int shardSize = 0; - - /** - * Sole constructor. 
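The keyed form of the filters aggregation is now expressed through the KeyedFilter varargs constructor above (the List variant stays private for the Arrays.asList delegation), while FiltersParser builds the non-keyed form from a QueryBuilder array. A hedged sketch of keyed usage, assuming KeyedFilter pairs a bucket key with a QueryBuilder:

    // Assumed: FiltersAggregator.KeyedFilter(String key, QueryBuilder filter)
    FiltersAggregator.Factory byLevel = new FiltersAggregator.Factory("by_level",
            new FiltersAggregator.KeyedFilter("errors", QueryBuilders.termQuery("level", "error")),
            new FiltersAggregator.KeyedFilter("warnings", QueryBuilders.termQuery("level", "warn")));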
- */ - public GeoHashGridBuilder(String name) { - super(name, InternalGeoHashGrid.TYPE.name()); - } - - /** - * Set the field to use to get geo points. - */ - public GeoHashGridBuilder field(String field) { - this.field = field; - return this; - } - - /** - * Set the geohash precision to use for this aggregation. The higher the - * precision, the more fine-grained this aggregation will be. - */ - public GeoHashGridBuilder precision(int precision) { - this.precision = GeoHashGridParams.checkPrecision(precision); - return this; - } - - /** - * Set the number of buckets to return. - */ - public GeoHashGridBuilder size(int requiredSize) { - this.requiredSize = requiredSize; - return this; - } - - /** - * Expert: Set the number of buckets to get on each shard to improve - * accuracy. - */ - public GeoHashGridBuilder shardSize(int shardSize) { - this.shardSize = shardSize; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (field != null) { - builder.field("field", field); - } - if (precision != GeoHashGridParams.DEFAULT_PRECISION) { - builder.field(GeoHashGridParams.FIELD_PRECISION.getPreferredName(), precision); - } - if (requiredSize != GeoHashGridParams.DEFAULT_MAX_NUM_CELLS) { - builder.field(GeoHashGridParams.FIELD_SIZE.getPreferredName(), requiredSize); - } - if (shardSize != 0) { - builder.field(GeoHashGridParams.FIELD_SHARD_SIZE.getPreferredName(), shardSize); - } - - return builder.endObject(); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java index 7f13a1b896d..091daba20c6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridParser.java @@ -72,8 +72,8 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser { return InternalGeoHashGrid.TYPE.name(); } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new GeoGridFactory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new GeoGridFactory(null); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalBuilder.java deleted file mode 100644 index 3e9f2ba79a1..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalBuilder.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
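For reference while migrating, the GeoHashGridBuilder being deleted here only serialized non-default values: field, precision (validated by GeoHashGridParams.checkPrecision), size and shard_size, with 0 meaning an unset shard size. Its fluent use looked like this (values are illustrative):

    // Removed API, shown for contrast with the GeoGridFactory prototype that replaces it.
    GeoHashGridBuilder grid = new GeoHashGridBuilder("cells")
            .field("location")   // written as "field"
            .precision(5)        // geohash precision, checked by checkPrecision
            .size(100)           // number of buckets to return
            .shardSize(500);     // per-shard bucket count, for accuracy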
- */ - -package org.elasticsearch.search.aggregations.bucket.global; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link Global} aggregation. - */ -public class GlobalBuilder extends AggregationBuilder { - - /** - * Sole constructor. - */ - public GlobalBuilder(String name) { - super(name, InternalGlobal.TYPE.name()); - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().endObject(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalParser.java index 52ab6f02001..97b6a33beb6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalParser.java @@ -42,8 +42,8 @@ public class GlobalParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new GlobalAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new GlobalAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java deleted file mode 100644 index e4f37123565..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramBuilder.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.histogram; - -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; -import org.joda.time.DateTime; - -import java.io.IOException; - -/** - * Builder for the {@code DateHistogram} aggregation. - */ -public class DateHistogramBuilder extends ValuesSourceAggregationBuilder { - - private Object interval; - private Histogram.Order order; - private Long minDocCount; - private Object extendedBoundsMin; - private Object extendedBoundsMax; - private String timeZone; - private String format; - private String offset; - - /** - * Sole constructor. - */ - public DateHistogramBuilder(String name) { - super(name, InternalDateHistogram.TYPE.name()); - } - - /** - * Set the interval in milliseconds. 
- */ - public DateHistogramBuilder interval(long interval) { - this.interval = interval; - return this; - } - - /** - * Set the interval. - */ - public DateHistogramBuilder interval(DateHistogramInterval interval) { - this.interval = interval; - return this; - } - - /** - * Set the order by which the buckets will be returned. - */ - public DateHistogramBuilder order(Histogram.Order order) { - this.order = order; - return this; - } - - /** - * Set the minimum document count per bucket. Buckets with less documents - * than this min value will not be returned. - */ - public DateHistogramBuilder minDocCount(long minDocCount) { - this.minDocCount = minDocCount; - return this; - } - - /** - * Set the timezone in which to translate dates before computing buckets. - */ - public DateHistogramBuilder timeZone(String timeZone) { - this.timeZone = timeZone; - return this; - } - - /** - * @param offset sets the offset of time intervals in this histogram - * @return the current builder - */ - public DateHistogramBuilder offset(String offset) { - this.offset = offset; - return this; - } - - /** - * Set the format to use for dates. - */ - public DateHistogramBuilder format(String format) { - this.format = format; - return this; - } - - /** - * Set extended bounds for the histogram. In case the lower value in the - * histogram would be greater than min or the upper value would - * be less than max, empty buckets will be generated. - */ - public DateHistogramBuilder extendedBounds(Long min, Long max) { - extendedBoundsMin = min; - extendedBoundsMax = max; - return this; - } - - /** - * Set extended bounds for the histogram. In case the lower value in the - * histogram would be greater than min or the upper value would - * be less than max, empty buckets will be generated. - */ - public DateHistogramBuilder extendedBounds(String min, String max) { - extendedBoundsMin = min; - extendedBoundsMax = max; - return this; - } - - /** - * Set extended bounds for the histogram. In case the lower value in the - * histogram would be greater than min or the upper value would - * be less than max, empty buckets will be generated. 
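The DateHistogramBuilder being deleted accepted either a millisecond interval or a DateHistogramInterval expression, plus order, min_doc_count, time_zone, offset, format and extended bounds; a numeric interval was rewritten to a TimeValue string on serialization. A sketch of the retiring fluent API (values are illustrative):

    // Removed API; DateHistogramInterval takes the interval expression as a string.
    DateHistogramBuilder byDay = new DateHistogramBuilder("by_day")
            .interval(new DateHistogramInterval("1d"))
            .minDocCount(1)
            .timeZone("Europe/London")
            .offset("+6h")
            .extendedBounds("2015-01-01", "2015-12-31");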
- */ - public DateHistogramBuilder extendedBounds(DateTime min, DateTime max) { - extendedBoundsMin = min; - extendedBoundsMax = max; - return this; - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (interval == null) { - throw new SearchSourceBuilderException("[interval] must be defined for histogram aggregation [" + getName() + "]"); - } - if (interval instanceof Number) { - interval = TimeValue.timeValueMillis(((Number) interval).longValue()).toString(); - } - builder.field("interval", interval); - - if (minDocCount != null) { - builder.field("min_doc_count", minDocCount); - } - - if (order != null) { - builder.field("order"); - order.toXContent(builder, params); - } - - if (timeZone != null) { - builder.field("time_zone", timeZone); - } - - if (offset != null) { - builder.field("offset", offset); - } - - if (format != null) { - builder.field("format", format); - } - - if (extendedBoundsMin != null || extendedBoundsMax != null) { - builder.startObject(ExtendedBounds.EXTENDED_BOUNDS_FIELD.getPreferredName()); - if (extendedBoundsMin != null) { - builder.field("min", extendedBoundsMin); - } - if (extendedBoundsMax != null) { - builder.field("max", extendedBoundsMax); - } - builder.endObject(); - } - - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java index ba260417c01..cd2c8547b77 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramInterval.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; +import java.util.Objects; /** * The interval the date histogram is based on. 
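The DateHistogramInterval hunk that follows adds expression-based equals() and hashCode(), which the refactored factories rely on for their own doEquals()/doHashCode() implementations. The contract it introduces, in short:

    DateHistogramInterval a = new DateHistogramInterval("1d");
    DateHistogramInterval b = new DateHistogramInterval("1d");
    // Equal expression strings now mean equal intervals with matching hash codes.
    assert a.equals(b);
    assert a.hashCode() == b.hashCode();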
@@ -74,6 +75,23 @@ public class DateHistogramInterval implements Writeable { return expression; } + @Override + public int hashCode() { + return Objects.hash(expression); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DateHistogramInterval other = (DateHistogramInterval) obj; + return Objects.equals(expression, other.expression); + } + @Override public DateHistogramInterval readFrom(StreamInput in) throws IOException { return new DateHistogramInterval(in.readString()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java index 1cb58f3c082..89b864ca29f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramParser.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.rounding.Rounding; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator.DateHistogramFactory; import org.elasticsearch.search.aggregations.support.ValueType; @@ -97,15 +96,11 @@ public class DateHistogramParser extends HistogramParser { @Override protected long parseStringOffset(String offset) throws IOException { - if (offset.charAt(0) == '-') { - return -TimeValue.parseTimeValue(offset.substring(1), null, getClass().getSimpleName() + ".parseOffset").millis(); - } - int beginIndex = offset.charAt(0) == '+' ? 
1 : 0; - return TimeValue.parseTimeValue(offset.substring(beginIndex), null, getClass().getSimpleName() + ".parseOffset").millis(); + return DateHistogramFactory.parseStringOffset(offset); } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { HistogramAggregator.DateHistogramFactory.PROTOTYPE }; + public AggregatorFactory getFactoryPrototypes() { + return HistogramAggregator.DateHistogramFactory.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java index 5a2cd584557..91a3d232e8c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBounds.java @@ -59,6 +59,11 @@ public class ExtendedBounds implements ToXContent { this.max = max; } + public ExtendedBounds(String minAsStr, String maxAsStr) { + this.minAsStr = minAsStr; + this.maxAsStr = maxAsStr; + } + void processAndValidate(String aggName, SearchContext context, ValueParser parser) { assert parser != null; if (minAsStr != null) { @@ -90,6 +95,8 @@ public class ExtendedBounds implements ToXContent { } else { out.writeBoolean(false); } + out.writeOptionalString(minAsStr); + out.writeOptionalString(maxAsStr); } static ExtendedBounds readFrom(StreamInput in) throws IOException { @@ -100,6 +107,8 @@ public class ExtendedBounds implements ToXContent { if (in.readBoolean()) { bounds.max = in.readLong(); } + bounds.minAsStr = in.readOptionalString(); + bounds.maxAsStr = in.readOptionalString(); return bounds; } @@ -139,9 +148,13 @@ public class ExtendedBounds implements ToXContent { builder.startObject(EXTENDED_BOUNDS_FIELD.getPreferredName()); if (min != null) { builder.field(MIN_FIELD.getPreferredName(), min); + } else { + builder.field(MIN_FIELD.getPreferredName(), minAsStr); } if (max != null) { builder.field(MAX_FIELD.getPreferredName(), max); + } else { + builder.field(MAX_FIELD.getPreferredName(), maxAsStr); } builder.endObject(); return builder; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java index 9d57cb273e0..d357562562c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java @@ -162,7 +162,7 @@ public class HistogramAggregator extends BucketsAggregator { Releasables.close(bucketOrds); } - public static class Factory> extends ValuesSourceAggregatorFactory> { + public static class Factory> extends ValuesSourceAggregatorFactory { public static final Factory PROTOTYPE = new Factory(""); @@ -286,7 +286,8 @@ public class HistogramAggregator extends BucketsAggregator { @Override protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(Rounding.Interval.INTERVAL_FIELD.getPreferredName(), interval); + builder.field(Rounding.Interval.INTERVAL_FIELD.getPreferredName()); + doXContentInterval(builder, params); builder.field(Rounding.OffsetRounding.OFFSET_FIELD.getPreferredName(), offset); if (order != null) { @@ -305,6 +306,11 @@ public class HistogramAggregator extends BucketsAggregator { return builder; } + protected 
XContentBuilder doXContentInterval(XContentBuilder builder, Params params) throws IOException { + builder.value(interval); + return builder; + } + @Override public String getWriteableName() { return InternalHistogram.TYPE.name(); @@ -413,6 +419,20 @@ public class HistogramAggregator extends BucketsAggregator { return this; } + public DateHistogramFactory offset(String offset) { + return offset(parseStringOffset(offset)); + } + + protected static long parseStringOffset(String offset) { + if (offset.charAt(0) == '-') { + return -TimeValue.parseTimeValue(offset.substring(1), null, DateHistogramFactory.class.getSimpleName() + ".parseOffset") + .millis(); + } + int beginIndex = offset.charAt(0) == '+' ? 1 : 0; + return TimeValue.parseTimeValue(offset.substring(beginIndex), null, DateHistogramFactory.class.getSimpleName() + ".parseOffset") + .millis(); + } + public DateHistogramInterval dateHistogramInterval() { return dateHistogramInterval; } @@ -420,6 +440,7 @@ public class HistogramAggregator extends BucketsAggregator { @Override protected Rounding createRounding() { TimeZoneRounding.Builder tzRoundingBuilder; + if (dateHistogramInterval != null) { DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString()); if (dateTimeUnit != null) { tzRoundingBuilder = TimeZoneRounding.builder(dateTimeUnit); @@ -428,6 +449,10 @@ public class HistogramAggregator extends BucketsAggregator { tzRoundingBuilder = TimeZoneRounding.builder(TimeValue.parseTimeValue(dateHistogramInterval.toString(), null, getClass() .getSimpleName() + ".interval")); } + } else { + // the interval is an integer time value in millis? + tzRoundingBuilder = TimeZoneRounding.builder(TimeValue.timeValueMillis(interval())); + } if (timeZone() != null) { tzRoundingBuilder.timeZone(timeZone()); } @@ -454,6 +479,16 @@ public class HistogramAggregator extends BucketsAggregator { return InternalDateHistogram.TYPE.name(); } + @Override + protected XContentBuilder doXContentInterval(XContentBuilder builder, Params params) throws IOException { + if (dateHistogramInterval == null) { + super.doXContentInterval(builder, params); + } else { + builder.value(dateHistogramInterval.toString()); + } + return builder; + } + @Override protected DateHistogramFactory createFactoryFromStream(String name, StreamInput in) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramBuilder.java deleted file mode 100644 index 0e965a59b1c..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramBuilder.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
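Offset parsing now lives on DateHistogramFactory (DateHistogramParser above just delegates to it), turning a signed duration string into milliseconds. Illustrative expectations for parseStringOffset, plus a hedged usage of the new offset(String) setter; the name-only DateHistogramFactory constructor is assumed to mirror Factory(String):

    // "+1h"  ->  3_600_000L   leading '+' skipped, remainder parsed as a TimeValue
    // "-30m" -> -1_800_000L   leading '-' negates the parsed TimeValue
    // "2h"   ->  7_200_000L   no sign behaves like '+'
    DateHistogramFactory byDay = new DateHistogramFactory("by_day").offset("+6h"); // assumed ctor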
- */ - -package org.elasticsearch.search.aggregations.bucket.histogram; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; - -/** - * Builder for the {@link Histogram} aggregation. - */ -public class HistogramBuilder extends ValuesSourceAggregationBuilder { - - private Long interval; - private Histogram.Order order; - private Long minDocCount; - private Long extendedBoundsMin; - private Long extendedBoundsMax; - private Long offset; - - /** - * Constructs a new histogram aggregation builder. - * - * @param name The name of the aggregation (will serve as the unique identifier for the aggregation result in the response) - */ - public HistogramBuilder(String name) { - super(name, InternalHistogram.TYPE.name()); - } - - /** - * Sets the interval for the histogram. - * - * @param interval The interval for the histogram - * @return This builder - */ - public HistogramBuilder interval(long interval) { - this.interval = interval; - return this; - } - - /** - * Sets the order by which the buckets will be returned. - * - * @param order The order by which the buckets will be returned - * @return This builder - */ - public HistogramBuilder order(Histogram.Order order) { - this.order = order; - return this; - } - - /** - * Sets the minimum document count per bucket. Buckets with less documents than this min value will not be returned. - * - * @param minDocCount The minimum document count per bucket - * @return This builder - */ - public HistogramBuilder minDocCount(long minDocCount) { - this.minDocCount = minDocCount; - return this; - } - - /** - * Set extended bounds for the histogram. In case the lower value in the - * histogram would be greater than min or the upper value would - * be less than max, empty buckets will be generated. - */ - public HistogramBuilder extendedBounds(Long min, Long max) { - extendedBoundsMin = min; - extendedBoundsMax = max; - return this; - } - - /** - * Set the offset to apply to shift bucket boundaries. 
- */ - public HistogramBuilder offset(long offset) { - this.offset = offset; - return this; - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (interval == null) { - throw new SearchSourceBuilderException("[interval] must be defined for histogram aggregation [" + getName() + "]"); - } - builder.field("interval", interval); - - if (order != null) { - builder.field("order"); - order.toXContent(builder, params); - } - - if (offset != null) { - builder.field("offset", offset); - } - - if (minDocCount != null) { - builder.field("min_doc_count", minDocCount); - } - - if (extendedBoundsMin != null || extendedBoundsMax != null) { - builder.startObject(ExtendedBounds.EXTENDED_BOUNDS_FIELD.getPreferredName()); - if (extendedBoundsMin != null) { - builder.field("min", extendedBoundsMin); - } - if (extendedBoundsMax != null) { - builder.field("max", extendedBoundsMax); - } - builder.endObject(); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java index 76e5acc5ffc..2dcf706dabb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramParser.java @@ -163,7 +163,7 @@ public class HistogramParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { HistogramAggregator.Factory.PROTOTYPE }; + public AggregatorFactory getFactoryPrototypes() { + return HistogramAggregator.Factory.PROTOTYPE; } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingBuilder.java deleted file mode 100644 index 9f51fd0a2c6..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingBuilder.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.missing; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link Missing} aggregation. - */ -public class MissingBuilder extends AggregationBuilder { - - private String field; - - /** - * Sole constructor. - */ - public MissingBuilder(String name) { - super(name, InternalMissing.TYPE.name()); - } - - /** - * Set the field to count missing values on. 
- */ - public MissingBuilder field(String field) { - this.field = field; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (field != null) { - builder.field("field", field); - } - return builder.endObject(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java index 640ae52a14a..c242a35d1b4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/missing/MissingParser.java @@ -53,7 +53,7 @@ public class MissingParser extends AnyValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new MissingAggregator.Factory(null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new MissingAggregator.Factory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedBuilder.java deleted file mode 100644 index b375f098408..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedBuilder.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.nested; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; - -/** - * Builder for the {@link Nested} aggregation. - */ -public class NestedBuilder extends AggregationBuilder { - - private String path; - - /** - * Sole constructor. - */ - public NestedBuilder(String name) { - super(name, InternalNested.TYPE.name()); - } - - /** - * Set the path to use for this nested aggregation. The path must match - * the path to a nested object in the mappings. This parameter is - * compulsory. 
- */ - public NestedBuilder path(String path) { - this.path = path; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (path == null) { - throw new SearchSourceBuilderException("nested path must be set on nested aggregation [" + getName() + "]"); - } - builder.field("path", path); - return builder.endObject(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedParser.java index 651b63d6652..00e79737ade 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedParser.java @@ -66,7 +66,7 @@ public class NestedParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new NestedAggregator.Factory(null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new NestedAggregator.Factory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedBuilder.java deleted file mode 100644 index 591655ed9aa..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedBuilder.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.nested; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link ReverseNested} aggregation. - */ -public class ReverseNestedBuilder extends AggregationBuilder { - - private String path; - - /** - * Sole constructor. - */ - public ReverseNestedBuilder(String name) { - super(name, InternalReverseNested.TYPE.name()); - } - - /** - * Set the path to use for this nested aggregation. The path must match - * the path to a nested object in the mappings. If it is not specified - * then this aggregation will go back to the root document. 
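The two builders deleted in this stretch differ only in whether path is required: NestedBuilder refused to serialize without one, while ReverseNestedBuilder treats a missing path as joining back to the root document. For contrast:

    // Removed APIs, shown to highlight the differing path semantics.
    NestedBuilder comments = new NestedBuilder("comments")
            .path("comments");                 // required: must point at a nested object in the mappings

    ReverseNestedBuilder backToRoot = new ReverseNestedBuilder("back_to_root");
    // no path(): the reverse_nested aggregation joins back to the root document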
- */ - public ReverseNestedBuilder path(String path) { - this.path = path; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (path != null) { - builder.field("path", path); - } - return builder.endObject(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedParser.java index 6e42e914985..b01b6396cb1 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedParser.java @@ -65,7 +65,7 @@ public class ReverseNestedParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new ReverseNestedAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new ReverseNestedAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java deleted file mode 100644 index c4f0c7600e7..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/AbstractRangeBuilder.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.bucket.range; - -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -/** - * - */ -public abstract class AbstractRangeBuilder> extends ValuesSourceAggregationBuilder { - - protected static class Range implements ToXContent { - - private String key; - private Object from; - private Object to; - - public Range(String key, Object from, Object to) { - this.key = key; - this.from = from; - this.to = to; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (key != null) { - builder.field("key", key); - } - if (from != null) { - builder.field("from", from); - } - if (to != null) { - builder.field("to", to); - } - return builder.endObject(); - } - } - - protected List ranges = new ArrayList<>(); - - protected AbstractRangeBuilder(String name, String type) { - super(name, type); - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (ranges.isEmpty()) { - throw new SearchSourceBuilderException("at least one range must be defined for range aggregation [" + getName() + "]"); - } - builder.startArray("ranges"); - for (Range range : ranges) { - range.toXContent(builder, params); - } - return builder.endArray(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index ae6f3e6a25c..3550c9d0495 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -75,8 +75,8 @@ public class RangeAggregator extends BucketsAggregator { protected double to = Double.POSITIVE_INFINITY; protected String toAsStr; - public Range(String key, double from, double to) { - this(key, from, null, to, null); + public Range(String key, Double from, Double to) { + this(key, from == null ? Double.NEGATIVE_INFINITY : from, null, to == null ? 
Double.POSITIVE_INFINITY : to, null); } public Range(String key, String from, String to) { @@ -396,20 +396,25 @@ public class RangeAggregator extends BucketsAggregator { } } - public static class Factory> extends ValuesSourceAggregatorFactory { + public static abstract class AbstractFactory, R extends Range> + extends ValuesSourceAggregatorFactory { private final InternalRange.Factory rangeFactory; - private final List ranges; + private List ranges = new ArrayList<>(); private boolean keyed = false; - public Factory(String name, List ranges) { - this(name, InternalRange.FACTORY, ranges); - } - - protected Factory(String name, InternalRange.Factory rangeFactory, List ranges) { + protected AbstractFactory(String name, InternalRange.Factory rangeFactory) { super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType()); this.rangeFactory = rangeFactory; - this.ranges = ranges; + } + + public AF addRange(R range) { + ranges.add(range); + return (AF) this; + } + + public List ranges() { + return ranges; } public AF keyed(boolean keyed) { @@ -443,19 +448,12 @@ public class RangeAggregator extends BucketsAggregator { @Override protected AF innerReadFrom(String name, ValuesSourceType valuesSourceType, ValueType targetValueType, StreamInput in) throws IOException { - Factory factory = createFactoryFromStream(name, in); + AbstractFactory factory = createFactoryFromStream(name, in); factory.keyed = in.readBoolean(); return (AF) factory; } - protected Factory createFactoryFromStream(String name, StreamInput in) throws IOException { - int size = in.readVInt(); - List ranges = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - ranges.add(Range.PROTOTYPE.readFrom(in)); - } - return new Factory(name, ranges); - } + protected abstract AbstractFactory createFactoryFromStream(String name, StreamInput in) throws IOException; @Override protected void innerWriteTo(StreamOutput out) throws IOException { @@ -473,10 +471,93 @@ public class RangeAggregator extends BucketsAggregator { @Override protected boolean innerEquals(Object obj) { - Factory other = (Factory) obj; + AbstractFactory other = (AbstractFactory) obj; return Objects.equals(ranges, other.ranges) && Objects.equals(keyed, other.keyed); } } + public static class Factory extends AbstractFactory { + + public Factory(String name) { + super(name, InternalRange.FACTORY); + } + + /** + * Add a new range to this aggregation. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + * @param to + * the upper bound on the distances, exclusive + */ + public Factory addRange(String key, double from, double to) { + addRange(new Range(key, from, to)); + return this; + } + + /** + * Same as {@link #addRange(String, double, double)} but the key will be + * automatically generated based on from and + * to. + */ + public Factory addRange(double from, double to) { + return addRange(null, from, to); + } + + /** + * Add a new range with no lower bound. + * + * @param key + * the key to use for this range in the response + * @param to + * the upper bound on the distances, exclusive + */ + public Factory addUnboundedTo(String key, double to) { + addRange(new Range(key, null, to)); + return this; + } + + /** + * Same as {@link #addUnboundedTo(String, double)} but the key will be + * computed automatically. + */ + public Factory addUnboundedTo(double to) { + return addUnboundedTo(null, to); + } + + /** + * Add a new range with no upper bound. 
+ * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + */ + public Factory addUnboundedFrom(String key, double from) { + addRange(new Range(key, from, null)); + return this; + } + + /** + * Same as {@link #addUnboundedFrom(String, double)} but the key will be + * computed automatically. + */ + public Factory addUnboundedFrom(double from) { + return addUnboundedFrom(null, from); + } + + @Override + protected Factory createFactoryFromStream(String name, StreamInput in) throws IOException { + int size = in.readVInt(); + Factory factory = new Factory(name); + for (int i = 0; i < size; i++) { + factory.addRange(Range.PROTOTYPE.readFrom(in)); + } + return factory; + } + } + } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeBuilder.java deleted file mode 100644 index c7723972b07..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeBuilder.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.aggregations.bucket.range; - -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder.Range} aggregation. - */ -public class RangeBuilder extends AbstractRangeBuilder { - - private String format; - - /** - * Sole constructor. - */ - public RangeBuilder(String name) { - super(name, InternalRange.TYPE.name()); - } - - /** - * Add a new range to this aggregation. - * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - * @param to the upper bound on the distances, exclusive - */ - public RangeBuilder addRange(String key, double from, double to) { - ranges.add(new Range(key, from, to)); - return this; - } - - /** - * Same as {@link #addRange(String, double, double)} but the key will be - * automatically generated based on from and to. - */ - public RangeBuilder addRange(double from, double to) { - return addRange(null, from, to); - } - - /** - * Add a new range with no lower bound. - * - * @param key the key to use for this range in the response - * @param to the upper bound on the distances, exclusive - */ - public RangeBuilder addUnboundedTo(String key, double to) { - ranges.add(new Range(key, null, to)); - return this; - } - - /** - * Same as {@link #addUnboundedTo(String, double)} but the key will be - * computed automatically. 
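Ranges that used to be assembled by RangeBuilder (deleted in this hunk) are now added directly on the factory: AbstractFactory holds the range list and keyed flag, and Factory supplies the keyed and unbounded convenience methods, with null bounds mapped to +/- infinity by the new Range(String, Double, Double) constructor. A sketch of how that reads:

    RangeAggregator.Factory priceRanges = new RangeAggregator.Factory("price_ranges")
            .addUnboundedTo("cheap", 50)          // *-50, Range(key, null, 50) under the hood
            .addRange("normal", 50, 100)          // 50-100
            .addUnboundedFrom("expensive", 100)   // 100-*, Range(key, 100, null)
            .keyed(true);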
- */ - public RangeBuilder addUnboundedTo(double to) { - return addUnboundedTo(null, to); - } - - /** - * Add a new range with no upper bound. - * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - */ - public RangeBuilder addUnboundedFrom(String key, double from) { - ranges.add(new Range(key, from, null)); - return this; - } - - /** - * Same as {@link #addUnboundedFrom(String, double)} but the key will be - * computed automatically. - */ - public RangeBuilder addUnboundedFrom(double from) { - return addUnboundedFrom(null, from); - } - - /** - * Set the format to use to display values. - */ - public RangeBuilder format(String format) { - this.format = format; - return this; - } - - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - super.doInternalXContent(builder, params); - if (format != null) { - builder.field("format", format); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeParser.java index 8c8e830d55a..710f4205b57 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeParser.java @@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -53,10 +52,13 @@ public class RangeParser extends NumericValuesSourceParser { } @Override - protected RangeAggregator.Factory createFactory(String aggregationName, ValuesSourceType valuesSourceType, + protected RangeAggregator.AbstractFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { + RangeAggregator.Factory factory = new RangeAggregator.Factory(aggregationName); List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); - RangeAggregator.Factory factory = new RangeAggregator.Factory(aggregationName, ranges); + for (Range range : ranges) { + factory.addRange(range); + } Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD); if (keyed != null) { factory.keyed(keyed); @@ -92,7 +94,7 @@ public class RangeParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new RangeAggregator.Factory(null, Collections.emptyList()) }; + public AggregatorFactory getFactoryPrototypes() { + return new RangeAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorFactory.java index aba3e1898f2..87fef876e3a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeAggregatorFactory.java @@ -20,17 +20,17 @@ package org.elasticsearch.search.aggregations.bucket.range.date; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Factory; +import 
org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.AbstractFactory; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; +import org.joda.time.DateTime; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -public class DateRangeAggregatorFactory extends Factory { +public class DateRangeAggregatorFactory extends AbstractFactory { - public DateRangeAggregatorFactory(String name, List ranges) { - super(name, InternalDateRange.FACTORY, ranges); + public DateRangeAggregatorFactory(String name) { + super(name, InternalDateRange.FACTORY); } @Override @@ -38,14 +38,217 @@ public class DateRangeAggregatorFactory extends Factoryfrom and to. + */ + public DateRangeAggregatorFactory addRange(String from, String to) { + return addRange(null, from, to); + } + + /** + * Add a new range with no lower bound. + * + * @param key + * the key to use for this range in the response + * @param to + * the upper bound on the dates, exclusive + */ + public DateRangeAggregatorFactory addUnboundedTo(String key, String to) { + addRange(new Range(key, null, to)); + return this; + } + + /** + * Same as {@link #addUnboundedTo(String, String)} but the key will be + * computed automatically. + */ + public DateRangeAggregatorFactory addUnboundedTo(String to) { + return addUnboundedTo(null, to); + } + + /** + * Add a new range with no upper bound. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + */ + public DateRangeAggregatorFactory addUnboundedFrom(String key, String from) { + addRange(new Range(key, from, null)); + return this; + } + + /** + * Same as {@link #addUnboundedFrom(String, String)} but the key will be + * computed automatically. + */ + public DateRangeAggregatorFactory addUnboundedFrom(String from) { + return addUnboundedFrom(null, from); + } + + /** + * Add a new range to this aggregation. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the dates, inclusive + * @param to + * the upper bound on the dates, exclusive + */ + public DateRangeAggregatorFactory addRange(String key, double from, double to) { + addRange(new Range(key, from, to)); + return this; + } + + /** + * Same as {@link #addRange(String, double, double)} but the key will be + * automatically generated based on from and to. + */ + public DateRangeAggregatorFactory addRange(double from, double to) { + return addRange(null, from, to); + } + + /** + * Add a new range with no lower bound. + * + * @param key + * the key to use for this range in the response + * @param to + * the upper bound on the dates, exclusive + */ + public DateRangeAggregatorFactory addUnboundedTo(String key, double to) { + addRange(new Range(key, null, to)); + return this; + } + + /** + * Same as {@link #addUnboundedTo(String, double)} but the key will be + * computed automatically. + */ + public DateRangeAggregatorFactory addUnboundedTo(double to) { + return addUnboundedTo(null, to); + } + + /** + * Add a new range with no upper bound. 
+ * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + */ + public DateRangeAggregatorFactory addUnboundedFrom(String key, double from) { + addRange(new Range(key, from, null)); + return this; + } + + /** + * Same as {@link #addUnboundedFrom(String, double)} but the key will be + * computed automatically. + */ + public DateRangeAggregatorFactory addUnboundedFrom(double from) { + return addUnboundedFrom(null, from); + } + + /** + * Add a new range to this aggregation. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the dates, inclusive + * @param to + * the upper bound on the dates, exclusive + */ + public DateRangeAggregatorFactory addRange(String key, DateTime from, DateTime to) { + addRange(new Range(key, convertDateTime(from), convertDateTime(to))); + return this; + } + + private Double convertDateTime(DateTime dateTime) { + if (dateTime == null) { + return null; + } else { + return (double) dateTime.getMillis(); + } + } + + /** + * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be + * automatically generated based on from and to. + */ + public DateRangeAggregatorFactory addRange(DateTime from, DateTime to) { + return addRange(null, from, to); + } + + /** + * Add a new range with no lower bound. + * + * @param key + * the key to use for this range in the response + * @param to + * the upper bound on the dates, exclusive + */ + public DateRangeAggregatorFactory addUnboundedTo(String key, DateTime to) { + addRange(new Range(key, null, convertDateTime(to))); + return this; + } + + /** + * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be + * computed automatically. + */ + public DateRangeAggregatorFactory addUnboundedTo(DateTime to) { + return addUnboundedTo(null, to); + } + + /** + * Add a new range with no upper bound. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + */ + public DateRangeAggregatorFactory addUnboundedFrom(String key, DateTime from) { + addRange(new Range(key, convertDateTime(from), null)); + return this; + } + + /** + * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be + * computed automatically. + */ + public DateRangeAggregatorFactory addUnboundedFrom(DateTime from) { + return addUnboundedFrom(null, from); + } + @Override protected DateRangeAggregatorFactory createFactoryFromStream(String name, StreamInput in) throws IOException { int size = in.readVInt(); - List ranges = new ArrayList<>(size); + DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory(name); for (int i = 0; i < size; i++) { - ranges.add(Range.PROTOTYPE.readFrom(in)); + factory.addRange(Range.PROTOTYPE.readFrom(in)); } - return new DateRangeAggregatorFactory(name, ranges); + return factory; } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeBuilder.java deleted file mode 100644 index 4bd57580fae..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeBuilder.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
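
A comparable sketch for the date_range factory, exercising the String and DateTime overloads added above; sketch names and date values are illustrative.

    import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorFactory;
    import org.joda.time.DateTime;

    public class DateRangeFactorySketch {
        public static DateRangeAggregatorFactory publishDateBuckets() {
            return new DateRangeAggregatorFactory("publish_dates")
                    .addUnboundedTo("older", "2015-01-01")                        // String bound
                    .addRange("2015", "2015-01-01", "2016-01-01")                 // String bounds
                    .addUnboundedFrom("recent", new DateTime(2016, 1, 1, 0, 0));  // DateTime bound, converted to millis
        }
    }
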
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.aggregations.bucket.range.date; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder; - -import java.io.IOException; - -/** - * Builder for the {@code DateRange} aggregation. - */ -public class DateRangeBuilder extends AbstractRangeBuilder { - - private String format; - - /** - * Sole constructor. - */ - public DateRangeBuilder(String name) { - super(name, InternalDateRange.TYPE.name()); - } - - /** - * Add a new range to this aggregation. - * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - * @param to the upper bound on the distances, exclusive - */ - public DateRangeBuilder addRange(String key, Object from, Object to) { - ranges.add(new Range(key, from, to)); - return this; - } - - /** - * Same as {@link #addRange(String, Object, Object)} but the key will be - * automatically generated based on from and to. - */ - public DateRangeBuilder addRange(Object from, Object to) { - return addRange(null, from, to); - } - - /** - * Add a new range with no lower bound. - * - * @param key the key to use for this range in the response - * @param to the upper bound on the distances, exclusive - */ - public DateRangeBuilder addUnboundedTo(String key, Object to) { - ranges.add(new Range(key, null, to)); - return this; - } - - /** - * Same as {@link #addUnboundedTo(String, Object)} but the key will be - * computed automatically. - */ - public DateRangeBuilder addUnboundedTo(Object to) { - return addUnboundedTo(null, to); - } - - /** - * Add a new range with no upper bound. - * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - */ - public DateRangeBuilder addUnboundedFrom(String key, Object from) { - ranges.add(new Range(key, from, null)); - return this; - } - - /** - * Same as {@link #addUnboundedFrom(String, Object)} but the key will be - * computed automatically. - */ - public DateRangeBuilder addUnboundedFrom(Object from) { - return addUnboundedFrom(null, from); - } - - /** - * Set the format to use to display values. 
- */ - public DateRangeBuilder format(String format) { - this.format = format; - return this; - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - super.doInternalXContent(builder, params); - if (format != null) { - builder.field("format", format); - } - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeParser.java index 97f9266be05..0e3e4d58e3a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeParser.java @@ -26,7 +26,6 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeParser; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -47,8 +46,11 @@ public class DateRangeParser extends RangeParser { @Override protected DateRangeAggregatorFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { + DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory(aggregationName); List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); - DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory(aggregationName, ranges); + for (Range range : ranges) { + factory.addRange(range); + } Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD); if (keyed != null) { factory.keyed(keyed); @@ -57,7 +59,7 @@ public class DateRangeParser extends RangeParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new DateRangeAggregatorFactory(null, Collections.emptyList()) }; + public AggregatorFactory getFactoryPrototypes() { + return new DateRangeAggregatorFactory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java deleted file mode 100644 index ae8fc22d769..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceBuilder.java +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.bucket.range.geodistance; - -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Locale; - -/** - * Builder for the {@link GeoDistance} aggregation. - */ -public class GeoDistanceBuilder extends AggregationBuilder { - - /** - * A range of values. - */ - public static class Range implements ToXContent { - - private String key; - private Double from; - private Double to; - - /** - * Create a new range. - * @param key the identifier of this range - * @param from the lower bound (inclusive) - * @param to the upper bound (exclusive) - */ - public Range(String key, Double from, Double to) { - this.key = key; - this.from = from; - this.to = to; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (from != null) { - builder.field("from", from.doubleValue()); - } - if (to != null) { - builder.field("to", to.doubleValue()); - } - if (key != null) { - builder.field("key", key); - } - return builder.endObject(); - } - - } - - private String field; - private DistanceUnit unit; - private GeoDistance distanceType; - private GeoPoint point; - - private List ranges = new ArrayList<>(); - - /** - * Sole constructor. - */ - public GeoDistanceBuilder(String name) { - super(name, InternalGeoDistance.TYPE.name()); - } - - /** - * Set the field to use to compute distances. - */ - public GeoDistanceBuilder field(String field) { - this.field = field; - return this; - } - - /** - * Set the unit to use for distances, default is kilometers. - */ - public GeoDistanceBuilder unit(DistanceUnit unit) { - this.unit = unit; - return this; - } - - /** - * Set the {@link GeoDistance distance type} to use, defaults to - * {@link GeoDistance#SLOPPY_ARC}. - */ - public GeoDistanceBuilder distanceType(GeoDistance distanceType) { - this.distanceType = distanceType; - return this; - } - - /** - * Set the point to calculate distances from using a - * lat,lon notation or geohash. - */ - public GeoDistanceBuilder point(String latLon) { - return point(GeoPoint.parseFromLatLon(latLon)); - } - - /** - * Set the point to calculate distances from. - */ - public GeoDistanceBuilder point(GeoPoint point) { - this.point = point; - return this; - } - - /** - * Set the point to calculate distances from using its geohash. - */ - public GeoDistanceBuilder geohash(String geohash) { - if (this.point == null) { - this.point = new GeoPoint(); - } - this.point.resetFromGeoHash(geohash); - return this; - } - - /** - * Set the latitude of the point to calculate distances from. - */ - public GeoDistanceBuilder lat(double lat) { - if (this.point == null) { - point = new GeoPoint(); - } - point.resetLat(lat); - return this; - } - - /** - * Set the longitude of the point to calculate distances from. - */ - public GeoDistanceBuilder lon(double lon) { - if (this.point == null) { - point = new GeoPoint(); - } - point.resetLon(lon); - return this; - } - - /** - * Add a new range to this aggregation. 
- * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - * @param to the upper bound on the distances, exclusive - */ - public GeoDistanceBuilder addRange(String key, double from, double to) { - ranges.add(new Range(key, from, to)); - return this; - } - - /** - * Same as {@link #addRange(String, double, double)} but the key will be - * automatically generated based on from and to. - */ - public GeoDistanceBuilder addRange(double from, double to) { - return addRange(null, from, to); - } - - /** - * Add a new range with no lower bound. - * - * @param key the key to use for this range in the response - * @param to the upper bound on the distances, exclusive - */ - public GeoDistanceBuilder addUnboundedTo(String key, double to) { - ranges.add(new Range(key, null, to)); - return this; - } - - /** - * Same as {@link #addUnboundedTo(String, double)} but the key will be - * computed automatically. - */ - public GeoDistanceBuilder addUnboundedTo(double to) { - return addUnboundedTo(null, to); - } - - /** - * Add a new range with no upper bound. - * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - */ - public GeoDistanceBuilder addUnboundedFrom(String key, double from) { - ranges.add(new Range(key, from, null)); - return this; - } - - /** - * Same as {@link #addUnboundedFrom(String, double)} but the key will be - * computed automatically. - */ - public GeoDistanceBuilder addUnboundedFrom(double from) { - return addUnboundedFrom(null, from); - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (ranges.isEmpty()) { - throw new SearchSourceBuilderException("at least one range must be defined for geo_distance aggregation [" + getName() + "]"); - } - if (point == null) { - throw new SearchSourceBuilderException("center point must be defined for geo_distance aggregation [" + getName() + "]"); - } - - if (field != null) { - builder.field("field", field); - } - - if (unit != null) { - builder.field("unit", unit); - } - - if (distanceType != null) { - builder.field("distance_type", distanceType.name().toLowerCase(Locale.ROOT)); - } - - builder.startObject("center") - .field("lat", point.lat()) - .field("lon", point.lon()) - .endObject(); - - builder.startArray("ranges"); - for (Range range : ranges) { - range.toXContent(builder, params); - } - builder.endArray(); - - return builder.endObject(); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java index 4aece20a409..4201071531a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/GeoDistanceParser.java @@ -50,7 +50,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -77,20 +76,20 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { public static class Range extends RangeAggregator.Range { - static final Range PROTOTYPE = new Range(null, -1, -1); + static final Range PROTOTYPE = new 
Range(null, null, null); - public Range(String key, double from, double to) { + public Range(String key, Double from, Double to) { super(key(key, from, to), from, to); } - private static String key(String key, double from, double to) { + private static String key(String key, Double from, Double to) { if (key != null) { return key; } StringBuilder sb = new StringBuilder(); - sb.append(from == 0 ? "*" : from); + sb.append((from == null || from == 0) ? "*" : from); sb.append("-"); - sb.append(Double.isInfinite(to) ? "*" : to); + sb.append((to == null || Double.isInfinite(to)) ? "*" : to); return sb.toString(); } @@ -115,8 +114,11 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { protected GeoDistanceFactory createFactory( String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD); + GeoDistanceFactory factory = new GeoDistanceFactory(aggregationName, origin); List ranges = (List) otherOptions.get(RangeAggregator.RANGES_FIELD); - GeoDistanceFactory factory = new GeoDistanceFactory(aggregationName, origin, ranges); + for (Range range : ranges) { + factory.addRange(range); + } Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD); if (keyed != null) { factory.keyed(keyed); @@ -199,20 +201,94 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { private final GeoPoint origin; private final InternalRange.Factory rangeFactory; - private final List ranges; + private List ranges = new ArrayList<>(); private DistanceUnit unit = DistanceUnit.DEFAULT; private GeoDistance distanceType = GeoDistance.DEFAULT; private boolean keyed = false; - public GeoDistanceFactory(String name, GeoPoint origin, List ranges) { - this(name, origin, InternalGeoDistance.FACTORY, ranges); + public GeoDistanceFactory(String name, GeoPoint origin) { + this(name, origin, InternalGeoDistance.FACTORY); } - private GeoDistanceFactory(String name, GeoPoint origin, InternalRange.Factory rangeFactory, List ranges) { + private GeoDistanceFactory(String name, GeoPoint origin, InternalRange.Factory rangeFactory) { super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType()); this.origin = origin; this.rangeFactory = rangeFactory; - this.ranges = ranges; + } + + public GeoDistanceFactory addRange(Range range) { + ranges.add(range); + return this; + } + + /** + * Add a new range to this aggregation. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + * @param to + * the upper bound on the distances, exclusive + */ + public GeoDistanceFactory addRange(String key, double from, double to) { + ranges.add(new Range(key, from, to)); + return this; + } + + /** + * Same as {@link #addRange(String, double, double)} but the key will be + * automatically generated based on from and + * to. + */ + public GeoDistanceFactory addRange(double from, double to) { + return addRange(null, from, to); + } + + /** + * Add a new range with no lower bound. + * + * @param key + * the key to use for this range in the response + * @param to + * the upper bound on the distances, exclusive + */ + public GeoDistanceFactory addUnboundedTo(String key, double to) { + ranges.add(new Range(key, null, to)); + return this; + } + + /** + * Same as {@link #addUnboundedTo(String, double)} but the key will be + * computed automatically. 
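
A usage sketch for the geo_distance factory: the origin is now a constructor argument and ranges accumulate on the factory. It assumes GeoDistanceFactory remains a public static class nested in GeoDistanceParser, as the hunk context suggests, and uses only methods shown so far (addRange, addUnboundedTo); sketch names are illustrative.

    import org.elasticsearch.common.geo.GeoPoint;
    import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.GeoDistanceFactory;

    public class GeoDistanceFactorySketch {
        public static GeoDistanceFactory ringsAroundAmsterdam() {
            return new GeoDistanceFactory("rings", new GeoPoint(52.3760, 4.894))
                    .addUnboundedTo("near", 1000.0)   // explicit key
                    .addRange(1000.0, 5000.0);        // key auto-generated from the bounds ("1000.0-5000.0")
        }
    }
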
+ */ + public GeoDistanceFactory addUnboundedTo(double to) { + return addUnboundedTo(null, to); + } + + /** + * Add a new range with no upper bound. + * + * @param key + * the key to use for this range in the response + * @param from + * the lower bound on the distances, inclusive + */ + public GeoDistanceFactory addUnboundedFrom(String key, double from) { + addRange(new Range(key, from, null)); + return this; + } + + /** + * Same as {@link #addUnboundedFrom(String, double)} but the key will be + * computed automatically. + */ + public GeoDistanceFactory addUnboundedFrom(double from) { + return addUnboundedFrom(null, from); + } + + public List range() { + return ranges; } @Override @@ -280,11 +356,10 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { String name, ValuesSourceType valuesSourceType, ValueType targetValueType, StreamInput in) throws IOException { GeoPoint origin = new GeoPoint(in.readDouble(), in.readDouble()); int size = in.readVInt(); - List ranges = new ArrayList<>(size); + GeoDistanceFactory factory = new GeoDistanceFactory(name, origin); for (int i = 0; i < size; i++) { - ranges.add(Range.PROTOTYPE.readFrom(in)); + factory.addRange(Range.PROTOTYPE.readFrom(in)); } - GeoDistanceFactory factory = new GeoDistanceFactory(name, origin, ranges); factory.keyed = in.readBoolean(); factory.distanceType = GeoDistance.readGeoDistanceFrom(in); factory.unit = DistanceUnit.readDistanceUnit(in); @@ -361,8 +436,8 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new GeoDistanceFactory(null, null, Collections.emptyList()) }; + public AggregatorFactory getFactoryPrototypes() { + return new GeoDistanceFactory(null, null); } } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeAggregatorFactory.java index ab6747a8592..4eddc328399 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeAggregatorFactory.java @@ -27,17 +27,15 @@ import org.elasticsearch.common.network.Cidrs; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; -import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Factory; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.AbstractFactory; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import java.util.Objects; -public class IPv4RangeAggregatorFactory extends Factory { +public class IPv4RangeAggregatorFactory extends AbstractFactory { - public IPv4RangeAggregatorFactory(String name, List ranges) { - super(name, InternalIPv4Range.FACTORY, ranges); + public IPv4RangeAggregatorFactory(String name) { + super(name, InternalIPv4Range.FACTORY); } @Override @@ -45,14 +43,87 @@ public class IPv4RangeAggregatorFactory extends Factory ranges = new ArrayList<>(size); + IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory(name); for (int i = 0; i < size; i++) { - ranges.add(Range.PROTOTYPE.readFrom(in)); + factory.addRange(Range.PROTOTYPE.readFrom(in)); } - return new 
IPv4RangeAggregatorFactory(name, ranges); + return factory; } public static class Range extends RangeAggregator.Range { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeBuilder.java deleted file mode 100644 index 5ac3f2a6d4e..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IPv4RangeBuilder.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.range.ipv4; - -import org.elasticsearch.common.network.Cidrs; -import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilderException; - -/** - * Builder for the {@code IPv4Range} aggregation. - */ -public class IPv4RangeBuilder extends AbstractRangeBuilder { - - /** - * Sole constructor. - */ - public IPv4RangeBuilder(String name) { - super(name, InternalIPv4Range.TYPE.name()); - } - - /** - * Add a new range to this aggregation. - * - * @param key the key to use for this range in the response - * @param from the lower bound on the distances, inclusive - * @param to the upper bound on the distances, exclusive - */ - public IPv4RangeBuilder addRange(String key, String from, String to) { - ranges.add(new Range(key, from, to)); - return this; - } - - /** - * Same as {@link #addMaskRange(String, String)} but uses the mask itself as a key. - */ - public IPv4RangeBuilder addMaskRange(String mask) { - return addMaskRange(mask, mask); - } - - /** - * Add a range based on a CIDR mask. - */ - public IPv4RangeBuilder addMaskRange(String key, String mask) { - long[] fromTo; - try { - fromTo = Cidrs.cidrMaskToMinMax(mask); - } catch (IllegalArgumentException e) { - throw new SearchSourceBuilderException("invalid CIDR mask [" + mask + "] in ip_range aggregation [" + getName() + "]", e); - } - ranges.add(new Range(key, fromTo[0] == 0 ? null : fromTo[0], fromTo[1] == InternalIPv4Range.MAX_IP ? null : fromTo[1])); - return this; - } - - /** - * Same as {@link #addRange(String, String, String)} but the key will be - * automatically generated. - */ - public IPv4RangeBuilder addRange(String from, String to) { - return addRange(null, from, to); - } - - /** - * Same as {@link #addRange(String, String, String)} but there will be no lower bound. - */ - public IPv4RangeBuilder addUnboundedTo(String key, String to) { - ranges.add(new Range(key, null, to)); - return this; - } - - /** - * Same as {@link #addUnboundedTo(String, String)} but the key will be - * generated automatically. 
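
The deleted IPv4RangeBuilder.addMaskRange above translates a CIDR mask into a half-open numeric range; the sketch below restates that translation, which the replacement IPv4RangeAggregatorFactory is expected to mirror in its own mask handling (the full hunk is not visible here). The sketch is placed in the ipv4 package only so it can reference InternalIPv4Range.MAX_IP; its names are illustrative.

    package org.elasticsearch.search.aggregations.bucket.range.ipv4;

    import org.elasticsearch.common.network.Cidrs;

    public class MaskRangeSketch {

        /** Returns {from, to} for a CIDR mask, with null standing in for an unbounded edge. */
        public static Long[] maskToBounds(String mask) {
            long[] fromTo = Cidrs.cidrMaskToMinMax(mask); // throws IllegalArgumentException on a malformed mask
            Long from = fromTo[0] == 0 ? null : fromTo[0];                        // 0.0.0.0 lower edge -> unbounded
            Long to = fromTo[1] == InternalIPv4Range.MAX_IP ? null : fromTo[1];   // 2^32 upper edge -> unbounded
            return new Long[] { from, to };
        }
    }
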
- */ - public IPv4RangeBuilder addUnboundedTo(String to) { - return addUnboundedTo(null, to); - } - - /** - * Same as {@link #addRange(String, String, String)} but there will be no upper bound. - */ - public IPv4RangeBuilder addUnboundedFrom(String key, String from) { - ranges.add(new Range(key, from, null)); - return this; - } - - /** - * Same as {@link #addUnboundedFrom(String, String)} but the key will be - * generated automatically. - */ - public IPv4RangeBuilder addUnboundedFrom(String from) { - return addUnboundedFrom(null, from); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java index 5c9af1afab1..82fe569637c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/ipv4/IpRangeParser.java @@ -29,7 +29,6 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -55,9 +54,12 @@ public class IpRangeParser extends RangeParser { @Override protected IPv4RangeAggregatorFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { + IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory(aggregationName); List ranges = (List) otherOptions .get(RangeAggregator.RANGES_FIELD); - IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory(aggregationName, ranges); + for (IPv4RangeAggregatorFactory.Range range : ranges) { + factory.addRange(range); + } Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD); if (keyed != null) { factory.keyed(keyed); @@ -66,8 +68,8 @@ public class IpRangeParser extends RangeParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new IPv4RangeAggregatorFactory(null, Collections.emptyList()) }; + public AggregatorFactory getFactoryPrototypes() { + return new IPv4RangeAggregatorFactory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerAggregationBuilder.java deleted file mode 100644 index d68e3eaa621..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerAggregationBuilder.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.bucket.sampler; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link Sampler} aggregation. - */ -public class DiversifiedSamplerAggregationBuilder extends ValuesSourceAggregationBuilder { - - private int shardSize = SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE; - - int maxDocsPerValue = SamplerAggregator.DiversifiedFactory.MAX_DOCS_PER_VALUE_DEFAULT; - String executionHint = null; - - /** - * Sole constructor. - */ - public DiversifiedSamplerAggregationBuilder(String name) { - super(name, SamplerAggregator.DiversifiedFactory.TYPE.name()); - } - - /** - * Set the max num docs to be returned from each shard. - */ - public DiversifiedSamplerAggregationBuilder shardSize(int shardSize) { - this.shardSize = shardSize; - return this; - } - - public DiversifiedSamplerAggregationBuilder maxDocsPerValue(int maxDocsPerValue) { - this.maxDocsPerValue = maxDocsPerValue; - return this; - } - - public DiversifiedSamplerAggregationBuilder executionHint(String executionHint) { - this.executionHint = executionHint; - return this; - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (shardSize != SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE) { - builder.field(SamplerAggregator.SHARD_SIZE_FIELD.getPreferredName(), shardSize); - } - - if (maxDocsPerValue != SamplerAggregator.DiversifiedFactory.MAX_DOCS_PER_VALUE_DEFAULT) { - builder.field(SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD.getPreferredName(), maxDocsPerValue); - } - if (executionHint != null) { - builder.field(SamplerAggregator.EXECUTION_HINT_FIELD.getPreferredName(), executionHint); - } - - return builder; - } - - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java index 99e9f42bdac..93ced0b81b2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerParser.java @@ -47,8 +47,7 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser { @Override protected SamplerAggregator.DiversifiedFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map otherOptions) { - SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory(aggregationName, valuesSourceType, - targetValueType); + SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory(aggregationName); Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD); if (shardSize != null) { factory.shardSize(shardSize); @@ -88,8 +87,8 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new SamplerAggregator.DiversifiedFactory(null, null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new SamplerAggregator.DiversifiedFactory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java deleted file mode 100644 index fb444e67b13..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregationBuilder.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.sampler; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link Sampler} aggregation. - */ -public class SamplerAggregationBuilder extends ValuesSourceAggregationBuilder { - - private int shardSize = SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE; - - /** - * Sole constructor. - */ - public SamplerAggregationBuilder(String name) { - super(name, InternalSampler.TYPE.name()); - } - - /** - * Set the max num docs to be returned from each shard. - */ - public SamplerAggregationBuilder shardSize(int shardSize) { - this.shardSize = shardSize; - return this; - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (shardSize != SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE) { - builder.field(SamplerAggregator.SHARD_SIZE_FIELD.getPreferredName(), shardSize); - } - - return builder; - } - - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index f768e961b4e..9c3454e0b85 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -264,8 +264,8 @@ public class SamplerAggregator extends SingleBucketAggregator { private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT; private String executionHint = null; - public DiversifiedFactory(String name, ValuesSourceType valueSourceType, ValueType valueType) { - super(name, TYPE, valueSourceType, valueType); + public DiversifiedFactory(String name) { + super(name, TYPE, ValuesSourceType.ANY, null); } /** @@ -372,7 +372,7 @@ public class SamplerAggregator extends SingleBucketAggregator { @Override protected DiversifiedFactory innerReadFrom(String name, ValuesSourceType valuesSourceType, ValueType targetValueType, StreamInput in) throws IOException { - DiversifiedFactory factory = new DiversifiedFactory(name, valuesSourceType, targetValueType); + DiversifiedFactory factory = new DiversifiedFactory(name); factory.shardSize = in.readVInt(); factory.maxDocsPerValue = in.readVInt(); 
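
A small sketch of the reworked diversified sampler factory: its constructor no longer takes a ValuesSourceType/ValueType pair (it now defaults to ANY). Only the constructor and the shardSize setter that the parser calls above are used; sketch names and values are illustrative.

    import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator;

    public class DiversifiedSamplerFactorySketch {
        public static SamplerAggregator.DiversifiedFactory diversifiedSample() {
            SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory("diversified_sample");
            factory.shardSize(200); // cap on documents collected per shard
            return factory;
        }
    }
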
factory.executionHint = in.readOptionalString(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java index 995f368f665..3259c75d91e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerParser.java @@ -68,8 +68,8 @@ public class SamplerParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new SamplerAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new SamplerAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 2505a2d00cb..3bb61d7e988 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -161,8 +161,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac return new TermsAggregator.BucketCountThresholds(bucketCountThresholds); } - public SignificantTermsAggregatorFactory(String name, ValuesSourceType valuesSourceType, ValueType valueType) { - super(name, SignificantStringTerms.TYPE, valuesSourceType, valueType); + public SignificantTermsAggregatorFactory(String name, ValueType valueType) { + super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType); } public TermsAggregator.BucketCountThresholds bucketCountThresholds() { @@ -174,6 +174,44 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac return this; } + /** + * Sets the size - indicating how many term buckets should be returned + * (defaults to 10) + */ + public SignificantTermsAggregatorFactory size(int size) { + bucketCountThresholds.setRequiredSize(size); + return this; + } + + /** + * Sets the shard_size - indicating the number of term buckets each shard + * will return to the coordinating node (the node that coordinates the + * search execution). The higher the shard size is, the more accurate the + * results are. + */ + public SignificantTermsAggregatorFactory shardSize(int shardSize) { + bucketCountThresholds.setShardSize(shardSize); + return this; + } + + /** + * Set the minimum document count terms should have in order to appear in + * the response. + */ + public SignificantTermsAggregatorFactory minDocCount(long minDocCount) { + bucketCountThresholds.setMinDocCount(minDocCount); + return this; + } + + /** + * Set the minimum document count terms should have on the shard in order to + * appear in the response. + */ + public SignificantTermsAggregatorFactory shardMinDocCount(long shardMinDocCount) { + bucketCountThresholds.setShardMinDocCount(shardMinDocCount); + return this; + } + /** * Expert: sets an execution hint to the aggregation. 
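
With SignificantTermsBuilder going away (deleted below), the convenience setters move onto the factory; here is a sketch using only the constructor and the size/shardSize/minDocCount/shardMinDocCount setters added above. Sketch names and values are illustrative.

    import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
    import org.elasticsearch.search.aggregations.support.ValueType;

    public class SignificantTermsFactorySketch {
        public static SignificantTermsAggregatorFactory unusualTerms() {
            return new SignificantTermsAggregatorFactory("unusual_terms", ValueType.STRING)
                    .size(20)           // term buckets returned to the client
                    .shardSize(100)     // candidate buckets per shard; larger is more accurate
                    .minDocCount(5)
                    .shardMinDocCount(2);
        }
    }
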
*/ @@ -399,7 +437,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac @Override protected SignificantTermsAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType, ValueType targetValueType, StreamInput in) throws IOException { - SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(name, valuesSourceType, targetValueType); + SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(name, targetValueType); factory.bucketCountThresholds = BucketCountThresholds.readFromStream(in); factory.executionHint = in.readOptionalString(); if (in.readBoolean()) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsBuilder.java deleted file mode 100644 index 6bbb3348b79..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsBuilder.java +++ /dev/null @@ -1,277 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.bucket.significant; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; - -import java.io.IOException; - -/** - * Creates an aggregation that finds interesting or unusual occurrences of terms in a result set. - *

- * This feature is marked as experimental, and may be subject to change in the future. If you - * use this feature, please let us know your experience with it! - */ -public class SignificantTermsBuilder extends AggregationBuilder { - - private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(-1, -1, -1, -1); - - private String field; - private String executionHint; - private String includePattern; - private int includeFlags; - private String excludePattern; - private int excludeFlags; - private String[] includeTerms = null; - private String[] excludeTerms = null; - private QueryBuilder filterBuilder; - private SignificanceHeuristicBuilder significanceHeuristicBuilder; - - /** - * Sole constructor. - */ - public SignificantTermsBuilder(String name) { - super(name, SignificantStringTerms.TYPE.name()); - } - - /** - * Set the field to fetch significant terms from. - */ - public SignificantTermsBuilder field(String field) { - this.field = field; - return this; - } - - /** - * Set the number of significant terms to retrieve. - */ - public SignificantTermsBuilder size(int requiredSize) { - bucketCountThresholds.setRequiredSize(requiredSize); - return this; - } - - /** - * Expert: Set the number of significant terms to retrieve on each shard. - */ - public SignificantTermsBuilder shardSize(int shardSize) { - bucketCountThresholds.setShardSize(shardSize); - return this; - } - - /** - * Only return significant terms that belong to at least minDocCount documents. - */ - public SignificantTermsBuilder minDocCount(int minDocCount) { - bucketCountThresholds.setMinDocCount(minDocCount); - return this; - } - - /** - * Set the background filter to compare to. Defaults to the whole index. - */ - public SignificantTermsBuilder backgroundFilter(QueryBuilder filter) { - this.filterBuilder = filter; - return this; - } - - /** - * Expert: set the minimum number of documents that a term should match to - * be retrieved from a shard. - */ - public SignificantTermsBuilder shardMinDocCount(int shardMinDocCount) { - bucketCountThresholds.setShardMinDocCount(shardMinDocCount); - return this; - } - - /** - * Expert: give an execution hint to this aggregation. - */ - public SignificantTermsBuilder executionHint(String executionHint) { - this.executionHint = executionHint; - return this; - } - - /** - * Define a regular expression that will determine what terms should be aggregated. The regular expression is based - * on the {@link java.util.regex.Pattern} class. - * - * @see #include(String, int) - */ - public SignificantTermsBuilder include(String regex) { - return include(regex, 0); - } - - /** - * Define a regular expression that will determine what terms should be aggregated. The regular expression is based - * on the {@link java.util.regex.Pattern} class. - * - * @see java.util.regex.Pattern#compile(String, int) - */ - public SignificantTermsBuilder include(String regex, int flags) { - if (includeTerms != null) { - throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both"); - } - this.includePattern = regex; - this.includeFlags = flags; - return this; - } - - /** - * Define a set of terms that should be aggregated. 
- */ - public SignificantTermsBuilder include(String [] terms) { - if (includePattern != null) { - throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both"); - } - this.includeTerms = terms; - return this; - } - - /** - * Define a set of terms that should be aggregated. - */ - public SignificantTermsBuilder include(long [] terms) { - if (includePattern != null) { - throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both"); - } - this.includeTerms = longsArrToStringArr(terms); - return this; - } - - private String[] longsArrToStringArr(long[] terms) { - String[] termsAsString = new String[terms.length]; - for (int i = 0; i < terms.length; i++) { - termsAsString[i] = Long.toString(terms[i]); - } - return termsAsString; - } - - - /** - * Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular - * expression is based on the {@link java.util.regex.Pattern} class. - * - * @see #exclude(String, int) - */ - public SignificantTermsBuilder exclude(String regex) { - return exclude(regex, 0); - } - - /** - * Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular - * expression is based on the {@link java.util.regex.Pattern} class. - * - * @see java.util.regex.Pattern#compile(String, int) - */ - public SignificantTermsBuilder exclude(String regex, int flags) { - if (excludeTerms != null) { - throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both"); - } - this.excludePattern = regex; - this.excludeFlags = flags; - return this; - } - - /** - * Define a set of terms that should not be aggregated. - */ - public SignificantTermsBuilder exclude(String [] terms) { - if (excludePattern != null) { - throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both"); - } - this.excludeTerms = terms; - return this; - } - - - /** - * Define a set of terms that should not be aggregated. 
- */ - public SignificantTermsBuilder exclude(long [] terms) { - if (excludePattern != null) { - throw new IllegalArgumentException("exclude clause must be an array of longs or a regex, not both"); - } - this.excludeTerms = longsArrToStringArr(terms); - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (field != null) { - builder.field("field", field); - } - bucketCountThresholds.toXContent(builder, params); - if (executionHint != null) { - builder.field(TermsAggregatorFactory.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); - } - if (includePattern != null) { - if (includeFlags == 0) { - builder.field("include", includePattern); - } else { - builder.startObject("include") - .field("pattern", includePattern) - .field("flags", includeFlags) - .endObject(); - } - } - if (includeTerms != null) { - builder.array("include", includeTerms); - } - - if (excludePattern != null) { - if (excludeFlags == 0) { - builder.field("exclude", excludePattern); - } else { - builder.startObject("exclude") - .field("pattern", excludePattern) - .field("flags", excludeFlags) - .endObject(); - } - } - if (excludeTerms != null) { - builder.array("exclude", excludeTerms); - } - - if (filterBuilder != null) { - builder.field(SignificantTermsAggregatorFactory.BACKGROUND_FILTER.getPreferredName()); - filterBuilder.toXContent(builder, params); - } - if (significanceHeuristicBuilder != null) { - significanceHeuristicBuilder.toXContent(builder, params); - } - - return builder.endObject(); - } - - /** - * Expert: set the {@link SignificanceHeuristic} to use. - */ - public SignificantTermsBuilder significanceHeuristic(SignificanceHeuristicBuilder significanceHeuristicBuilder) { - this.significanceHeuristicBuilder = significanceHeuristicBuilder; - return this; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java index 35797363c05..54f3d3aca29 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsParser.java @@ -64,8 +64,7 @@ public class SignificantTermsParser extends AbstractTermsParser { protected SignificantTermsAggregatorFactory doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint, IncludeExclude incExc, Map otherOptions) { - SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(aggregationName, valuesSourceType, - targetValueType); + SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(aggregationName, targetValueType); if (bucketCountThresholds != null) { factory.bucketCountThresholds(bucketCountThresholds); } @@ -108,8 +107,8 @@ public class SignificantTermsParser extends AbstractTermsParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new SignificantTermsAggregatorFactory(null, null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new SignificantTermsAggregatorFactory(null, null); } @Override diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java index 753c9ccb3e5..c4327cd19e8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/JLHScore.java @@ -38,7 +38,8 @@ public class JLHScore extends SignificanceHeuristic { protected static final ParseField NAMES_FIELD = new ParseField("jlh"); - private JLHScore() {} + public JLHScore() { + } @Override public String getWriteableName() { @@ -112,7 +113,7 @@ public class JLHScore extends SignificanceHeuristic { throws IOException, QueryShardException { // move to the closing bracket if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { - throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken()); + throw new ElasticsearchParseException("failed to parse [jlh] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken()); } return PROTOTYPE; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java index e6cfe9a9bf5..648c1268d7f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/PercentageScore.java @@ -38,7 +38,8 @@ public class PercentageScore extends SignificanceHeuristic { protected static final ParseField NAMES_FIELD = new ParseField("percentage"); - private PercentageScore() {} + public PercentageScore() { + } @Override public String getWriteableName() { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 1e7a0047ea5..91e949e190f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -347,7 +347,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr Map metaData) throws IOException { super(name, factories, valuesSource, order, bucketCountThresholds, null, aggregationContext, parent, collectionMode, showTermDocCountError, pipelineAggregators, metaData); - assert factories == null || factories.count() == 0; + assert factories == null || factories.countAggregators() == 0; this.segmentDocCounts = context.bigArrays().newIntArray(1, true); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java index b5e1e814790..f3f87c09dca 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalOrder.java @@ -96,7 +96,7 @@ class InternalOrder extends Terms.Order { public static boolean isCountDesc(Terms.Order 
order) { if (order == COUNT_DESC) { return true; - }else if (order instanceof CompoundOrder) { + } else if (order instanceof CompoundOrder) { // check if its a compound order with count desc and the tie breaker (term asc) CompoundOrder compoundOrder = (CompoundOrder) order; if (compoundOrder.orderElements.size() == 2 && compoundOrder.orderElements.get(0) == COUNT_DESC && compoundOrder.orderElements.get(1) == TERM_ASC) { @@ -106,6 +106,23 @@ class InternalOrder extends Terms.Order { return false; } + public static boolean isTermOrder(Terms.Order order) { + if (order == TERM_ASC) { + return true; + } else if (order == TERM_DESC) { + return true; + } else if (order instanceof CompoundOrder) { + // check if its a compound order with only a single element ordering + // by term + CompoundOrder compoundOrder = (CompoundOrder) order; + if (compoundOrder.orderElements.size() == 1 && compoundOrder.orderElements.get(0) == TERM_ASC + || compoundOrder.orderElements.get(0) == TERM_DESC) { + return true; + } + } + return false; + } + final byte id; final String key; final boolean asc; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 31285d20f8e..6e5aaac2e8e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -188,7 +188,7 @@ public abstract class InternalTerms pipelineAggregators, Map metaData) throws IOException { - if (includeExclude != null || factories.count() > 0 + if (includeExclude != null || factories.countAggregators() > 0 // we need the FieldData impl to be able to extract the // segment to global ord mapping || valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) { @@ -182,7 +180,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory orders = Collections.singletonList(Terms.Order.count(false)); + private Terms.Order order = Terms.Order.compound(Terms.Order.count(false), Terms.Order.term(true)); private IncludeExclude includeExclude = null; private String executionHint = null; private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST; @@ -190,8 +188,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory order) { - this.orders = order; + public TermsAggregatorFactory order(Terms.Order order) { + this.order = order; + return this; + } + + /** + * Sets the order in which the buckets will be returned. + */ + public TermsAggregatorFactory order(List orders) { + order(Terms.Order.compound(orders)); return this; } /** * Gets the order in which the buckets will be returned. 
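The hunk above collapses the factory's List<Terms.Order> into a single Terms.Order whose default is the compound count-descending / term-ascending order. A minimal sketch of how a caller might now express multi-criteria ordering, assuming only the constructor and setters visible in this patch; the aggregation name and value type are illustrative:

    import org.elasticsearch.search.aggregations.bucket.terms.Terms;
    import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
    import org.elasticsearch.search.aggregations.support.ValueType;

    class TermsOrderSketch {
        static TermsAggregatorFactory genresByCount() {
            // "genres" is an illustrative aggregation name, not taken from the patch.
            TermsAggregatorFactory factory = new TermsAggregatorFactory("genres", ValueType.STRING);
            // Multiple criteria are folded into one compound Terms.Order instead of
            // being carried around as a List<Terms.Order>.
            factory.order(Terms.Order.compound(
                    Terms.Order.count(false),   // primary: doc count, descending
                    Terms.Order.term(true)));   // tie-breaker: term, ascending
            return factory;
        }
    }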
*/ - public List order() { - return orders; + public Terms.Order order() { + return order; } /** @@ -281,7 +325,6 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - Terms.Order order = resolveOrder(orders); final InternalAggregation aggregation = new UnmappedTerms(name, order, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), pipelineAggregators, metaData); return new NonCollectingAggregator(name, aggregationContext, parent, factories, pipelineAggregators, metaData) { @@ -315,7 +358,6 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) throws IOException { - Terms.Order order = resolveOrder(orders); if (collectsFromSingleBucket == false) { return asMultiBucketAggregator(this, aggregationContext, parent); } @@ -415,11 +457,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory orders = new ArrayList<>(numOrders); - for (int i = 0; i < numOrders; i++) { - orders.add(InternalOrder.Streams.readOrder(in)); - } - factory.orders = orders; + factory.order = InternalOrder.Streams.readOrder(in); factory.showTermDocCountError = in.readBoolean(); return factory; } @@ -457,16 +491,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory { - - private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(-1, -1, -1, -1); - - private Terms.ValueType valueType; - private Terms.Order order; - private String includePattern; - private String excludePattern; - private String executionHint; - private SubAggCollectionMode collectionMode; - private Boolean showTermDocCountError; - private String[] includeTerms = null; - private String[] excludeTerms = null; - - /** - * Sole constructor. - */ - public TermsBuilder(String name) { - super(name, "terms"); - } - - /** - * Sets the size - indicating how many term buckets should be returned (defaults to 10) - */ - public TermsBuilder size(int size) { - bucketCountThresholds.setRequiredSize(size); - return this; - } - - /** - * Sets the shard_size - indicating the number of term buckets each shard will return to the coordinating node (the - * node that coordinates the search execution). The higher the shard size is, the more accurate the results are. - */ - public TermsBuilder shardSize(int shardSize) { - bucketCountThresholds.setShardSize(shardSize); - return this; - } - - /** - * Set the minimum document count terms should have in order to appear in the response. - */ - public TermsBuilder minDocCount(long minDocCount) { - bucketCountThresholds.setMinDocCount(minDocCount); - return this; - } - - /** - * Set the minimum document count terms should have on the shard in order to appear in the response. - */ - public TermsBuilder shardMinDocCount(long shardMinDocCount) { - bucketCountThresholds.setShardMinDocCount(shardMinDocCount); - return this; - } - - /** - * Define a regular expression that will determine what terms should be aggregated. The regular expression is based - * on the {@link RegExp} class. - * - * @see RegExp#RegExp(String) - */ - public TermsBuilder include(String regex) { - if (includeTerms != null) { - throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both"); - } - this.includePattern = regex; - return this; - } - - /** - * Define a set of terms that should be aggregated. 
- */ - public TermsBuilder include(String [] terms) { - if (includePattern != null) { - throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both"); - } - this.includeTerms = terms; - return this; - } - - /** - * Define a set of terms that should be aggregated. - */ - public TermsBuilder include(long [] terms) { - if (includePattern != null) { - throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both"); - } - this.includeTerms = longsArrToStringArr(terms); - return this; - } - - private String[] longsArrToStringArr(long[] terms) { - String[] termsAsString = new String[terms.length]; - for (int i = 0; i < terms.length; i++) { - termsAsString[i] = Long.toString(terms[i]); - } - return termsAsString; - } - - - /** - * Define a set of terms that should be aggregated. - */ - public TermsBuilder include(double [] terms) { - if (includePattern != null) { - throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both"); - } - this.includeTerms = doubleArrToStringArr(terms); - return this; - } - - private String[] doubleArrToStringArr(double[] terms) { - String[] termsAsString = new String[terms.length]; - for (int i = 0; i < terms.length; i++) { - termsAsString[i] = Double.toString(terms[i]); - } - return termsAsString; - } - - /** - * Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular - * expression is based on the {@link RegExp} class. - * - * @see RegExp#RegExp(String) - */ - public TermsBuilder exclude(String regex) { - if (excludeTerms != null) { - throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both"); - } - this.excludePattern = regex; - return this; - } - - /** - * Define a set of terms that should not be aggregated. - */ - public TermsBuilder exclude(String [] terms) { - if (excludePattern != null) { - throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both"); - } - this.excludeTerms = terms; - return this; - } - - - /** - * Define a set of terms that should not be aggregated. - */ - public TermsBuilder exclude(long [] terms) { - if (excludePattern != null) { - throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both"); - } - this.excludeTerms = longsArrToStringArr(terms); - return this; - } - - /** - * Define a set of terms that should not be aggregated. - */ - public TermsBuilder exclude(double [] terms) { - if (excludePattern != null) { - throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both"); - } - this.excludeTerms = doubleArrToStringArr(terms); - return this; - } - - - - /** - * When using scripts, the value type indicates the types of the values the script is generating. - */ - public TermsBuilder valueType(Terms.ValueType valueType) { - this.valueType = valueType; - return this; - } - - /** - * Defines the order in which the buckets will be returned. - */ - public TermsBuilder order(Terms.Order order) { - this.order = order; - return this; - } - - /** - * Expert: provide an execution hint to the aggregation. - */ - public TermsBuilder executionHint(String executionHint) { - this.executionHint = executionHint; - return this; - } - - /** - * Expert: set the collection mode. 
- */ - public TermsBuilder collectMode(SubAggCollectionMode mode) { - this.collectionMode = mode; - return this; - } - - /** - * Expert: return document count errors per term in the response. - */ - public TermsBuilder showTermDocCountError(boolean showTermDocCountError) { - this.showTermDocCountError = showTermDocCountError; - return this; - } - - @Override - protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException { - - bucketCountThresholds.toXContent(builder, params); - - if (showTermDocCountError != null) { - builder.field(TermsAggregatorFactory.SHOW_TERM_DOC_COUNT_ERROR.getPreferredName(), showTermDocCountError); - } - if (executionHint != null) { - builder.field(TermsAggregatorFactory.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); - } - if (valueType != null) { - builder.field("value_type", valueType.name().toLowerCase(Locale.ROOT)); - } - if (order != null) { - builder.field("order"); - order.toXContent(builder, params); - } - if (collectionMode != null) { - builder.field(SubAggCollectionMode.KEY.getPreferredName(), collectionMode.parseField().getPreferredName()); - } - if (includeTerms != null) { - builder.array("include", includeTerms); - } - if (includePattern != null) { - builder.field("include", includePattern); - } - if (excludeTerms != null) { - builder.array("exclude", excludeTerms); - } - if (excludePattern != null) { - builder.field("exclude", excludePattern); - } - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java index 5b8368a2189..7e300af763a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsParser.java @@ -52,7 +52,7 @@ public class TermsParser extends AbstractTermsParser { protected TermsAggregatorFactory doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint, IncludeExclude incExc, Map otherOptions) { - TermsAggregatorFactory factory = new TermsAggregatorFactory(aggregationName, valuesSourceType, targetValueType); + TermsAggregatorFactory factory = new TermsAggregatorFactory(aggregationName, targetValueType); List orderElements = (List) otherOptions.get(TermsAggregatorFactory.ORDER_FIELD); if (orderElements != null) { List orders = new ArrayList<>(orderElements.size()); @@ -97,7 +97,7 @@ public class TermsParser extends AbstractTermsParser { orderElements.add(orderParam); } else { throw new ParsingException(parser.getTokenLocation(), - "Order elements must be of type object in [" + aggregationName + "]."); + "Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "]."); } } otherOptions.put(TermsAggregatorFactory.ORDER_FIELD, orderElements); @@ -179,8 +179,8 @@ public class TermsParser extends AbstractTermsParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new TermsAggregatorFactory(null, null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new TermsAggregatorFactory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java index f6df150a4cf..eee9d4cbf90 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.terms.support; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; + import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Terms; @@ -226,6 +227,10 @@ public class IncludeExclude implements Writeable, ToXContent { this.excludeValues = null; } + public IncludeExclude(String include, String exclude) { + this(include == null ? null : new RegExp(include), exclude == null ? null : new RegExp(exclude)); + } + /** * @param includeValues The terms to be included * @param excludeValues The terms to be excluded @@ -240,6 +245,51 @@ public class IncludeExclude implements Writeable, ToXContent { this.excludeValues = excludeValues; } + public IncludeExclude(String[] includeValues, String[] excludeValues) { + this(convertToBytesRefSet(includeValues), convertToBytesRefSet(excludeValues)); + } + + public IncludeExclude(double[] includeValues, double[] excludeValues) { + this(convertToBytesRefSet(includeValues), convertToBytesRefSet(excludeValues)); + } + + public IncludeExclude(long[] includeValues, long[] excludeValues) { + this(convertToBytesRefSet(includeValues), convertToBytesRefSet(excludeValues)); + } + + private static SortedSet convertToBytesRefSet(String[] values) { + SortedSet returnSet = null; + if (values != null) { + returnSet = new TreeSet<>(); + for (String value : values) { + returnSet.add(new BytesRef(value)); + } + } + return returnSet; + } + + private static SortedSet convertToBytesRefSet(double[] values) { + SortedSet returnSet = null; + if (values != null) { + returnSet = new TreeSet<>(); + for (double value : values) { + returnSet.add(new BytesRef(String.valueOf(value))); + } + } + return returnSet; + } + + private static SortedSet convertToBytesRefSet(long[] values) { + SortedSet returnSet = null; + if (values != null) { + returnSet = new TreeSet<>(); + for (long value : values) { + returnSet.add(new BytesRef(String.valueOf(value))); + } + } + return returnSet; + } + /** * Terms adapter around doc values. */ diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregationBuilder.java deleted file mode 100644 index 56ae24bbd73..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregationBuilder.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
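The constructors added to IncludeExclude in the hunk above take over the exact-value and regex handling that the deleted builders used to perform themselves with helpers such as longsArrToStringArr. A minimal usage sketch, relying only on the constructors introduced here; the patterns and values are illustrative:

    import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;

    class IncludeExcludeSketch {
        static void examples() {
            // Regex form: strings are wrapped in Lucene RegExp by the new (String, String) constructor.
            IncludeExclude byPattern = new IncludeExclude("swi.*", "electro.*");

            // Exact-value forms: arrays are turned into SortedSet<BytesRef> by the
            // convertToBytesRefSet helpers added in this patch.
            IncludeExclude byTerms = new IncludeExclude(new String[] { "rock", "jazz" }, null);
            IncludeExclude byLongs = new IncludeExclude(new long[] { 1L, 2L, 3L }, null);
        }
    }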
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.metrics; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; - -import java.io.IOException; - -/** - * Base builder for metrics aggregations. - */ -public abstract class MetricsAggregationBuilder> extends AbstractAggregationBuilder { - - public MetricsAggregationBuilder(String name, String type) { - super(name, type); - } - - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(getName()).startObject(type); - internalXContent(builder, params); - return builder.endObject().endObject(); - } - - protected abstract void internalXContent(XContentBuilder builder, Params params) throws IOException; -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java deleted file mode 100644 index e6755486225..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/ValuesSourceMetricsAggregationBuilder.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.metrics; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.Script; - -import java.io.IOException; - -/** - * - */ -public abstract class ValuesSourceMetricsAggregationBuilder> extends MetricsAggregationBuilder { - - private String field; - private Script script; - private String format; - private Object missing; - - protected ValuesSourceMetricsAggregationBuilder(String name, String type) { - super(name, type); - } - - @SuppressWarnings("unchecked") - public B field(String field) { - this.field = field; - return (B) this; - } - - /** - * The script to use for this aggregation - */ - @SuppressWarnings("unchecked") - public B script(Script script) { - this.script = script; - return (B) this; - } - - @SuppressWarnings("unchecked") - public B format(String format) { - this.format = format; - return (B) this; - } - - /** - * Configure the value to use when documents miss a value. 
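For reference, the builders being deleted here wrap every metric as { "<name>": { "<type>": { ... } } } and add only the common values-source keys (field, script, format, missing; see the internalXContent further down). A rough sketch of equivalent hand-built XContent, with an illustrative avg aggregation; XContentBuilder.string() is assumed to be available in this codebase:

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    class MetricsSourceSketch {
        static String avgSource() throws Exception {
            // Mirrors the shape the deleted MetricsAggregationBuilder /
            // ValuesSourceMetricsAggregationBuilder pair produced.
            XContentBuilder builder = XContentFactory.jsonBuilder();
            builder.startObject()
                    .startObject("avg_price")    // aggregation name (illustrative)
                    .startObject("avg")          // aggregation type
                    .field("field", "price")     // values-source field (illustrative)
                    .field("missing", 0)         // value used for documents without a value
                    .endObject()
                    .endObject()
                    .endObject();
            return builder.string();
        }
    }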
- */ - public B missing(Object missingValue) { - this.missing = missingValue; - return (B) this; - } - - @Override - protected void internalXContent(XContentBuilder builder, Params params) throws IOException { - if (field != null) { - builder.field("field", field); - } - - if (script != null) { - builder.field("script", script); - } - - if (format != null) { - builder.field("format", format); - } - - if (missing != null) { - builder.field("missing", missing); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java index 110a368ba6d..70ebbf58a81 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgParser.java @@ -56,8 +56,8 @@ public class AvgParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new AvgAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new AvgAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java index 1b2876b66a5..33178570360 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityParser.java @@ -72,7 +72,7 @@ public class CardinalityParser extends AnyValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new CardinalityAggregatorFactory(null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new CardinalityAggregatorFactory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsParser.java index 44ab63c4909..cf09286a36e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsParser.java @@ -66,8 +66,8 @@ public class GeoBoundsParser extends GeoPointValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new GeoBoundsAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new GeoBoundsAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java index 1c1b195ce07..f69913c528c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidParser.java @@ -58,7 +58,7 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new GeoCentroidAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new 
GeoCentroidAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java index 4ca7cc36669..e8d8b0d3262 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxParser.java @@ -56,8 +56,8 @@ public class MaxParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new MaxAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new MaxAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java index 25083c0a9d1..75cd9e8a3f7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinParser.java @@ -57,7 +57,7 @@ public class MinParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new MinAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new MinAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesBuilder.java deleted file mode 100644 index c587d6423db..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesBuilder.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.metrics.percentiles; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregationBuilder; - -import java.io.IOException; - -abstract class AbstractPercentilesBuilder> extends - ValuesSourceMetricsAggregationBuilder { - - private Double compression; - private PercentilesMethod method; - private Integer numberOfSignificantValueDigits; - - public AbstractPercentilesBuilder(String name, String type) { - super(name, type); - } - - /** - * Expert: Set the method to use to compute the percentiles. - */ - public PB method(PercentilesMethod method) { - this.method = method; - return (PB) this; - } - - /** - * Expert: set the compression. Higher values improve accuracy but also - * memory usage. 
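Each metric parser in this stretch of the patch now registers exactly one prototype, so getFactoryPrototypes loses its array return type. A minimal sketch of the new override shape, using a hypothetical parser class purely for illustration (the AvgAggregator.Factory prototype is the one shown above):

    import org.elasticsearch.search.aggregations.AggregatorFactory;
    import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator;

    class ExampleMetricParser {
        // Before this patch: AggregatorFactory[] getFactoryPrototypes()
        // returning new AggregatorFactory[] { new AvgAggregator.Factory(null) }.
        public AggregatorFactory getFactoryPrototypes() {
            return new AvgAggregator.Factory(null);
        }
    }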
Only relevant when using {@link PercentilesMethod#TDIGEST}. - */ - public PB compression(double compression) { - this.compression = compression; - return (PB) this; - } - - /** - * Expert: set the number of significant digits in the values. Only relevant - * when using {@link PercentilesMethod#HDR}. - */ - public PB numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { - this.numberOfSignificantValueDigits = numberOfSignificantValueDigits; - return (PB) this; - } - - @Override - protected void internalXContent(XContentBuilder builder, Params params) throws IOException { - super.internalXContent(builder, params); - - doInternalXContent(builder, params); - - if (method != null) { - builder.startObject(method.getName()); - - if (compression != null) { - builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression); - } - - if (numberOfSignificantValueDigits != null) { - builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits); - } - - builder.endObject(); - } - } - - protected abstract void doInternalXContent(XContentBuilder builder, Params params) throws IOException; - -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorFactory.java new file mode 100644 index 00000000000..27db5895699 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregatorFactory.java @@ -0,0 +1,230 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.metrics.percentiles; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregator; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentileRanksAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory.LeafOnly; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class PercentileRanksAggregatorFactory extends LeafOnly { + + private double[] values; + private PercentilesMethod method = PercentilesMethod.TDIGEST; + private int numberOfSignificantValueDigits = 3; + private double compression = 100.0; + private boolean keyed = false; + + public PercentileRanksAggregatorFactory(String name) { + super(name, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); + } + + /** + * Set the values to compute percentiles from. + */ + public PercentileRanksAggregatorFactory values(double... values) { + double[] sortedValues = Arrays.copyOf(values, values.length); + Arrays.sort(sortedValues); + this.values = sortedValues; + return this; + } + + /** + * Get the values to compute percentiles from. + */ + public double[] values() { + return values; + } + + /** + * Set whether the XContent response should be keyed + */ + public PercentileRanksAggregatorFactory keyed(boolean keyed) { + this.keyed = keyed; + return this; + } + + /** + * Get whether the XContent response should be keyed + */ + public boolean keyed() { + return keyed; + } + + /** + * Expert: set the number of significant digits in the values. Only relevant + * when using {@link PercentilesMethod#HDR}. + */ + public PercentileRanksAggregatorFactory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { + this.numberOfSignificantValueDigits = numberOfSignificantValueDigits; + return this; + } + + /** + * Expert: get the number of significant digits in the values. Only relevant + * when using {@link PercentilesMethod#HDR}. + */ + public int numberOfSignificantValueDigits() { + return numberOfSignificantValueDigits; + } + + /** + * Expert: set the compression. Higher values improve accuracy but also + * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}. + */ + public PercentileRanksAggregatorFactory compression(double compression) { + this.compression = compression; + return this; + } + + /** + * Expert: get the compression. Higher values improve accuracy but also + * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}. 
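The new PercentileRanksAggregatorFactory above folds the former TDigest- and HDR-specific factories into one class that dispatches on PercentilesMethod. A minimal configuration sketch, assuming only the constructor and setters defined in this new file; the aggregation name and numbers are illustrative:

    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorFactory;
    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;

    class PercentileRanksSketch {
        static PercentileRanksAggregatorFactory loadTimeRanks() {
            PercentileRanksAggregatorFactory ranks = new PercentileRanksAggregatorFactory("load_time_ranks");
            ranks.values(500, 100, 950);                // defensively copied and sorted: {100, 500, 950}
            ranks.method(PercentilesMethod.HDR);        // routes createUnmapped/doCreateInternal to the HDR aggregators
            ranks.numberOfSignificantValueDigits(2);    // only consulted when method == HDR
            ranks.keyed(true);                          // keyed XContent response
            return ranks;
        }
    }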
+ */ + public double compression() { + return compression; + } + + public PercentileRanksAggregatorFactory method(PercentilesMethod method) { + this.method = method; + return this; + } + + public PercentilesMethod method() { + return method; + } + + @Override + protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, + List pipelineAggregators, Map metaData) throws IOException { + switch (method) { + case TDIGEST: + return new TDigestPercentileRanksAggregator(name, null, aggregationContext, parent, values, compression, keyed, + config.formatter(), + pipelineAggregators, metaData); + case HDR: + return new HDRPercentileRanksAggregator(name, null, aggregationContext, parent, values, numberOfSignificantValueDigits, keyed, + config.formatter(), pipelineAggregators, metaData); + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + } + + @Override + protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, + boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) + throws IOException { + switch (method) { + case TDIGEST: + return new TDigestPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, compression, keyed, + config.formatter(), pipelineAggregators, metaData); + case HDR: + return new HDRPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, numberOfSignificantValueDigits, + keyed, config.formatter(), pipelineAggregators, metaData); + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + } + + @Override + protected PercentileRanksAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType, + ValueType targetValueType, StreamInput in) throws IOException { + PercentileRanksAggregatorFactory factory = new PercentileRanksAggregatorFactory(name); + factory.values = in.readDoubleArray(); + factory.keyed = in.readBoolean(); + factory.numberOfSignificantValueDigits = in.readVInt(); + factory.compression = in.readDouble(); + factory.method = PercentilesMethod.TDIGEST.readFrom(in); + return factory; + } + + @Override + protected void innerWriteTo(StreamOutput out) throws IOException { + out.writeDoubleArray(values); + out.writeBoolean(keyed); + out.writeVInt(numberOfSignificantValueDigits); + out.writeDouble(compression); + method.writeTo(out); + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values); + builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed); + builder.startObject(method.getName()); + if (method == PercentilesMethod.TDIGEST) { + builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression); + } else { + builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits); + } + builder.endObject(); + return builder; + } + + @Override + protected boolean innerEquals(Object obj) { + PercentileRanksAggregatorFactory other = (PercentileRanksAggregatorFactory) obj; + if (!Objects.equals(method, other.method)) { + return false; + } + boolean equalSettings = false; + switch (method) { + case HDR: + equalSettings = Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits); + break; + case TDIGEST: + equalSettings = 
Objects.equals(compression, other.compression); + break; + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + return equalSettings + && Objects.deepEquals(values, other.values) + && Objects.equals(keyed, other.keyed) + && Objects.equals(method, other.method); + } + + @Override + protected int innerHashCode() { + switch (method) { + case HDR: + return Objects.hash(Arrays.hashCode(values), keyed, numberOfSignificantValueDigits, method); + case TDIGEST: + return Objects.hash(Arrays.hashCode(values), keyed, compression, method); + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksBuilder.java deleted file mode 100644 index abb9bc5bf81..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksBuilder.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.aggregations.metrics.percentiles; - -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -/** - * Builder for the {@link PercentileRanks} aggregation. - */ -public class PercentileRanksBuilder extends AbstractPercentilesBuilder { - - private double[] values; - - /** - * Sole constructor. - */ - public PercentileRanksBuilder(String name) { - super(name, PercentileRanks.TYPE_NAME); - } - - /** - * Set the values to compute percentiles from. - */ - public PercentileRanksBuilder percentiles(double... 
values) { - this.values = values; - return this; - } - - @Override - protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { - - if (values != null) { - builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksParser.java index 553800a33eb..d0de4f57139 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksParser.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; import org.elasticsearch.common.ParseField; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregator; import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentileRanksAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; @@ -50,38 +48,28 @@ public class PercentileRanksParser extends AbstractPercentilesParser { @Override protected ValuesSourceAggregatorFactory buildFactory(String aggregationName, double[] keys, PercentilesMethod method, Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) { - if (method == PercentilesMethod.TDIGEST) { - TDigestPercentileRanksAggregator.Factory factory = new TDigestPercentileRanksAggregator.Factory(aggregationName); - if (keys != null) { - factory.values(keys); - } - if (compression != null) { - factory.compression(compression); - } - if (keyed != null) { - factory.keyed(keyed); - } - return factory; - } else if (method == PercentilesMethod.HDR) { - HDRPercentileRanksAggregator.Factory factory = new HDRPercentileRanksAggregator.Factory(aggregationName); - if (keys != null) { - factory.values(keys); - } - if (numberOfSignificantValueDigits != null) { - factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits); - } - if (keyed != null) { - factory.keyed(keyed); - } - return factory; - } else { - throw new AssertionError(); + PercentileRanksAggregatorFactory factory = new PercentileRanksAggregatorFactory(aggregationName); + if (keys != null) { + factory.values(keys); } + if (method != null) { + factory.method(method); + } + if (compression != null) { + factory.compression(compression); + } + if (numberOfSignificantValueDigits != null) { + factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits); + } + if (keyed != null) { + factory.keyed(keyed); + } + return factory; } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new TDigestPercentileRanksAggregator.Factory(null), new HDRPercentileRanksAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new PercentileRanksAggregatorFactory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorFactory.java new file mode 100644 
index 00000000000..3492fb446c2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregatorFactory.java @@ -0,0 +1,230 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.metrics.percentiles; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregator; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory.LeafOnly; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class PercentilesAggregatorFactory extends LeafOnly { + + private double[] percents = PercentilesParser.DEFAULT_PERCENTS; + private PercentilesMethod method = PercentilesMethod.TDIGEST; + private int numberOfSignificantValueDigits = 3; + private double compression = 100.0; + private boolean keyed = false; + + public PercentilesAggregatorFactory(String name) { + super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); + } + + /** + * Set the values to compute percentiles from. + */ + public PercentilesAggregatorFactory percentiles(double... percents) { + double[] sortedPercents = Arrays.copyOf(percents, percents.length); + Arrays.sort(sortedPercents); + this.percents = sortedPercents; + return this; + } + + /** + * Get the values to compute percentiles from. + */ + public double[] percentiles() { + return percents; + } + + /** + * Set whether the XContent response should be keyed + */ + public PercentilesAggregatorFactory keyed(boolean keyed) { + this.keyed = keyed; + return this; + } + + /** + * Get whether the XContent response should be keyed + */ + public boolean keyed() { + return keyed; + } + + /** + * Expert: set the number of significant digits in the values. Only relevant + * when using {@link PercentilesMethod#HDR}. 
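PercentilesAggregatorFactory mirrors the ranks factory earlier in this patch, but carries percents (defaulting to PercentilesParser.DEFAULT_PERCENTS) instead of values; note that, unlike the deleted PercentilesBuilder further down, its percentiles(...) setter only copies and sorts the array and does not range-check against [0, 100]. A short sketch of the TDigest path, again assuming only what this new class defines, with illustrative names and numbers:

    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorFactory;
    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;

    class PercentilesSketch {
        static PercentilesAggregatorFactory latencyPercentiles() {
            PercentilesAggregatorFactory pct = new PercentilesAggregatorFactory("latency_percentiles");
            pct.percentiles(99, 95, 50);              // copied and sorted: {50, 95, 99}
            pct.method(PercentilesMethod.TDIGEST);    // the default; shown here for clarity
            pct.compression(200);                     // only consulted when method == TDIGEST
            return pct;
        }
    }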
+ */ + public PercentilesAggregatorFactory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { + this.numberOfSignificantValueDigits = numberOfSignificantValueDigits; + return this; + } + + /** + * Expert: get the number of significant digits in the values. Only relevant + * when using {@link PercentilesMethod#HDR}. + */ + public int numberOfSignificantValueDigits() { + return numberOfSignificantValueDigits; + } + + /** + * Expert: set the compression. Higher values improve accuracy but also + * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}. + */ + public PercentilesAggregatorFactory compression(double compression) { + this.compression = compression; + return this; + } + + /** + * Expert: get the compression. Higher values improve accuracy but also + * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}. + */ + public double compression() { + return compression; + } + + public PercentilesAggregatorFactory method(PercentilesMethod method) { + this.method = method; + return this; + } + + public PercentilesMethod method() { + return method; + } + + @Override + protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, + List pipelineAggregators, Map metaData) throws IOException { + switch (method) { + case TDIGEST: + return new TDigestPercentilesAggregator(name, null, aggregationContext, parent, percents, compression, keyed, + config.formatter(), + pipelineAggregators, metaData); + case HDR: + return new HDRPercentilesAggregator(name, null, aggregationContext, parent, percents, numberOfSignificantValueDigits, keyed, + config.formatter(), pipelineAggregators, metaData); + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + } + + @Override + protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, + boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) + throws IOException { + switch (method) { + case TDIGEST: + return new TDigestPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, compression, keyed, + config.formatter(), pipelineAggregators, metaData); + case HDR: + return new HDRPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, numberOfSignificantValueDigits, + keyed, config.formatter(), pipelineAggregators, metaData); + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + } + + @Override + protected PercentilesAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType, + ValueType targetValueType, StreamInput in) throws IOException { + PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory(name); + factory.percents = in.readDoubleArray(); + factory.keyed = in.readBoolean(); + factory.numberOfSignificantValueDigits = in.readVInt(); + factory.compression = in.readDouble(); + factory.method = PercentilesMethod.TDIGEST.readFrom(in); + return factory; + } + + @Override + protected void innerWriteTo(StreamOutput out) throws IOException { + out.writeDoubleArray(percents); + out.writeBoolean(keyed); + out.writeVInt(numberOfSignificantValueDigits); + out.writeDouble(compression); + method.writeTo(out); + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents); + 
builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed); + builder.startObject(method.getName()); + if (method == PercentilesMethod.TDIGEST) { + builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression); + } else { + builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits); + } + builder.endObject(); + return builder; + } + + @Override + protected boolean innerEquals(Object obj) { + PercentilesAggregatorFactory other = (PercentilesAggregatorFactory) obj; + if (!Objects.equals(method, other.method)) { + return false; + } + boolean equalSettings = false; + switch (method) { + case HDR: + equalSettings = Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits); + break; + case TDIGEST: + equalSettings = Objects.equals(compression, other.compression); + break; + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + return equalSettings + && Objects.deepEquals(percents, other.percents) + && Objects.equals(keyed, other.keyed) + && Objects.equals(method, other.method); + } + + @Override + protected int innerHashCode() { + switch (method) { + case HDR: + return Objects.hash(Arrays.hashCode(percents), keyed, numberOfSignificantValueDigits, method); + case TDIGEST: + return Objects.hash(Arrays.hashCode(percents), keyed, compression, method); + default: + throw new IllegalStateException("Illegal method [" + method.getName() + "]"); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesBuilder.java deleted file mode 100644 index 399f9eabe20..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesBuilder.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.aggregations.metrics.percentiles; - -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - - -/** - * Builder for the {@link Percentiles} aggregation. - */ -public class PercentilesBuilder extends AbstractPercentilesBuilder { - - double[] percentiles; - /** - * Sole constructor. - */ - public PercentilesBuilder(String name) { - super(name, Percentiles.TYPE_NAME); - } - - /** - * Set the percentiles to compute. - */ - public PercentilesBuilder percentiles(double... 
percentiles) { - for (int i = 0; i < percentiles.length; i++) { - if (percentiles[i] < 0 || percentiles[i] > 100) { - throw new IllegalArgumentException("the percents in the percentiles aggregation [" + - getName() + "] must be in the [0, 100] range"); - } - } - this.percentiles = percentiles; - return this; - } - - @Override - protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (percentiles != null) { - builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percentiles); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java index c593b1f19cc..44316b621e0 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java @@ -19,11 +19,16 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; /** * An enum representing the methods for calculating percentiles */ -public enum PercentilesMethod { +public enum PercentilesMethod implements Writeable { /** * The TDigest method for calculating percentiles */ @@ -46,6 +51,20 @@ public enum PercentilesMethod { return name; } + @Override + public PercentilesMethod readFrom(StreamInput in) throws IOException { + int ordinal = in.readVInt(); + if (ordinal < 0 || ordinal >= values().length) { + throw new IOException("Unknown PercentilesMethod ordinal [" + ordinal + "]"); + } + return values()[ordinal]; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(ordinal()); + } + /** * Returns the {@link PercentilesMethod} for this method name. returns * null if no {@link PercentilesMethod} exists for the name. 
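With PercentilesMethod now implementing Writeable, the enum can travel alongside the consolidated factories that reference it. A round-trip sketch; BytesStreamOutput and StreamInput.wrap are assumed to be available here as elsewhere in this codebase, and the readFrom idiom is the one the factories above use:

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;

    class PercentilesMethodRoundTrip {
        static PercentilesMethod roundTrip(PercentilesMethod method) throws Exception {
            BytesStreamOutput out = new BytesStreamOutput();
            method.writeTo(out);                             // writes the ordinal as a vInt
            StreamInput in = StreamInput.wrap(out.bytes());
            // readFrom validates the ordinal and throws IOException on an unknown value.
            return PercentilesMethod.TDIGEST.readFrom(in);
        }
    }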
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesParser.java index f4820697af4..4079f9ec601 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesParser.java @@ -20,9 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; import org.elasticsearch.common.ParseField; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregator; import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; @@ -52,38 +50,28 @@ public class PercentilesParser extends AbstractPercentilesParser { @Override protected ValuesSourceAggregatorFactory buildFactory(String aggregationName, double[] keys, PercentilesMethod method, Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) { - if (method == PercentilesMethod.TDIGEST) { - TDigestPercentilesAggregator.Factory factory = new TDigestPercentilesAggregator.Factory(aggregationName); - if (keys != null) { - factory.percents(keys); - } - if (compression != null) { - factory.compression(compression); - } - if (keyed != null) { - factory.keyed(keyed); - } - return factory; - } else if (method == PercentilesMethod.HDR) { - HDRPercentilesAggregator.Factory factory = new HDRPercentilesAggregator.Factory(aggregationName); - if (keys != null) { - factory.percents(keys); - } - if (numberOfSignificantValueDigits != null) { - factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits); - } - if (keyed != null) { - factory.keyed(keyed); - } - return factory; - } else { - throw new AssertionError(); + PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory(aggregationName); + if (keys != null) { + factory.percentiles(keys); } + if (method != null) { + factory.method(method); + } + if (compression != null) { + factory.compression(compression); + } + if (numberOfSignificantValueDigits != null) { + factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits); + } + if (keyed != null) { + factory.keyed(keyed); + } + return factory; } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new TDigestPercentilesAggregator.Factory(null), new HDRPercentilesAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new PercentilesAggregatorFactory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java index a0dc9ed2710..603871e7909 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java @@ -19,28 +19,16 @@ package 
org.elasticsearch.search.aggregations.metrics.percentiles.hdr; import org.HdrHistogram.DoubleHistogram; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.Objects; /** * @@ -82,116 +70,4 @@ public class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregat return InternalHDRPercentileRanks.percentileRank(state, Double.valueOf(name)); } } - - public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly { - - private double[] values; - private int numberOfSignificantValueDigits = 3; - private boolean keyed = false; - - public Factory(String name) { - super(name, InternalHDRPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); - } - - /** - * Set the values to compute percentiles from. - */ - public Factory values(double[] values) { - double[] sortedValues = Arrays.copyOf(values, values.length); - Arrays.sort(sortedValues); - this.values = sortedValues; - return this; - } - - /** - * Get the values to compute percentiles from. - */ - public double[] values() { - return values; - } - - /** - * Set whether the XContent response should be keyed - */ - public Factory keyed(boolean keyed) { - this.keyed = keyed; - return this; - } - - /** - * Get whether the XContent response should be keyed - */ - public boolean keyed() { - return keyed; - } - - /** - * Expert: set the number of significant digits in the values. - */ - public Factory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { - this.numberOfSignificantValueDigits = numberOfSignificantValueDigits; - return this; - } - - /** - * Expert: set the number of significant digits in the values. 
- */ - public int numberOfSignificantValueDigits() { - return numberOfSignificantValueDigits; - } - - @Override - protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, - List pipelineAggregators, Map metaData) throws IOException { - return new HDRPercentileRanksAggregator(name, null, aggregationContext, parent, values, numberOfSignificantValueDigits, keyed, - config.formatter(), pipelineAggregators, metaData); - } - - @Override - protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, - boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) - throws IOException { - return new HDRPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, numberOfSignificantValueDigits, - keyed, config.formatter(), pipelineAggregators, metaData); - } - - @Override - protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType, - ValueType targetValueType, StreamInput in) throws IOException { - Factory factory = new Factory(name); - factory.values = in.readDoubleArray(); - factory.keyed = in.readBoolean(); - factory.numberOfSignificantValueDigits = in.readVInt(); - return factory; - } - - @Override - protected void innerWriteTo(StreamOutput out) throws IOException { - out.writeDoubleArray(values); - out.writeBoolean(keyed); - out.writeVInt(numberOfSignificantValueDigits); - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values); - builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed); - builder.startObject(PercentilesMethod.HDR.getName()); - builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits); - builder.endObject(); - return builder; - } - - @Override - protected boolean innerEquals(Object obj) { - Factory other = (Factory) obj; - return Objects.deepEquals(values, other.values) && Objects.equals(keyed, other.keyed) - && Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits); - } - - @Override - protected int innerHashCode() { - return Objects.hash(Arrays.hashCode(values), keyed, numberOfSignificantValueDigits); - } - } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java index 55f0c1f6cf3..f51769daab8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java @@ -19,28 +19,16 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.hdr; import org.HdrHistogram.DoubleHistogram; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; -import 
org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.Objects; /** * @@ -83,116 +71,4 @@ public class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator { keyed, formatter, pipelineAggregators(), metaData()); } - - public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly { - - private double[] percents = PercentilesParser.DEFAULT_PERCENTS; - private int numberOfSignificantValueDigits = 3; - private boolean keyed = false; - - public Factory(String name) { - super(name, InternalHDRPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); - } - - /** - * Set the percentiles to compute. - */ - public Factory percents(double[] percents) { - double[] sortedPercents = Arrays.copyOf(percents, percents.length); - Arrays.sort(sortedPercents); - this.percents = sortedPercents; - return this; - } - - /** - * Get the percentiles to compute. - */ - public double[] percents() { - return percents; - } - - /** - * Set whether the XContent response should be keyed - */ - public Factory keyed(boolean keyed) { - this.keyed = keyed; - return this; - } - - /** - * Get whether the XContent response should be keyed - */ - public boolean keyed() { - return keyed; - } - - /** - * Expert: set the number of significant digits in the values. - */ - public Factory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) { - this.numberOfSignificantValueDigits = numberOfSignificantValueDigits; - return this; - } - - /** - * Expert: set the number of significant digits in the values. 
- */ - public int numberOfSignificantValueDigits() { - return numberOfSignificantValueDigits; - } - - @Override - protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, - List pipelineAggregators, Map metaData) throws IOException { - return new HDRPercentilesAggregator(name, null, aggregationContext, parent, percents, numberOfSignificantValueDigits, keyed, - config.formatter(), pipelineAggregators, metaData); - } - - @Override - protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, - boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) - throws IOException { - return new HDRPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, numberOfSignificantValueDigits, - keyed, config.formatter(), pipelineAggregators, metaData); - } - - @Override - protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType, - ValueType targetValueType, StreamInput in) throws IOException { - Factory factory = new Factory(name); - factory.percents = in.readDoubleArray(); - factory.keyed = in.readBoolean(); - factory.numberOfSignificantValueDigits = in.readVInt(); - return factory; - } - - @Override - protected void innerWriteTo(StreamOutput out) throws IOException { - out.writeDoubleArray(percents); - out.writeBoolean(keyed); - out.writeVInt(numberOfSignificantValueDigits); - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents); - builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed); - builder.startObject(PercentilesMethod.HDR.getName()); - builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits); - builder.endObject(); - return builder; - } - - @Override - protected boolean innerEquals(Object obj) { - Factory other = (Factory) obj; - return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed) - && Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits); - } - - @Override - protected int innerHashCode() { - return Objects.hash(Arrays.hashCode(percents), keyed, numberOfSignificantValueDigits); - } - } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java index 8f56a2c892e..3db68ca5855 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java @@ -18,28 +18,16 @@ */ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser; -import 
org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.Objects; /** * @@ -77,118 +65,4 @@ public class TDigestPercentileRanksAggregator extends AbstractTDigestPercentiles return InternalTDigestPercentileRanks.percentileRank(state, Double.valueOf(name)); } } - - public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly { - - private double[] values; - private double compression = 100.0; - private boolean keyed = false; - - public Factory(String name) { - super(name, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); - } - - /** - * Set the values to compute percentiles from. - */ - public Factory values(double[] values) { - double[] sortedValues = Arrays.copyOf(values, values.length); - Arrays.sort(sortedValues); - this.values = sortedValues; - return this; - } - - /** - * Get the values to compute percentiles from. - */ - public double[] values() { - return values; - } - - /** - * Set whether the XContent response should be keyed - */ - public Factory keyed(boolean keyed) { - this.keyed = keyed; - return this; - } - - /** - * Get whether the XContent response should be keyed - */ - public boolean keyed() { - return keyed; - } - - /** - * Expert: set the compression. Higher values improve accuracy but also - * memory usage. - */ - public Factory compression(double compression) { - this.compression = compression; - return this; - } - - /** - * Expert: set the compression. Higher values improve accuracy but also - * memory usage. 
- */ - public double compression() { - return compression; - } - - @Override - protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, - List pipelineAggregators, Map metaData) throws IOException { - return new TDigestPercentileRanksAggregator(name, null, aggregationContext, parent, values, compression, keyed, config.formatter(), - pipelineAggregators, metaData); - } - - @Override - protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, - boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) - throws IOException { - return new TDigestPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, compression, keyed, - config.formatter(), pipelineAggregators, metaData); - } - - @Override - protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType, - ValueType targetValueType, StreamInput in) throws IOException { - Factory factory = new Factory(name); - factory.values = in.readDoubleArray(); - factory.keyed = in.readBoolean(); - factory.compression = in.readDouble(); - return factory; - } - - @Override - protected void innerWriteTo(StreamOutput out) throws IOException { - out.writeDoubleArray(values); - out.writeBoolean(keyed); - out.writeDouble(compression); - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values); - builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed); - builder.startObject(PercentilesMethod.TDIGEST.getName()); - builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression); - builder.endObject(); - return builder; - } - - @Override - protected boolean innerEquals(Object obj) { - Factory other = (Factory) obj; - return Objects.deepEquals(values, other.values) && Objects.equals(keyed, other.keyed) - && Objects.equals(compression, other.compression); - } - - @Override - protected int innerHashCode() { - return Objects.hash(Arrays.hashCode(values), keyed, compression); - } - } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java index ac864d63960..3bf7cff061c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java @@ -18,28 +18,16 @@ */ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; -import 
org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; -import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; -import java.util.Objects; /** * @@ -77,118 +65,4 @@ public class TDigestPercentilesAggregator extends AbstractTDigestPercentilesAggr public InternalAggregation buildEmptyAggregation() { return new InternalTDigestPercentiles(name, keys, new TDigestState(compression), keyed, formatter, pipelineAggregators(), metaData()); } - - public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly { - - private double[] percents = PercentilesParser.DEFAULT_PERCENTS; - private double compression = 100.0; - private boolean keyed = false; - - public Factory(String name) { - super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); - } - - /** - * Set the percentiles to compute. - */ - public Factory percents(double[] percents) { - double[] sortedPercents = Arrays.copyOf(percents, percents.length); - Arrays.sort(sortedPercents); - this.percents = sortedPercents; - return this; - } - - /** - * Get the percentiles to compute. - */ - public double[] percents() { - return percents; - } - - /** - * Set whether the XContent response should be keyed - */ - public Factory keyed(boolean keyed) { - this.keyed = keyed; - return this; - } - - /** - * Get whether the XContent response should be keyed - */ - public boolean keyed() { - return keyed; - } - - /** - * Expert: set the compression. Higher values improve accuracy but also - * memory usage. - */ - public Factory compression(double compression) { - this.compression = compression; - return this; - } - - /** - * Expert: set the compression. Higher values improve accuracy but also - * memory usage. 
- */ - public double compression() { - return compression; - } - - @Override - protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent, - List pipelineAggregators, Map metaData) throws IOException { - return new TDigestPercentilesAggregator(name, null, aggregationContext, parent, percents, compression, keyed, config.formatter(), - pipelineAggregators, metaData); - } - - @Override - protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent, - boolean collectsFromSingleBucket, List pipelineAggregators, Map metaData) - throws IOException { - return new TDigestPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, compression, keyed, - config.formatter(), pipelineAggregators, metaData); - } - - @Override - protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType, - ValueType targetValueType, StreamInput in) throws IOException { - Factory factory = new Factory(name); - factory.percents = in.readDoubleArray(); - factory.keyed = in.readBoolean(); - factory.compression = in.readDouble(); - return factory; - } - - @Override - protected void innerWriteTo(StreamOutput out) throws IOException { - out.writeDoubleArray(percents); - out.writeBoolean(keyed); - out.writeDouble(compression); - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents); - builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed); - builder.startObject(PercentilesMethod.TDIGEST.getName()); - builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression); - builder.endObject(); - return builder; - } - - @Override - protected boolean innerEquals(Object obj) { - Factory other = (Factory) obj; - return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed) - && Objects.equals(compression, other.compression); - } - - @Override - protected int innerHashCode() { - return Objects.hash(Arrays.hashCode(percents), keyed, compression); - } - } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricParser.java index f5e61724fe3..66dd65fb52e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricParser.java @@ -151,8 +151,8 @@ public class ScriptedMetricParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new ScriptedMetricAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new ScriptedMetricAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java index db08df7914c..b0bbecad3aa 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsParser.java @@ -56,7 +56,7 @@ public class StatsParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] 
getFactoryPrototypes() { - return new AggregatorFactory[] { new StatsAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new StatsAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java index 91e7db28425..ebe8b84cd93 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsParser.java @@ -67,7 +67,7 @@ public class ExtendedStatsParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new ExtendedStatsAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new ExtendedStatsAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java index b6330232598..2e40ddf5ebc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumParser.java @@ -56,7 +56,7 @@ public class SumParser extends NumericValuesSourceParser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new SumAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new SumAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsParser.java index 70c370bff69..7f03816e288 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsParser.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregator; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.search.fetch.FieldsParseElement; @@ -206,8 +205,8 @@ public class TopHitsParser implements Aggregator.Parser { } @Override - public AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new TopHitsAggregator.Factory(null) }; + public AggregatorFactory getFactoryPrototypes() { + return new TopHitsAggregator.Factory(null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java index 08ce4bf0667..1ea07659892 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountParser.java @@ -58,7 +58,7 @@ public class ValueCountParser extends AnyValuesSourceParser { } @Override - public 
AggregatorFactory[] getFactoryPrototypes() { - return new AggregatorFactory[] { new ValueCountAggregator.Factory(null, null) }; + public AggregatorFactory getFactoryPrototypes() { + return new ValueCountAggregator.Factory(null, null); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilder.java deleted file mode 100644 index 56ae321712f..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilder.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; - -import java.io.IOException; -import java.util.Map; - -/** - * A base class for all pipeline aggregator builders. - */ -public abstract class PipelineAggregatorBuilder> extends AbstractAggregationBuilder { - - private String[] bucketsPaths; - private Map metaData; - - /** - * Sole constructor, typically used by sub-classes. - */ - protected PipelineAggregatorBuilder(String name, String type) { - super(name, type); - } - - /** - * Sets the paths to the buckets to use for this pipeline aggregator - */ - public B setBucketsPaths(String... 
bucketsPaths) { - this.bucketsPaths = bucketsPaths; - return (B) this; - } - - /** - * Sets the meta data to be included in the pipeline aggregator's response - */ - public B setMetaData(Map metaData) { - this.metaData = metaData; - return (B)this; - } - - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(getName()); - - if (this.metaData != null) { - builder.field("meta", this.metaData); - } - builder.startObject(type); - - if (bucketsPaths != null) { - builder.startArray(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()); - for (String path : bucketsPaths) { - builder.value(path); - } - builder.endArray(); - } - - internalXContent(builder, params); - - builder.endObject(); - - return builder.endObject(); - } - - protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException; -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java index 6fbc6f8c6d6..adaba999cf4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java @@ -19,74 +19,87 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeBuilder; -import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgBuilder; -import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffBuilder; +import org.elasticsearch.script.Script; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregator; 
+import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator; + +import java.util.Map; public final class PipelineAggregatorBuilders { private PipelineAggregatorBuilders() { } - public static final DerivativeBuilder derivative(String name) { - return new DerivativeBuilder(name); + public static final DerivativePipelineAggregator.Factory derivative(String name, String bucketsPath) { + return new DerivativePipelineAggregator.Factory(name, bucketsPath); } - public static final MaxBucketBuilder maxBucket(String name) { - return new MaxBucketBuilder(name); + public static final MaxBucketPipelineAggregator.Factory maxBucket(String name, String bucketsPath) { + return new MaxBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final MinBucketBuilder minBucket(String name) { - return new MinBucketBuilder(name); + public static final MinBucketPipelineAggregator.Factory minBucket(String name, String bucketsPath) { + return new MinBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final AvgBucketBuilder avgBucket(String name) { - return new AvgBucketBuilder(name); + public static final AvgBucketPipelineAggregator.Factory avgBucket(String name, String bucketsPath) { + return new AvgBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final SumBucketBuilder sumBucket(String name) { - return new SumBucketBuilder(name); + public static final SumBucketPipelineAggregator.Factory sumBucket(String name, String bucketsPath) { + return new SumBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final StatsBucketBuilder statsBucket(String name) { - return new StatsBucketBuilder(name); + public static final StatsBucketPipelineAggregator.Factory statsBucket(String name, String bucketsPath) { + return new StatsBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final ExtendedStatsBucketBuilder extendedStatsBucket(String name) { - return new ExtendedStatsBucketBuilder(name); + public static final ExtendedStatsBucketPipelineAggregator.Factory extendedStatsBucket(String name, String bucketsPath) { + return new ExtendedStatsBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final PercentilesBucketBuilder percentilesBucket(String name) { - return new PercentilesBucketBuilder(name); + public static final PercentilesBucketPipelineAggregator.Factory percentilesBucket(String name, String bucketsPath) { + return new PercentilesBucketPipelineAggregator.Factory(name, bucketsPath); } - public static final MovAvgBuilder movingAvg(String name) { - return new MovAvgBuilder(name); + public static final MovAvgPipelineAggregator.Factory movingAvg(String name, String bucketsPath) { + return new MovAvgPipelineAggregator.Factory(name, bucketsPath); } - public static final BucketScriptBuilder bucketScript(String name) { - return new BucketScriptBuilder(name); + public static final BucketScriptPipelineAggregator.Factory bucketScript(String name, Map bucketsPathsMap, + Script script) { + return new BucketScriptPipelineAggregator.Factory(name, bucketsPathsMap, script); } - public static final BucketSelectorBuilder 
having(String name) { - return new BucketSelectorBuilder(name); + public static final BucketScriptPipelineAggregator.Factory bucketScript(String name, Script script, String... bucketsPaths) { + return new BucketScriptPipelineAggregator.Factory(name, script, bucketsPaths); } - public static final CumulativeSumBuilder cumulativeSum(String name) { - return new CumulativeSumBuilder(name); + public static final BucketSelectorPipelineAggregator.Factory bucketSelector(String name, Map bucketsPathsMap, + Script script) { + return new BucketSelectorPipelineAggregator.Factory(name, bucketsPathsMap, script); } - public static final SerialDiffBuilder diff(String name) { - return new SerialDiffBuilder(name); + public static final BucketSelectorPipelineAggregator.Factory bucketSelector(String name, Script script, String... bucketsPaths) { + return new BucketSelectorPipelineAggregator.Factory(name, script, bucketsPaths); + } + + public static final CumulativeSumPipelineAggregator.Factory cumulativeSum(String name, String bucketsPath) { + return new CumulativeSumPipelineAggregator.Factory(name, bucketsPath); + } + + public static final SerialDiffPipelineAggregator.Factory diff(String name, String bucketsPath) { + return new SerialDiffPipelineAggregator.Factory(name, bucketsPath); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorFactory.java index 70f34de282f..87d078054b5 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorFactory.java @@ -110,11 +110,8 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem out.writeMap(metaData); } - // NORELEASE make this abstract when agg refactor complete - protected void doWriteTo(StreamOutput out) throws IOException { - } + protected abstract void doWriteTo(StreamOutput out) throws IOException; - // NORELEASE remove this method when agg refactor complete @Override public String getWriteableName() { return type; @@ -129,10 +126,7 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem return factory; } - // NORELEASE make this abstract when agg refactor complete - protected PipelineAggregatorFactory doReadFrom(String name, String[] bucketsPaths, StreamInput in) throws IOException { - return null; - } + protected abstract PipelineAggregatorFactory doReadFrom(String name, String[] bucketsPaths, StreamInput in) throws IOException; @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { @@ -166,21 +160,14 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem return false; } - // NORELEASE make this method abstract when agg refactor complete - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - return builder; - } + protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException; @Override public int hashCode() { return Objects.hash(Arrays.hashCode(bucketsPaths), metaData, name, type, doHashCode()); } - // NORELEASE make this method abstract here when agg refactor complete (so - // that subclasses are forced to implement it) - protected int doHashCode() { - return 0; - } + protected abstract int doHashCode(); @Override public boolean 
equals(Object obj) { @@ -200,10 +187,6 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem return doEquals(obj); } - // NORELEASE make this method abstract here when agg refactor complete (so - // that subclasses are forced to implement it) - protected boolean doEquals(Object obj) { - return true; - } + protected abstract boolean doEquals(Object obj); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java deleted file mode 100644 index 88a1f42b174..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketParser; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeParser; - -import java.io.IOException; - -/** - * A builder for building requests for a {@link BucketMetricsPipelineAggregator} - */ -public abstract class BucketMetricsBuilder> extends PipelineAggregatorBuilder { - - private String format; - private GapPolicy gapPolicy; - - public BucketMetricsBuilder(String name, String type) { - super(name, type); - } - - public B format(String format) { - this.format = format; - return (B) this; - } - - public B gapPolicy(GapPolicy gapPolicy) { - this.gapPolicy = gapPolicy; - return (B) this; - } - - @Override - protected final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - if (format != null) { - builder.field(MinBucketParser.FORMAT.getPreferredName(), format); - } - if (gapPolicy != null) { - builder.field(DerivativeParser.GAP_POLICY.getPreferredName(), gapPolicy.getName()); - } - doInternalXContent(builder, params); - return builder; - } - - protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java index 4fe8eaeff9e..db1f62bcf36 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java @@ -91,7 +91,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser { BucketMetricsFactory factory = null; try { - factory = buildFactory(pipelineAggregatorName, bucketsPaths, leftover); + factory = buildFactory(pipelineAggregatorName, bucketsPaths[0], leftover); if (format != null) { factory.format(format); } @@ -112,7 +112,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser { return factory; } - protected abstract BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, + protected abstract BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPaths, Map unparsedParams) throws ParseException; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketBuilder.java deleted file mode 100644 index 627cded5932..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketBuilder.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg; - -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - -public class AvgBucketBuilder extends BucketMetricsBuilder { - - public AvgBucketBuilder(String name) { - super(name, AvgBucketPipelineAggregator.TYPE.name()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java index 4589e740072..431be6d892e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java @@ -32,8 +32,8 @@ public class AvgBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) { - return new AvgBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) { + return new AvgBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPath); } @Override public PipelineAggregatorFactory getFactoryPrototype() { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java index b7625b24715..58b26dfffef 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java @@ -91,7 +91,11 @@ public class AvgBucketPipelineAggregator extends BucketMetricsPipelineAggregator public static class Factory extends BucketMetricsFactory { - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketBuilder.java deleted file mode 100644 index 8b214d35f3e..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketBuilder.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max; - -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - -public class MaxBucketBuilder extends BucketMetricsBuilder { - - public MaxBucketBuilder(String name) { - super(name, MaxBucketPipelineAggregator.TYPE.name()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java index 9114ade6882..c75927ecba4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java @@ -33,8 +33,8 @@ public class MaxBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) { - return new MaxBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) { + return new MaxBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPath); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java index c8b1007b0d4..739aef84636 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java @@ -98,7 +98,11 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator public static class Factory extends BucketMetricsFactory { - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketBuilder.java deleted file mode 100644 index 327bf4e063f..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketBuilder.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min; - -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - - -public class MinBucketBuilder extends BucketMetricsBuilder { - - public MinBucketBuilder(String name) { - super(name, MinBucketPipelineAggregator.TYPE.name()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java index 474bef7d629..3e6e7e17f62 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java @@ -33,8 +33,8 @@ public class MinBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) { - return new MinBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) { + return new MinBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPath); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java index 685a5f5850c..dfbe3e337bb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java @@ -99,7 +99,11 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator public static class Factory extends BucketMetricsFactory { - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java deleted file mode 100644 index 9293e146185..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - -import java.io.IOException; - -public class PercentilesBucketBuilder extends BucketMetricsBuilder { - - Double[] percents; - - public PercentilesBucketBuilder(String name) { - super(name, PercentilesBucketPipelineAggregator.TYPE.name()); - } - - public PercentilesBucketBuilder percents(Double[] percents) { - this.percents = percents; - return this; - } - - @Override - protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (percents != null) { - builder.field(PercentilesBucketParser.PERCENTS.getPreferredName(), (Object[])percents); - } - } - - -} - diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java index 36babbe5f81..fabe7184b54 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java @@ -39,7 +39,7 @@ public class PercentilesBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) throws ParseException { double[] percents = null; @@ -66,7 +66,7 @@ public class PercentilesBucketParser extends BucketMetricsParser { } PercentilesBucketPipelineAggregator.Factory factory = new PercentilesBucketPipelineAggregator.Factory(pipelineAggregatorName, - bucketsPaths); + bucketsPath); if (percents != null) { factory.percents(percents); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java index 00918baf987..d45078a60d7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java @@ -128,7 +128,11 @@ public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAg private double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git 
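A minimal sketch of the percentiles_bucket factory above, assuming the percents(double[]) setter matches the parser call in the hunk; the aggregation name and buckets path are hypothetical:

    import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator;

    public class PercentilesBucketFactoryExample {
        public static void main(String[] args) {
            // Hypothetical aggregation name and buckets path.
            PercentilesBucketPipelineAggregator.Factory factory =
                    new PercentilesBucketPipelineAggregator.Factory("sales_percentiles", "sales_per_month>sales");
            // Overrides the defaults listed in the hunk above (1, 5, 25, 50, 75, 95, 99).
            factory.percents(new double[] { 25.0, 50.0, 75.0 });
        }
    }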
a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketBuilder.java deleted file mode 100644 index a8c19db8079..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketBuilder.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats; - -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - -public class StatsBucketBuilder extends BucketMetricsBuilder { - - public StatsBucketBuilder(String name) { - super(name, StatsBucketPipelineAggregator.TYPE.name()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketParser.java index 11830621001..41a742c8d0e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketParser.java @@ -32,8 +32,8 @@ public class StatsBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) { - return new StatsBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) { + return new StatsBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPath); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java index c51ac3e7051..1b63ada93e6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java @@ -97,7 +97,11 @@ public class StatsBucketPipelineAggregator extends BucketMetricsPipelineAggregat public static class Factory extends BucketMetricsFactory { - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), 
bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketBuilder.java deleted file mode 100644 index 25880bdcabe..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketBuilder.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - -import java.io.IOException; - -public class ExtendedStatsBucketBuilder extends BucketMetricsBuilder { - - Double sigma; - - public ExtendedStatsBucketBuilder(String name) { - super(name, ExtendedStatsBucketPipelineAggregator.TYPE.name()); - } - - public ExtendedStatsBucketBuilder sigma(Double sigma) { - this.sigma = sigma; - return this; - } - - @Override - protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { - if (sigma != null) { - builder.field(ExtendedStatsBucketParser.SIGMA.getPreferredName(), sigma); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java index 2308030a705..89457a980cc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java @@ -36,7 +36,7 @@ public class ExtendedStatsBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) throws ParseException { Double sigma = null; @@ -52,7 +52,7 @@ public class ExtendedStatsBucketParser extends BucketMetricsParser { } } ExtendedStatsBucketPipelineAggregator.Factory factory = new ExtendedStatsBucketPipelineAggregator.Factory(pipelineAggregatorName, - bucketsPaths); + bucketsPath); if (sigma != null) { factory.sigma(sigma); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java index 1adec45ce8b..f58d03ce174 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java @@ -105,7 +105,11 @@ public class ExtendedStatsBucketPipelineAggregator extends BucketMetricsPipeline private double sigma = 2.0; - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketBuilder.java deleted file mode 100644 index 5b2201c3f1e..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketBuilder.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
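A sketch of the sigma option on the new extended_stats_bucket factory, assuming the sigma(...) setter invoked by the parser above; the aggregation name and buckets path are hypothetical:

    import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator;

    public class ExtendedStatsBucketFactoryExample {
        public static void main(String[] args) {
            // Hypothetical aggregation name and buckets path.
            ExtendedStatsBucketPipelineAggregator.Factory factory =
                    new ExtendedStatsBucketPipelineAggregator.Factory("monthly_sales_stats", "sales_per_month>sales");
            factory.sigma(3.0); // widens the bounds from the default of 2.0 shown above
        }
    }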
- */ - -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum; - -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; - -public class SumBucketBuilder extends BucketMetricsBuilder { - - public SumBucketBuilder(String name) { - super(name, SumBucketPipelineAggregator.TYPE.name()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java index f318c754f02..bcf08b3bf35 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java @@ -32,8 +32,8 @@ public class SumBucketParser extends BucketMetricsParser { } @Override - protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map unparsedParams) { - return new SumBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map unparsedParams) { + return new SumBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPath); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java index e8a6d90ef1b..36f6c5ab873 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java @@ -87,7 +87,11 @@ public class SumBucketPipelineAggregator extends BucketMetricsPipelineAggregator public static class Factory extends BucketMetricsFactory { - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptBuilder.java deleted file mode 100644 index ee5fa94eede..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptBuilder.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.pipeline.bucketscript; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; - -import java.io.IOException; -import java.util.Map; - -public class BucketScriptBuilder extends PipelineAggregatorBuilder { - - private String format; - private GapPolicy gapPolicy; - private Script script; - private Map bucketsPathsMap; - - public BucketScriptBuilder(String name) { - super(name, BucketScriptPipelineAggregator.TYPE.name()); - } - - public BucketScriptBuilder script(Script script) { - this.script = script; - return this; - } - - public BucketScriptBuilder format(String format) { - this.format = format; - return this; - } - - public BucketScriptBuilder gapPolicy(GapPolicy gapPolicy) { - this.gapPolicy = gapPolicy; - return this; - } - - /** - * Sets the paths to the buckets to use for this pipeline aggregator - */ - public BucketScriptBuilder setBucketsPathsMap(Map bucketsPathsMap) { - this.bucketsPathsMap = bucketsPathsMap; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException { - if (script != null) { - builder.field(ScriptField.SCRIPT.getPreferredName(), script); - } - if (format != null) { - builder.field(BucketScriptParser.FORMAT.getPreferredName(), format); - } - if (gapPolicy != null) { - builder.field(BucketScriptParser.GAP_POLICY.getPreferredName(), gapPolicy.getName()); - } - if (bucketsPathsMap != null) { - builder.field(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName(), bucketsPathsMap); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index 1d519ba6c67..7e487a91d65 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -173,6 +173,18 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { this.script = script; } + public Factory(String name, Script script, String... bucketsPaths) { + this(name, convertToBucketsPathMap(bucketsPaths), script); + } + + private static Map convertToBucketsPathMap(String[] bucketsPaths) { + Map bucketsPathsMap = new HashMap<>(); + for (int i = 0; i < bucketsPaths.length; i++) { + bucketsPathsMap.put("_value" + i, bucketsPaths[i]); + } + return bucketsPathsMap; + } + /** * Sets the format to use on the output of this aggregation. */ diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumBuilder.java deleted file mode 100644 index 282ded8db61..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumBuilder.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
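A sketch of the new Factory(String, Script, String...) constructor added above, which maps each buckets path to a _valueN script variable; the names, paths and inline script are hypothetical, and the single-argument Script constructor is assumed:

    import org.elasticsearch.script.Script;
    import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregator;

    public class BucketScriptFactoryExample {
        public static void main(String[] args) {
            // "total_sales" maps to _value0 and "sale_count" to _value1 via convertToBucketsPathMap.
            BucketScriptPipelineAggregator.Factory factory = new BucketScriptPipelineAggregator.Factory(
                    "sales_ratio", new Script("_value0 / _value1"), "total_sales", "sale_count");
        }
    }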
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.cumulativesum; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; - -import java.io.IOException; - -public class CumulativeSumBuilder extends PipelineAggregatorBuilder { - - private String format; - - public CumulativeSumBuilder(String name) { - super(name, CumulativeSumPipelineAggregator.TYPE.name()); - } - - public CumulativeSumBuilder format(String format) { - this.format = format; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - if (format != null) { - builder.field(CumulativeSumParser.FORMAT.getPreferredName(), format); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumParser.java index 9843e87f0b1..30a6b21935e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumParser.java @@ -84,7 +84,8 @@ public class CumulativeSumParser implements PipelineAggregator.Parser { + "] for derivative aggregation [" + pipelineAggregatorName + "]"); } - CumulativeSumPipelineAggregator.Factory factory = new CumulativeSumPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + CumulativeSumPipelineAggregator.Factory factory = new CumulativeSumPipelineAggregator.Factory(pipelineAggregatorName, + bucketsPaths[0]); if (format != null) { factory.format(format); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java index fd98c6e3286..5ecc6813f4c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java @@ -115,7 +115,11 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator { private String format; - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java deleted file mode 100644 index 50b4578346d..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeBuilder.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.derivative; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; - -import java.io.IOException; - -public class DerivativeBuilder extends PipelineAggregatorBuilder { - - private String format; - private GapPolicy gapPolicy; - private String unit; - - public DerivativeBuilder(String name) { - super(name, DerivativePipelineAggregator.TYPE.name()); - } - - public DerivativeBuilder format(String format) { - this.format = format; - return this; - } - - public DerivativeBuilder gapPolicy(GapPolicy gapPolicy) { - this.gapPolicy = gapPolicy; - return this; - } - - public DerivativeBuilder unit(String unit) { - this.unit = unit; - return this; - } - - /** - * Sets the unit using the provided {@link DateHistogramInterval}. 
This - * method is only useful when calculating the derivative using a - * `date_histogram` - */ - public DerivativeBuilder unit(DateHistogramInterval unit) { - this.unit = unit.toString(); - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - if (format != null) { - builder.field(DerivativeParser.FORMAT.getPreferredName(), format); - } - if (gapPolicy != null) { - builder.field(DerivativeParser.GAP_POLICY.getPreferredName(), gapPolicy.getName()); - } - if (unit != null) { - builder.field(DerivativeParser.UNIT.getPreferredName(), unit); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeParser.java index dd27914f4de..7d6abdcf703 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativeParser.java @@ -91,7 +91,7 @@ public class DerivativeParser implements PipelineAggregator.Parser { + "] for derivative aggregation [" + pipelineAggregatorName + "]"); } - DerivativePipelineAggregator.Factory factory = new DerivativePipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + DerivativePipelineAggregator.Factory factory = new DerivativePipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths[0]); if (format != null) { factory.format(format); } @@ -99,8 +99,8 @@ public class DerivativeParser implements PipelineAggregator.Parser { factory.gapPolicy(gapPolicy); } if (units != null) { - factory.units(units); - } + factory.unit(units); + } return factory; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java index e71a4f17260..1ea4cc5ffba 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java @@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; @@ -163,7 +164,11 @@ public class DerivativePipelineAggregator extends PipelineAggregator { private GapPolicy gapPolicy = GapPolicy.SKIP; private String units; - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } @@ -185,12 +190,17 @@ public class DerivativePipelineAggregator extends PipelineAggregator { return gapPolicy; } - public Factory units(String units) { + public Factory unit(String units) { 
this.units = units; return this; } - public String units() { + public Factory unit(DateHistogramInterval units) { + this.units = units.toString(); + return this; + } + + public String unit() { return units; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorBuilder.java deleted file mode 100644 index c291c632016..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorBuilder.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.pipeline.having; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.Script.ScriptField; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; - -import java.io.IOException; -import java.util.Map; - -public class BucketSelectorBuilder extends PipelineAggregatorBuilder { - - private GapPolicy gapPolicy; - private Script script; - private Map bucketsPathsMap; - - public BucketSelectorBuilder(String name) { - super(name, BucketSelectorPipelineAggregator.TYPE.name()); - } - - public BucketSelectorBuilder script(Script script) { - this.script = script; - return this; - } - - public BucketSelectorBuilder gapPolicy(GapPolicy gapPolicy) { - this.gapPolicy = gapPolicy; - return this; - } - - /** - * Sets the paths to the buckets to use for this pipeline aggregator. The - * map given to this method must contain script variable name as keys with - * bucket paths values to the metrics to use for each variable. 
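A sketch of the renamed unit(...) setters on the derivative factory, including the new DateHistogramInterval overload shown above; the aggregation name and buckets path are hypothetical:

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
    import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregator;

    public class DerivativeFactoryExample {
        public static void main(String[] args) {
            // Hypothetical name and path; the derivative is reported per day.
            DerivativePipelineAggregator.Factory factory =
                    new DerivativePipelineAggregator.Factory("sales_deriv", "sales")
                            .unit(DateHistogramInterval.DAY);
        }
    }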
- */ - public BucketSelectorBuilder setBucketsPathsMap(Map bucketsPathsMap) { - this.bucketsPathsMap = bucketsPathsMap; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException { - if (script != null) { - builder.field(ScriptField.SCRIPT.getPreferredName(), script); - } - if (gapPolicy != null) { - builder.field(BucketSelectorParser.GAP_POLICY.getPreferredName(), gapPolicy.getName()); - } - if (bucketsPathsMap != null) { - builder.field(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName(), bucketsPathsMap); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java index b81e0dae7a0..dfb47894705 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/having/BucketSelectorPipelineAggregator.java @@ -151,6 +151,18 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator { this.script = script; } + public Factory(String name, Script script, String... bucketsPaths) { + this(name, convertToBucketsPathMap(bucketsPaths), script); + } + + private static Map convertToBucketsPathMap(String[] bucketsPaths) { + Map bucketsPathsMap = new HashMap<>(); + for (int i = 0; i < bucketsPaths.length; i++) { + bucketsPathsMap.put("_value" + i, bucketsPaths[i]); + } + return bucketsPathsMap; + } + /** * Sets the gap policy to use for this aggregation. */ diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgBuilder.java deleted file mode 100644 index b2dc718d47a..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgBuilder.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
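A sketch of the equivalent constructor on the bucket_selector factory; the gapPolicy(...) setter name is assumed from the javadoc in the hunk above, and the names, paths and script are hypothetical:

    import org.elasticsearch.script.Script;
    import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
    import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorPipelineAggregator;

    public class BucketSelectorFactoryExample {
        public static void main(String[] args) {
            // "total_sales" is exposed to the script as _value0.
            BucketSelectorPipelineAggregator.Factory factory = new BucketSelectorPipelineAggregator.Factory(
                    "big_buckets_only", new Script("_value0 > 100"), "total_sales");
            factory.gapPolicy(GapPolicy.SKIP);
        }
    }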
- */ - -package org.elasticsearch.search.aggregations.pipeline.movavg; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; - -import java.io.IOException; -import java.util.Map; - -/** - * A builder to create MovingAvg pipeline aggregations - */ -public class MovAvgBuilder extends PipelineAggregatorBuilder { - - private String format; - private GapPolicy gapPolicy; - private MovAvgModelBuilder modelBuilder; - private Integer window; - private Integer predict; - private Boolean minimize; - private Map settings; - - public MovAvgBuilder(String name) { - super(name, MovAvgPipelineAggregator.TYPE.name()); - } - - public MovAvgBuilder format(String format) { - this.format = format; - return this; - } - - /** - * Defines what should be done when a gap in the series is discovered - * - * @param gapPolicy A GapPolicy enum defining the selected policy - * @return Returns the builder to continue chaining - */ - public MovAvgBuilder gapPolicy(GapPolicy gapPolicy) { - this.gapPolicy = gapPolicy; - return this; - } - - /** - * Sets a MovAvgModelBuilder for the Moving Average. The model builder is used to - * define what type of moving average you want to use on the series - * - * @param modelBuilder A MovAvgModelBuilder which has been prepopulated with settings - * @return Returns the builder to continue chaining - */ - public MovAvgBuilder modelBuilder(MovAvgModelBuilder modelBuilder) { - this.modelBuilder = modelBuilder; - return this; - } - - /** - * Sets the window size for the moving average. This window will "slide" across the - * series, and the values inside that window will be used to calculate the moving avg value - * - * @param window Size of window - * @return Returns the builder to continue chaining - */ - public MovAvgBuilder window(int window) { - this.window = window; - return this; - } - - /** - * Sets the number of predictions that should be returned. Each prediction will be spaced at - * the intervals specified in the histogram. E.g "predict: 2" will return two new buckets at the - * end of the histogram with the predicted values. - * - * @param numPredictions Number of predictions to make - * @return Returns the builder to continue chaining - */ - public MovAvgBuilder predict(int numPredictions) { - this.predict = numPredictions; - return this; - } - - /** - * Determines if the model should be fit to the data using a cost - * minimizing algorithm. 
- * - * @param minimize If the model should be fit to the underlying data - * @return Returns the builder to continue chaining - */ - public MovAvgBuilder minimize(boolean minimize) { - this.minimize = minimize; - return this; - } - - /** - * The hash of settings that should be provided to the model when it is - * instantiated - */ - public MovAvgBuilder settings(Map settings) { - this.settings = settings; - return this; - } - - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - if (format != null) { - builder.field(MovAvgParser.FORMAT.getPreferredName(), format); - } - if (gapPolicy != null) { - builder.field(MovAvgParser.GAP_POLICY.getPreferredName(), gapPolicy.getName()); - } - if (modelBuilder != null) { - modelBuilder.toXContent(builder, params); - } - if (window != null) { - builder.field(MovAvgParser.WINDOW.getPreferredName(), window); - } - if (predict != null) { - builder.field(MovAvgParser.PREDICT.getPreferredName(), predict); - } - if (minimize != null) { - builder.field(MovAvgParser.MINIMIZE.getPreferredName(), minimize); - } - if (settings != null) { - builder.field(MovAvgParser.SETTINGS.getPreferredName(), settings); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java index 566eb92fc99..62bcf612193 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgParser.java @@ -141,7 +141,7 @@ public class MovAvgParser implements PipelineAggregator.Parser { + "] for movingAvg aggregation [" + pipelineAggregatorName + "]"); } - MovAvgPipelineAggregator.Factory factory = new MovAvgPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths); + MovAvgPipelineAggregator.Factory factory = new MovAvgPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths[0]); if (format != null) { factory.format(format); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index c56ca26fb56..dcb7ae72cb1 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; +import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelStreams; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.aggregations.support.format.ValueFormat; @@ -287,7 +288,11 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { private int predict = 0; private Boolean minimize; - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath 
}); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } @@ -351,6 +356,18 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { return window; } + /** + * Sets a MovAvgModel for the Moving Average. The model is used to + * define what type of moving average you want to use on the series + * + * @param model + * A MovAvgModel which has been prepopulated with settings + */ + public Factory modelBuilder(MovAvgModelBuilder model) { + this.model = model.build(); + return this; + } + /** * Sets a MovAvgModel for the Moving Average. The model is used to * define what type of moving average you want to use on the series @@ -381,6 +398,9 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { * Number of predictions to make */ public Factory predict(int predict) { + if (predict <= 0) { + throw new IllegalArgumentException("predict must be greater than 0. Found [" + predict + "] in [" + name + "]"); + } this.predict = predict; return this; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java index c424de86aa1..edbfa66584e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java @@ -170,7 +170,7 @@ public class EwmaModel extends MovAvgModel { public static class EWMAModelBuilder implements MovAvgModelBuilder { - private Double alpha; + private double alpha = DEFAULT_ALPHA; /** * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values @@ -190,13 +190,16 @@ public class EwmaModel extends MovAvgModel { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); builder.startObject(MovAvgParser.SETTINGS.getPreferredName()); - if (alpha != null) { - builder.field("alpha", alpha); - } + builder.field("alpha", alpha); builder.endObject(); return builder; } + + @Override + public MovAvgModel build() { + return new EwmaModel(alpha); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java index 8734b71ec4e..06ce050d225 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java @@ -210,7 +210,7 @@ public class HoltLinearModel extends MovAvgModel { return false; } HoltLinearModel other = (HoltLinearModel) obj; - return Objects.equals(alpha, other.alpha) + return Objects.equals(alpha, other.alpha) && Objects.equals(beta, other.beta); } @@ -235,8 +235,8 @@ public class HoltLinearModel extends MovAvgModel { public static class HoltLinearModelBuilder implements MovAvgModelBuilder { - private Double alpha; - private Double beta; + private double alpha = DEFAULT_ALPHA; + private double beta = DEFAULT_BETA; /** * Alpha controls the smoothing of the data. 
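A sketch combining the new moving-average factory with the modelBuilder(...) overload and the build() method added to EWMAModelBuilder above; the name, path and alpha value are hypothetical, and the alpha(...) setter is assumed from the builder's field and javadoc:

    import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator;
    import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel;

    public class MovAvgFactoryExample {
        public static void main(String[] args) {
            EwmaModel.EWMAModelBuilder model = new EwmaModel.EWMAModelBuilder();
            model.alpha(0.8); // overrides DEFAULT_ALPHA

            MovAvgPipelineAggregator.Factory factory =
                    new MovAvgPipelineAggregator.Factory("sales_moving_avg", "sales");
            factory.modelBuilder(model); // calls model.build() internally, as shown above
            factory.predict(5);          // values <= 0 now throw IllegalArgumentException
        }
    }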
Alpha = 1 retains no memory of past values @@ -268,18 +268,17 @@ public class HoltLinearModel extends MovAvgModel { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); builder.startObject(MovAvgParser.SETTINGS.getPreferredName()); - - if (alpha != null) { - builder.field("alpha", alpha); - } - - if (beta != null) { - builder.field("beta", beta); - } + builder.field("alpha", alpha); + builder.field("beta", beta); builder.endObject(); return builder; } + + @Override + public MovAvgModel build() { + return new HoltLinearModel(alpha, beta); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java index 9f5ecad4b1b..be8d2fb73b6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java @@ -389,7 +389,7 @@ public class HoltWintersModel extends MovAvgModel { return false; } HoltWintersModel other = (HoltWintersModel) obj; - return Objects.equals(alpha, other.alpha) + return Objects.equals(alpha, other.alpha) && Objects.equals(beta, other.beta) && Objects.equals(gamma, other.gamma) && Objects.equals(period, other.period) @@ -443,12 +443,12 @@ public class HoltWintersModel extends MovAvgModel { public static class HoltWintersModelBuilder implements MovAvgModelBuilder { - private Double alpha; - private Double beta; - private Double gamma; - private Integer period; - private SeasonalityType seasonalityType; - private Boolean pad; + private double alpha = DEFAULT_ALPHA; + private double beta = DEFAULT_BETA; + private double gamma = DEFAULT_GAMMA; + private int period = DEFAULT_PERIOD; + private SeasonalityType seasonalityType = DEFAULT_SEASONALITY_TYPE; + private Boolean pad = null; /** * Alpha controls the smoothing of the data. Alpha = 1 retains no memory of past values @@ -500,34 +500,24 @@ public class HoltWintersModel extends MovAvgModel { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); builder.startObject(MovAvgParser.SETTINGS.getPreferredName()); - - if (alpha != null) { - builder.field("alpha", alpha); - } - - if (beta != null) { - builder.field("beta", beta); - } - - if (gamma != null) { - builder.field("gamma", gamma); - } - - if (period != null) { - builder.field("period", period); - } - + builder.field("alpha", alpha); + builder.field("beta", beta); + builder.field("gamma", gamma); + builder.field("period", period); if (pad != null) { builder.field("pad", pad); } - - if (seasonalityType != null) { - builder.field("type", seasonalityType.getName()); - } + builder.field("type", seasonalityType.getName()); builder.endObject(); return builder; } + + @Override + public MovAvgModel build() { + boolean pad = this.pad == null ? 
(seasonalityType == SeasonalityType.MULTIPLICATIVE) : this.pad; + return new HoltWintersModel(alpha, beta, gamma, period, seasonalityType, pad); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java index a5dfddf3e90..bc3de8b6aff 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java @@ -132,6 +132,11 @@ public class LinearModel extends MovAvgModel { builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); return builder; } + + @Override + public MovAvgModel build() { + return new LinearModel(); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java index e491d121bcb..759c493d858 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java @@ -26,4 +26,6 @@ import org.elasticsearch.common.xcontent.ToXContent; * average models are used by the MovAvg aggregation */ public interface MovAvgModelBuilder extends ToXContent { + + public MovAvgModel build(); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java index 619654e44f1..61f8c668e1a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java @@ -125,6 +125,11 @@ public class SimpleModel extends MovAvgModel { builder.field(MovAvgParser.MODEL.getPreferredName(), NAME_FIELD.getPreferredName()); return builder; } + + @Override + public MovAvgModel build() { + return new SimpleModel(); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffBuilder.java deleted file mode 100644 index 052f3f02b28..00000000000 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffBuilder.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.aggregations.pipeline.serialdiff; - -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder; - -import java.io.IOException; - -public class SerialDiffBuilder extends PipelineAggregatorBuilder { - - private String format; - private GapPolicy gapPolicy; - private Integer lag; - - public SerialDiffBuilder(String name) { - super(name, SerialDiffPipelineAggregator.TYPE.name()); - } - - public SerialDiffBuilder format(String format) { - this.format = format; - return this; - } - - public SerialDiffBuilder gapPolicy(GapPolicy gapPolicy) { - this.gapPolicy = gapPolicy; - return this; - } - - public SerialDiffBuilder lag(Integer lag) { - this.lag = lag; - return this; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - if (format != null) { - builder.field(SerialDiffParser.FORMAT.getPreferredName(), format); - } - if (gapPolicy != null) { - builder.field(SerialDiffParser.GAP_POLICY.getPreferredName(), gapPolicy.getName()); - } - if (lag != null) { - builder.field(SerialDiffParser.LAG.getPreferredName(), lag); - } - return builder; - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffParser.java index 9b48d1c3faf..fd9abc21341 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffParser.java @@ -101,7 +101,7 @@ public class SerialDiffParser implements PipelineAggregator.Parser { "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for derivative aggregation [" + reducerName + "]"); } - SerialDiffPipelineAggregator.Factory factory = new SerialDiffPipelineAggregator.Factory(reducerName, bucketsPaths); + SerialDiffPipelineAggregator.Factory factory = new SerialDiffPipelineAggregator.Factory(reducerName, bucketsPaths[0]); if (lag != null) { factory.lag(lag); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java index db42812b4e9..1bf4f820e23 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java @@ -151,7 +151,11 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator { private GapPolicy gapPolicy = GapPolicy.SKIP; private int lag = 1; - public Factory(String name, String[] bucketsPaths) { + public Factory(String name, String bucketsPath) { + this(name, new String[] { bucketsPath }); + } + + private Factory(String name, String[] bucketsPaths) { super(name, TYPE.name(), bucketsPaths); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java index bdf1e55a4fb..688e447cf58 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java +++ 
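A sketch of the serial_diff factory with the single-path constructor; the lag(...) setter is assumed from the parser call above, and the name and path are hypothetical:

    import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator;

    public class SerialDiffFactoryExample {
        public static void main(String[] args) {
            // Hypothetical name and path; lag defaults to 1 per the hunk above.
            SerialDiffPipelineAggregator.Factory factory =
                    new SerialDiffPipelineAggregator.Factory("thirtieth_difference", "the_sum");
            factory.lag(30);
        }
    }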
b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValueType.java @@ -35,17 +35,19 @@ import java.io.IOException; public enum ValueType implements Writeable { @Deprecated - ANY((byte) 0, "any", ValuesSourceType.ANY, IndexFieldData.class, ValueFormat.RAW), STRING((byte) 1, "string", ValuesSourceType.BYTES, + ANY((byte) 0, "any", "any", ValuesSourceType.ANY, IndexFieldData.class, ValueFormat.RAW), + STRING((byte) 1, "string", "string", ValuesSourceType.BYTES, IndexFieldData.class, ValueFormat.RAW), - LONG((byte) 2, "byte|short|integer|long", ValuesSourceType.NUMERIC, + LONG((byte) 2, "byte|short|integer|long", "long", + ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { @Override public boolean isNumeric() { return true; } }, - DOUBLE((byte) 3, "float|double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { + DOUBLE((byte) 3, "float|double", "double", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { @Override public boolean isNumeric() { return true; @@ -56,31 +58,31 @@ public enum ValueType implements Writeable { return true; } }, - NUMBER((byte) 4, "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { + NUMBER((byte) 4, "number", "number", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { @Override public boolean isNumeric() { return true; } }, - DATE((byte) 5, "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.DateTime.DEFAULT) { + DATE((byte) 5, "date", "date", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.DateTime.DEFAULT) { @Override public boolean isNumeric() { return true; } }, - IP((byte) 6, "ip", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.IPv4) { + IP((byte) 6, "ip", "ip", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.IPv4) { @Override public boolean isNumeric() { return true; } }, - NUMERIC((byte) 7, "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { + NUMERIC((byte) 7, "numeric", "numeric", ValuesSourceType.NUMERIC, IndexNumericFieldData.class, ValueFormat.RAW) { @Override public boolean isNumeric() { return true; } }, - GEOPOINT((byte) 8, "geo_point", ValuesSourceType.GEOPOINT, IndexGeoPointFieldData.class, ValueFormat.RAW) { + GEOPOINT((byte) 8, "geo_point", "geo_point", ValuesSourceType.GEOPOINT, IndexGeoPointFieldData.class, ValueFormat.RAW) { @Override public boolean isGeoPoint() { return true; @@ -92,11 +94,13 @@ public enum ValueType implements Writeable { final Class fieldDataType; final ValueFormat defaultFormat; private final byte id; + private String preferredName; - private ValueType(byte id, String description, ValuesSourceType valuesSourceType, Class fieldDataType, + private ValueType(byte id, String description, String preferredName, ValuesSourceType valuesSourceType, Class fieldDataType, ValueFormat defaultFormat) { this.id = id; this.description = description; + this.preferredName = preferredName; this.valuesSourceType = valuesSourceType; this.fieldDataType = fieldDataType; this.defaultFormat = defaultFormat; @@ -106,6 +110,10 @@ public enum ValueType implements Writeable { return description; } + public String getPreferredName() { + return preferredName; + } + public ValuesSourceType getValuesSourceType() { return valuesSourceType; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java 
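A small sketch of the new preferred-name accessor added to ValueType above:

    import org.elasticsearch.search.aggregations.support.ValueType;

    public class ValueTypePreferredNameExample {
        public static void main(String[] args) {
            // LONG keeps the description "byte|short|integer|long" but now reports
            // a single canonical token as its preferred name.
            System.out.println(ValueType.LONG.getPreferredName()); // prints "long"
        }
    }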
b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java index 68b33cc53c1..cdd3cf07d78 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java @@ -59,10 +59,6 @@ public abstract class ValuesSourceAggregatorFactory> extends ValuesSourceAggregatorFactory { - protected LeafOnly(String name, Type type, ValuesSourceParser.Input input) { - super(name, type, input); - } - protected LeafOnly(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, type, valuesSourceType, targetValueType); } @@ -83,25 +79,6 @@ public abstract class ValuesSourceAggregatorFactory config; - // NORELEASE remove this method when aggs refactoring complete - /** - * This constructor remains here until all subclasses have been moved to the - * new constructor. This also means moving from using - * {@link ValuesSourceParser} to using {@link AbstractValuesSourceParser}. - */ - @Deprecated - protected ValuesSourceAggregatorFactory(String name, Type type, ValuesSourceParser.Input input) { - super(name, type); - this.valuesSourceType = input.valuesSourceType; - this.targetValueType = input.targetValueType; - this.field = input.field; - this.script = input.script; - this.valueType = input.valueType; - this.format = input.format; - this.missing = input.missing; - this.timeZone = input.timezone; - } - protected ValuesSourceAggregatorFactory(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { super(name, type); this.valuesSourceType = valuesSourceType; @@ -379,9 +356,7 @@ public abstract class ValuesSourceAggregatorFactory doReadFrom(String name, StreamInput in) throws IOException { @@ -406,14 +381,11 @@ public abstract class ValuesSourceAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType, ValueType targetValueType, - StreamInput in) throws IOException { - return null; - } + protected abstract ValuesSourceAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType, + ValueType targetValueType, StreamInput in) throws IOException; @Override - protected final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { + public final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (field != null) { builder.field("field", field); @@ -430,15 +402,15 @@ public abstract class ValuesSourceAggregatorFactory { - - static final ParseField TIME_ZONE = new ParseField("time_zone"); - - public static Builder any(String aggName, InternalAggregation.Type aggType, SearchContext context) { - return new Builder<>(aggName, aggType, context, ValuesSource.class, ValuesSourceType.ANY); - } - - public static Builder numeric(String aggName, InternalAggregation.Type aggType, SearchContext context) { - return new Builder<>(aggName, aggType, context, ValuesSource.Numeric.class, ValuesSourceType.NUMERIC) - .targetValueType(ValueType.NUMERIC); - } - - public static Builder bytes(String aggName, InternalAggregation.Type aggType, SearchContext context) { - return new Builder<>(aggName, aggType, context, ValuesSource.Bytes.class, ValuesSourceType.BYTES).targetValueType(ValueType.STRING); - } - - public static Builder geoPoint(String aggName, InternalAggregation.Type aggType, SearchContext context) { - return new Builder<>(aggName, 
aggType, context, ValuesSource.GeoPoint.class, ValuesSourceType.GEOPOINT).targetValueType( - ValueType.GEOPOINT).scriptable(false); - } - - // NORELEASE remove this class when aggs refactoring complete - /** - * @deprecated use {@link AbstractValuesSourceParser} instead. This class - * will be removed when aggs refactoring is complete. - */ - @Deprecated - public static class Input { - String field = null; - Script script = null; - @Deprecated - Map params = null; // TODO Remove in 3.0 - ValueType valueType = null; - String format = null; - Object missing = null; - ValuesSourceType valuesSourceType = null; - ValueType targetValueType = null; - DateTimeZone timezone = DateTimeZone.UTC; - - public boolean valid() { - return field != null || script != null; - } - - public DateTimeZone timezone() { - return this.timezone; - } - } - - private final String aggName; - private final InternalAggregation.Type aggType; - private final SearchContext context; - - private boolean scriptable = true; - private boolean formattable = false; - private boolean timezoneAware = false; - private ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); - - private Input input = new Input(); - - private ValuesSourceParser(String aggName, InternalAggregation.Type aggType, SearchContext context, ValuesSourceType valuesSourceType) { - this.aggName = aggName; - this.aggType = aggType; - this.context = context; - input.valuesSourceType = valuesSourceType; - } - - public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser) throws IOException { - if ("missing".equals(currentFieldName) && token.isValue()) { - input.missing = parser.objectText(); - return true; - } - if (token == XContentParser.Token.VALUE_STRING) { - if ("field".equals(currentFieldName)) { - input.field = parser.text(); - } else if (formattable && "format".equals(currentFieldName)) { - input.format = parser.text(); - } else if (timezoneAware && context.parseFieldMatcher().match(currentFieldName, TIME_ZONE)) { - input.timezone = DateTimeZone.forID(parser.text()); - } else if (scriptable) { - if ("value_type".equals(currentFieldName) || "valueType".equals(currentFieldName)) { - input.valueType = ValueType.resolveForScript(parser.text()); - if (input.targetValueType != null && input.valueType.isNotA(input.targetValueType)) { - throw new SearchParseException(context, aggType.name() + " aggregation [" + aggName - + "] was configured with an incompatible value type [" + input.valueType + "]. 
[" + aggType - + "] aggregation can only work on value of type [" + input.targetValueType + "]", parser.getTokenLocation()); - } - } else if (!scriptParameterParser.token(currentFieldName, token, parser, context.parseFieldMatcher())) { - return false; - } - return true; - } else { - return false; - } - return true; - } - if (token == XContentParser.Token.VALUE_NUMBER) { - if (timezoneAware && context.parseFieldMatcher().match(currentFieldName, TIME_ZONE)) { - input.timezone = DateTimeZone.forOffsetHours(parser.intValue()); - } else { - return false; - } - return true; - } - if (scriptable && token == XContentParser.Token.START_OBJECT) { - if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) { - input.script = Script.parse(parser, context.parseFieldMatcher()); - return true; - } else if ("params".equals(currentFieldName)) { - input.params = parser.map(); - return true; - } - return false; - } - - return false; - } - - public Input input() { - if (input.script == null) { // Didn't find anything using the new API so - // try using the old one instead - ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); - if (scriptValue != null) { - if (input.params == null) { - input.params = new HashMap<>(); - } - input.script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), input.params); - } - } - - return input; - } - - // NORELEASE remove this class when aggs refactoring complete - /** - * @deprecated use {@link AbstractValuesSourceParser} instead. This class - * will be removed when aggs refactoring is complete. - */ - @Deprecated - public static class Builder { - - private final ValuesSourceParser parser; - - private Builder(String aggName, InternalAggregation.Type aggType, SearchContext context, Class valuesSourcecClass, - ValuesSourceType valuesSourceType) { - parser = new ValuesSourceParser<>(aggName, aggType, context, valuesSourceType); - } - - public Builder scriptable(boolean scriptable) { - parser.scriptable = scriptable; - return this; - } - - public Builder formattable(boolean formattable) { - parser.formattable = formattable; - return this; - } - - public Builder timezoneAware(boolean timezoneAware) { - parser.timezoneAware = timezoneAware; - return this; - } - - public Builder targetValueType(ValueType valueType) { - parser.input.targetValueType = valueType; - return this; - } - - public ValuesSourceParser build() { - return parser; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 801ba22cbd6..59ffb0ad6fa 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -41,8 +41,8 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.highlight.HighlightBuilder; @@ -383,10 +383,8 @@ public final class SearchSourceBuilder 
extends ToXContentToBytes implements Writ /** * Add an aggregation to perform as part of the search. - * - * NORELEASE REMOVE WHEN AGG REFACTORING IS COMPLETE */ - public SearchSourceBuilder aggregation(AbstractAggregationBuilder aggregation) { + public SearchSourceBuilder aggregation(AggregatorFactory aggregation) { try { if (aggregations == null) { aggregations = new ArrayList<>(); @@ -405,7 +403,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ /** * Add an aggregation to perform as part of the search. */ - public SearchSourceBuilder aggregation(AggregatorFactory aggregation) { + public SearchSourceBuilder aggregation(PipelineAggregatorFactory aggregation) { try { if (aggregations == null) { aggregations = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java index 46402c6054a..65ba54dc458 100644 --- a/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java +++ b/core/src/test/java/org/elasticsearch/indices/cache/query/IndicesRequestCacheIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.test.ESIntegTestCase; +import org.joda.time.DateTimeZone; import java.util.List; @@ -50,15 +51,19 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { // which used to not work well with the query cache because of the handles stream output // see #9500 final SearchResponse r1 = client().prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(dateHistogram("histo").field("f").timeZone("+01:00").minDocCount(0).interval(DateHistogramInterval.MONTH)).get(); + .addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00")).minDocCount(0) + .dateHistogramInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(r1); // The cached is actually used assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMemorySizeInBytes(), greaterThan(0l)); for (int i = 0; i < 10; ++i) { - final SearchResponse r2 = client().prepareSearch("index").setSize(0).setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(dateHistogram("histo").field("f").timeZone("+01:00").minDocCount(0).interval(DateHistogramInterval.MONTH)).get(); + final SearchResponse r2 = client().prepareSearch("index").setSize(0) + .setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(dateHistogram("histo").field("f") + .timeZone(DateTimeZone.forID("+01:00")).minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH)) + .get(); assertSearchResponse(r2); Histogram h1 = r1.getAggregations().get("histo"); Histogram h2 = r2.getAggregations().get("histo"); diff --git a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index b8ed2cc0e28..7bac10d9e74 100644 --- a/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/core/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -165,7 +165,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { percolateRequestBuilder.setOnlyCount(countOnly); } - 
percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_a").setBucketsPaths("a>_count")); + percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_a", "a>_count")); PercolateResponse response = percolateRequestBuilder.execute().actionGet(); assertMatchCount(response, expectedCount[i % numUniqueQueries]); @@ -245,7 +245,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase { percolateRequestBuilder.setOnlyCount(countOnly); } - percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_terms").setBucketsPaths("terms>_count")); + percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_terms", "terms>_count")); PercolateResponse response = percolateRequestBuilder.execute().actionGet(); assertMatchCount(response, numQueries); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java deleted file mode 100644 index 80227ff7f66..00000000000 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryIT.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations; - -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Requests; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; -import org.elasticsearch.test.ESIntegTestCase; - -import java.util.ArrayList; -import java.util.List; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.core.IsNull.notNullValue; - -@ESIntegTestCase.SuiteScopeTestCase -@AwaitsFix(bugUrl = "needs fixing after the search request refactor. 
Do we need agg binary?") -// NO RELEASE -public class AggregationsBinaryIT extends ESIntegTestCase { - - private static final String STRING_FIELD_NAME = "s_value"; - private static final String INT_FIELD_NAME = "i_value"; - - @Override - public void setupSuiteScopeCluster() throws Exception { - createIndex("idx"); - List builders = new ArrayList<>(); - for (int i = 0; i < 5; i++) { - builders.add(client().prepareIndex("idx", "type").setSource( - jsonBuilder().startObject().field(STRING_FIELD_NAME, "val" + i).field(INT_FIELD_NAME, i).endObject())); - } - indexRandom(true, builders); - ensureSearchable(); - } - - public void testAggregationsBinary() throws Exception { - TermsBuilder termsBuilder = AggregationBuilders.terms("terms").field(STRING_FIELD_NAME); - TermsBuilder subTerm = AggregationBuilders.terms("subterms").field(INT_FIELD_NAME); - - // Create an XContentBuilder from sub aggregation - XContentBuilder subTermContentBuilder = JsonXContent.contentBuilder().startObject(); - subTerm.toXContent(subTermContentBuilder, ToXContent.EMPTY_PARAMS); - subTermContentBuilder.endObject(); - - // Add sub aggregation as a XContentBuilder (binary_aggregation) - termsBuilder.subAggregation(subTermContentBuilder); - - SearchResponse response = client().prepareSearch("idx").setTypes("type").addAggregation(termsBuilder).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + i); - assertThat(bucket, notNullValue()); - assertThat(bucket.getKeyAsString(), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1l)); - Aggregations subAggs = bucket.getAggregations(); - assertThat(subAggs, notNullValue()); - assertThat(subAggs.asList().size(), equalTo(1)); - Terms subTerms = subAggs.get("subterms"); - assertThat(subTerms, notNullValue()); - List subTermsBuckets = subTerms.getBuckets(); - assertThat(subTermsBuckets, notNullValue()); - assertThat(subTermsBuckets.size(), equalTo(1)); - assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i)); - assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l)); - } - } - - public void testAggregationsBinarySameContentType() throws Exception { - TermsBuilder termsBuilder = AggregationBuilders.terms("terms").field(STRING_FIELD_NAME); - TermsBuilder subTerm = AggregationBuilders.terms("subterms").field(INT_FIELD_NAME); - - // Create an XContentBuilder from sub aggregation - - XContentBuilder subTermContentBuilder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); - subTermContentBuilder.startObject(); - subTerm.toXContent(subTermContentBuilder, ToXContent.EMPTY_PARAMS); - subTermContentBuilder.endObject(); - - // Add sub aggregation as a XContentBuilder (binary_aggregation) - termsBuilder.subAggregation(subTermContentBuilder); - - SearchResponse response = client().prepareSearch("idx").setTypes("type").addAggregation(termsBuilder).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + i); - assertThat(bucket, notNullValue()); - assertThat(bucket.getKeyAsString(), 
equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1l)); - Aggregations subAggs = bucket.getAggregations(); - assertThat(subAggs, notNullValue()); - assertThat(subAggs.asList().size(), equalTo(1)); - Terms subTerms = subAggs.get("subterms"); - assertThat(subTerms, notNullValue()); - List subTermsBuckets = subTerms.getBuckets(); - assertThat(subTermsBuckets, notNullValue()); - assertThat(subTermsBuckets.size(), equalTo(1)); - assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i)); - assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l)); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java index 63008bc501f..1821d09fc85 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -147,7 +147,10 @@ public class MissingValueIT extends ESIntegTestCase { } public void testDateHistogram() { - SearchResponse response = client().prepareSearch("idx").addAggregation(dateHistogram("my_histogram").field("date").interval(DateHistogramInterval.YEAR).missing("2014-05-07")).get(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + dateHistogram("my_histogram").field("date").dateHistogramInterval(DateHistogramInterval.YEAR).missing("2014-05-07")) + .get(); assertSearchResponse(response); Histogram histogram = response.getAggregations().get("my_histogram"); assertEquals(2, histogram.getBuckets().size()); @@ -156,7 +159,10 @@ public class MissingValueIT extends ESIntegTestCase { assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(1).getKeyAsString()); assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - response = client().prepareSearch("idx").addAggregation(dateHistogram("my_histogram").field("date").interval(DateHistogramInterval.YEAR).missing("2015-05-07")).get(); + response = client().prepareSearch("idx") + .addAggregation( + dateHistogram("my_histogram").field("date").dateHistogramInterval(DateHistogramInterval.YEAR).missing("2015-05-07")) + .get(); assertSearchResponse(response); histogram = response.getAggregations().get("my_histogram"); assertEquals(1, histogram.getBuckets().size()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index 715505d0626..868ca23c64e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -134,7 +134,8 @@ public class ChildrenIT extends ESIntegTestCase { .setQuery(matchQuery("randomized", true)) .addAggregation( terms("category").field("category").size(0).subAggregation( - children("to_comment").childType("comment").subAggregation( +children("to_comment", "comment") + .subAggregation( terms("commenters").field("commenter").size(0).subAggregation( topHits("top_comments") )) @@ -175,7 +176,7 @@ public class ChildrenIT extends ESIntegTestCase { .setQuery(matchQuery("randomized", false)) .addAggregation( terms("category").field("category").size(0).subAggregation( - children("to_comment").childType("comment").subAggregation(topHits("top_comments").sort("_uid", SortOrder.ASC)) + children("to_comment", "comment").subAggregation(topHits("top_comments").sort("_uid", SortOrder.ASC)) ) ).get(); 
assertSearchResponse(searchResponse); @@ -250,7 +251,7 @@ public class ChildrenIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { SearchResponse searchResponse = client().prepareSearch(indexName) - .addAggregation(children("children").childType("child").subAggregation(sum("counts").field("count"))) + .addAggregation(children("children", "child").subAggregation(sum("counts").field("count"))) .get(); assertNoFailures(searchResponse); @@ -279,7 +280,7 @@ public class ChildrenIT extends ESIntegTestCase { public void testNonExistingChildType() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") .addAggregation( - children("non-existing").childType("xyz") +children("non-existing", "xyz") ).get(); assertSearchResponse(searchResponse); @@ -319,8 +320,7 @@ public class ChildrenIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch(indexName).setTypes(masterType) .setQuery(hasChildQuery(childType, termQuery("color", "orange"))) - .addAggregation(children("my-refinements") - .childType(childType) +.addAggregation(children("my-refinements", childType) .subAggregation(terms("my-colors").field("color")) .subAggregation(terms("my-sizes").field("size")) ).get(); @@ -371,8 +371,7 @@ public class ChildrenIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch(indexName) .setQuery(matchQuery("name", "europe")) .addAggregation( - children(parentType).childType(parentType).subAggregation( - children(childType).childType(childType).subAggregation( + children(parentType, parentType).subAggregation(children(childType, childType).subAggregation( terms("name").field("name") ) ) @@ -420,7 +419,7 @@ public class ChildrenIT extends ESIntegTestCase { .setSize(0) .addAggregation(AggregationBuilders.terms("towns").field("town") .subAggregation(AggregationBuilders.terms("parent_names").field("name") - .subAggregation(AggregationBuilders.children("child_docs").childType("childType")) +.subAggregation(AggregationBuilders.children("child_docs", "childType")) ) ) .get(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index ef784b6f812..15ac49afa03 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -38,6 +38,7 @@ import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.max.Max; @@ -160,7 +161,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testSingleValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -195,7 +196,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void 
testSingleValuedFieldWithTimeZone() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(1).timeZone("+01:00")).execute() + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(DateTimeZone.forID("+01:00"))).execute() .actionGet(); DateTimeZone tz = DateTimeZone.forID("+01:00"); assertSearchResponse(response); @@ -253,7 +254,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.KEY_ASC)) .execute().actionGet(); @@ -276,7 +277,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.KEY_DESC)) .execute().actionGet(); @@ -298,7 +299,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.COUNT_ASC)) .execute().actionGet(); @@ -320,7 +321,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.COUNT_DESC)) .execute().actionGet(); @@ -340,7 +341,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testSingleValuedFieldWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.MONTH) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.MONTH) .subAggregation(sum("sum").field("value"))) .execute().actionGet(); @@ -397,7 +398,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testSingleValuedFieldWithSubAggregationInherited() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.MONTH) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.MONTH) .subAggregation(max("max"))) .execute().actionGet(); @@ -444,7 +445,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.aggregation("sum", true)) .subAggregation(max("sum").field("value"))) .execute().actionGet(); @@ -467,7 +468,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.aggregation("sum", false)) 
.subAggregation(max("sum").field("value"))) .execute().actionGet(); @@ -490,7 +491,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.aggregation("stats", "sum", true)) .subAggregation(stats("stats").field("value"))) .execute().actionGet(); @@ -513,7 +514,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.aggregation("stats", "sum", false)) .subAggregation(stats("stats").field("value"))) .execute().actionGet(); @@ -537,7 +538,7 @@ public class DateHistogramIT extends ESIntegTestCase { .addAggregation(dateHistogram("histo") .field("date") .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) - .interval(DateHistogramInterval.MONTH)).execute().actionGet(); + .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -581,7 +582,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testMultiValuedField() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("dates").interval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").field("dates").dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -625,7 +626,7 @@ public class DateHistogramIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") .field("dates") - .interval(DateHistogramInterval.MONTH) + .dateHistogramInterval(DateHistogramInterval.MONTH) .order(Histogram.Order.COUNT_DESC)) .execute().actionGet(); @@ -671,7 +672,7 @@ public class DateHistogramIT extends ESIntegTestCase { .addAggregation(dateHistogram("histo") .field("dates") .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) - .interval(DateHistogramInterval.MONTH)).execute().actionGet(); + .dateHistogramInterval(DateHistogramInterval.MONTH)).execute().actionGet(); assertSearchResponse(response); @@ -725,7 +726,7 @@ public class DateHistogramIT extends ESIntegTestCase { .addAggregation(dateHistogram("histo") .field("dates") .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)) - .interval(DateHistogramInterval.MONTH).subAggregation(max("max"))).execute().actionGet(); + .dateHistogramInterval(DateHistogramInterval.MONTH).subAggregation(max("max"))).execute().actionGet(); assertSearchResponse(response); @@ -786,7 +787,7 @@ public class DateHistogramIT extends ESIntegTestCase { */ public void testScriptSingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -822,7 +823,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void 
testScriptSingleValueWithSubAggregatorInherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") - .script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH) + .script(new Script("date", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH) .subAggregation(max("max"))).execute().actionGet(); assertSearchResponse(response); @@ -866,7 +867,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testScriptMultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -920,7 +921,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testScriptMultiValuedWithAggregatorInherited() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(dateHistogram("histo") - .script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).interval(DateHistogramInterval.MONTH) + .script(new Script("dates", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)).dateHistogramInterval(DateHistogramInterval.MONTH) .subAggregation(max("max"))).execute().actionGet(); assertSearchResponse(response); @@ -974,7 +975,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -987,7 +988,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.MONTH)) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.MONTH)) .execute().actionGet(); assertSearchResponse(response); @@ -1057,8 +1058,8 @@ public class DateHistogramIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addAggregation(dateHistogram("date_histo") .field("date") - .timeZone("-02:00") - .interval(DateHistogramInterval.DAY) + .timeZone(DateTimeZone.forID("-02:00")) + .dateHistogramInterval(DateHistogramInterval.DAY) .format("yyyy-MM-dd:HH-mm-ssZZ")) .execute().actionGet(); @@ -1151,10 +1152,10 @@ public class DateHistogramIT extends ESIntegTestCase { response = client().prepareSearch("idx2") .addAggregation(dateHistogram("histo") .field("date") - .interval(DateHistogramInterval.days(interval)) + .dateHistogramInterval(DateHistogramInterval.days(interval)) .minDocCount(0) // when explicitly specifying a format, the extended bounds should be defined by the same format - .extendedBounds(format(boundsMin, pattern), format(boundsMax, pattern)) + .extendedBounds(new ExtendedBounds(format(boundsMin, pattern), format(boundsMax, pattern))) .format(pattern)) 
.execute().actionGet(); @@ -1227,8 +1228,8 @@ public class DateHistogramIT extends ESIntegTestCase { .prepareSearch(index) .setQuery(QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getID())) .addAggregation( - dateHistogram("histo").field("date").interval(DateHistogramInterval.hours(1)).timeZone(timezone.getID()).minDocCount(0) - .extendedBounds("now/d", "now/d+23h") + dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.hours(1)).timeZone(timezone).minDocCount(0) + .extendedBounds(new ExtendedBounds("now/d", "now/d+23h")) ).execute().actionGet(); assertSearchResponse(response); @@ -1266,7 +1267,7 @@ public class DateHistogramIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .addAggregation(dateHistogram("date_histo") .field("date") - .interval(DateHistogramInterval.DAY)) + .dateHistogramInterval(DateHistogramInterval.DAY)) .execute().actionGet(); assertSearchHits(response, "0", "1", "2", "3", "4"); @@ -1285,7 +1286,7 @@ public class DateHistogramIT extends ESIntegTestCase { public void testIssue6965() { SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").timeZone("+01:00").interval(DateHistogramInterval.MONTH).minDocCount(0)) + .addAggregation(dateHistogram("histo").field("date").timeZone(DateTimeZone.forID("+01:00")).dateHistogramInterval(DateHistogramInterval.MONTH).minDocCount(0)) .execute().actionGet(); assertSearchResponse(response); @@ -1326,7 +1327,7 @@ public class DateHistogramIT extends ESIntegTestCase { client().prepareIndex("test9491", "type").setSource("d", "2014-11-08T13:00:00Z")); ensureSearchable("test9491"); SearchResponse response = client().prepareSearch("test9491") - .addAggregation(dateHistogram("histo").field("d").interval(DateHistogramInterval.YEAR).timeZone("Asia/Jerusalem")) + .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.YEAR).timeZone(DateTimeZone.forID("Asia/Jerusalem"))) .execute().actionGet(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); @@ -1343,7 +1344,7 @@ public class DateHistogramIT extends ESIntegTestCase { client().prepareIndex("test8209", "type").setSource("d", "2014-04-30T00:00:00Z")); ensureSearchable("test8209"); SearchResponse response = client().prepareSearch("test8209") - .addAggregation(dateHistogram("histo").field("d").interval(DateHistogramInterval.MONTH).timeZone("CET") + .addAggregation(dateHistogram("histo").field("d").dateHistogramInterval(DateHistogramInterval.MONTH).timeZone(DateTimeZone.forID("CET")) .minDocCount(0)) .execute().actionGet(); assertSearchResponse(response); @@ -1361,7 +1362,7 @@ public class DateHistogramIT extends ESIntegTestCase { } /** - * see issue #9634, negative interval in date_histogram should raise exception + * see issue #9634, negative dateHistogramInterval in date_histogram should raise exception */ public void testExceptionOnNegativeInterval() { try { @@ -1370,12 +1371,12 @@ public class DateHistogramIT extends ESIntegTestCase { .actionGet(); fail(); } catch (SearchPhaseExecutionException e) { - assertThat(e.toString(), containsString("ElasticsearchParseException")); + assertThat(e.toString(), containsString("IllegalArgumentException")); } } public void testTimestampField() { // see #11692 - SearchResponse response = client().prepareSearch("idx").addAggregation(dateHistogram("histo").field("_timestamp").interval(randomFrom(DateHistogramInterval.DAY, 
DateHistogramInterval.MONTH))).get(); + SearchResponse response = client().prepareSearch("idx").addAggregation(dateHistogram("histo").field("_timestamp").dateHistogramInterval(randomFrom(DateHistogramInterval.DAY, DateHistogramInterval.MONTH))).get(); assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), greaterThan(0)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 75c0fc25e6d..70a6a565224 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -92,7 +92,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { .field("date") .offset("2h") .format(DATE_FORMAT) - .interval(DateHistogramInterval.DAY)) + .dateHistogramInterval(DateHistogramInterval.DAY)) .execute().actionGet(); assertThat(response.getHits().getTotalHits(), equalTo(5l)); @@ -114,7 +114,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { .field("date") .offset("-2h") .format(DATE_FORMAT) - .interval(DateHistogramInterval.DAY)) + .dateHistogramInterval(DateHistogramInterval.DAY)) .execute().actionGet(); assertThat(response.getHits().getTotalHits(), equalTo(5l)); @@ -141,7 +141,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { .offset("6h") .minDocCount(0) .format(DATE_FORMAT) - .interval(DateHistogramInterval.DAY)) + .dateHistogramInterval(DateHistogramInterval.DAY)) .execute().actionGet(); assertThat(response.getHits().getTotalHits(), equalTo(24l)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java new file mode 100644 index 00000000000..bc076e81867 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.search.aggregations.BaseAggregationTestCase; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; +import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; +import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator.DateHistogramFactory; + +public class DateHistogramTests extends BaseAggregationTestCase { + + @Override + protected DateHistogramFactory createTestAggregatorFactory() { + DateHistogramFactory factory = new DateHistogramFactory("foo"); + factory.field(INT_FIELD_NAME); + if (randomBoolean()) { + factory.interval(randomIntBetween(1, 100000)); + } else { + if (randomBoolean()) { + factory.dateHistogramInterval(randomFrom(DateHistogramInterval.YEAR, DateHistogramInterval.QUARTER, + DateHistogramInterval.MONTH, DateHistogramInterval.WEEK, DateHistogramInterval.DAY, DateHistogramInterval.HOUR, + DateHistogramInterval.MINUTE, DateHistogramInterval.SECOND)); + } else { + int branch = randomInt(4); + switch (branch) { + case 0: + factory.dateHistogramInterval(DateHistogramInterval.seconds(randomIntBetween(1, 1000))); + break; + case 1: + factory.dateHistogramInterval(DateHistogramInterval.minutes(randomIntBetween(1, 1000))); + break; + case 2: + factory.dateHistogramInterval(DateHistogramInterval.hours(randomIntBetween(1, 1000))); + break; + case 3: + factory.dateHistogramInterval(DateHistogramInterval.days(randomIntBetween(1, 1000))); + break; + case 4: + factory.dateHistogramInterval(DateHistogramInterval.weeks(randomIntBetween(1, 1000))); + break; + default: + throw new IllegalStateException("invalid branch: " + branch); + } + } + } + if (randomBoolean()) { + long extendedBoundsMin = randomIntBetween(-100000, 100000); + long extendedBoundsMax = randomIntBetween((int) extendedBoundsMin, 200000); + factory.extendedBounds(new ExtendedBounds(extendedBoundsMin, extendedBoundsMax)); + } + if (randomBoolean()) { + factory.format("###.##"); + } + if (randomBoolean()) { + factory.keyed(randomBoolean()); + } + if (randomBoolean()) { + factory.minDocCount(randomIntBetween(0, 100)); + } + if (randomBoolean()) { + factory.missing(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + factory.offset(randomIntBetween(0, 100000)); + } + if (randomBoolean()) { + int branch = randomInt(5); + switch (branch) { + case 0: + factory.order(Order.COUNT_ASC); + break; + case 1: + factory.order(Order.COUNT_DESC); + break; + case 2: + factory.order(Order.KEY_ASC); + break; + case 3: + factory.order(Order.KEY_DESC); + break; + case 4: + factory.order(Order.aggregation("foo", true)); + break; + case 5: + factory.order(Order.aggregation("foo", false)); + break; + } + } + return factory; + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java index ed3696da267..dc1bf43f0e5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeTests.java @@ -23,15 +23,12 @@ import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range; import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorFactory; -import 
java.util.ArrayList; -import java.util.List; - public class DateRangeTests extends BaseAggregationTestCase { @Override protected DateRangeAggregatorFactory createTestAggregatorFactory() { int numRanges = randomIntBetween(1, 10); - List ranges = new ArrayList<>(numRanges); + DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory("foo"); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { @@ -42,14 +39,13 @@ public class DateRangeTests extends BaseAggregationTestCase { @Override protected final SamplerAggregator.DiversifiedFactory createTestAggregatorFactory() { - SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory("foo", ValuesSourceType.ANY, - null); + SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory("foo"); String field = randomNumericField(); int randomFieldBranch = randomInt(3); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 6e97a33e933..be3dd83dd22 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -93,7 +93,7 @@ public class FilterIT extends ESIntegTestCase { public void testSimple() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("tag1").filter(termQuery("tag", "tag1"))) + .addAggregation(filter("tag1", termQuery("tag", "tag1"))) .execute().actionGet(); assertSearchResponse(response); @@ -109,7 +109,7 @@ public class FilterIT extends ESIntegTestCase { // https://github.com/elasticsearch/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); - SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1").filter(emptyFilter)).execute().actionGet(); + SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).execute().actionGet(); assertSearchResponse(response); @@ -120,8 +120,7 @@ public class FilterIT extends ESIntegTestCase { public void testWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("tag1") - .filter(termQuery("tag", "tag1")) + .addAggregation(filter("tag1", termQuery("tag", "tag1")) .subAggregation(avg("avg_value").field("value"))) .execute().actionGet(); @@ -150,7 +149,7 @@ public class FilterIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation( histogram("histo").field("value").interval(2L).subAggregation( - filter("filter").filter(matchAllQuery()))).get(); + filter("filter", matchAllQuery()))).get(); assertSearchResponse(response); @@ -168,8 +167,7 @@ public class FilterIT extends ESIntegTestCase { public void testWithContextBasedSubAggregation() throws Exception { try { client().prepareSearch("idx") - .addAggregation(filter("tag1") - .filter(termQuery("tag", "tag1")) + .addAggregation(filter("tag1", termQuery("tag", "tag1")) .subAggregation(avg("avg_value"))) .execute().actionGet(); @@ -185,7 +183,7 @@ public class FilterIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0) - 
.subAggregation(filter("filter").filter(matchAllQuery()))) + .subAggregation(filter("filter", matchAllQuery()))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index 6dbd3094e6f..c84c5f5860e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.filters.Filters; +import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.test.ESIntegTestCase; @@ -109,11 +110,8 @@ public class FiltersIT extends ESIntegTestCase { } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters("tags") - .filter("tag1", termQuery("tag", "tag1")) - .filter("tag2", termQuery("tag", "tag2"))) + SearchResponse response = client().prepareSearch("idx").addAggregation( + filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))) .execute().actionGet(); assertSearchResponse(response); @@ -136,10 +134,10 @@ public class FiltersIT extends ESIntegTestCase { // See NullPointer issue when filters are empty: // https://github.com/elasticsearch/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { - QueryBuilder emptyFilter = new BoolQueryBuilder(); + QueryBuilder emptyFilter = new BoolQueryBuilder(); SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags").filter("all", emptyFilter).filter("tag1", termQuery("tag", "tag1"))).execute() - .actionGet(); + .addAggregation(filters("tags", new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1")))) + .execute().actionGet(); assertSearchResponse(response); @@ -155,11 +153,8 @@ public class FiltersIT extends ESIntegTestCase { public void testWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters("tags") - .filter("tag1", termQuery("tag", "tag1")) - .filter("tag2", termQuery("tag", "tag2")) - .subAggregation(avg("avg_value").field("value"))) + .addAggregation(filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), + new KeyedFilter("tag2", termQuery("tag", "tag2"))).subAggregation(avg("avg_value").field("value"))) .execute().actionGet(); assertSearchResponse(response); @@ -210,7 +205,7 @@ public class FiltersIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .addAggregation( histogram("histo").field("value").interval(2L).subAggregation( - filters("filters").filter(matchAllQuery()))).get(); + filters("filters", matchAllQuery()))).get(); assertSearchResponse(response); @@ -232,9 +227,7 @@ public class FiltersIT extends ESIntegTestCase { try { client().prepareSearch("idx") .addAggregation( - filters("tags") - .filter("tag1", termQuery("tag", "tag1")) - .filter("tag2", termQuery("tag", "tag2")) + 
filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) .subAggregation(avg("avg_value")) ) .execute().actionGet(); @@ -251,7 +244,7 @@ public class FiltersIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0) - .subAggregation(filters("filters").filter("all", matchAllQuery()))) + .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery())))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); @@ -270,11 +263,7 @@ public class FiltersIT extends ESIntegTestCase { public void testSimpleNonKeyed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters("tags") - .filter(termQuery("tag", "tag1")) - .filter(termQuery("tag", "tag2"))) - .execute().actionGet(); + .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2"))).execute().actionGet(); assertSearchResponse(response); @@ -297,12 +286,9 @@ public class FiltersIT extends ESIntegTestCase { } public void testOtherBucket() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - filters("tags").otherBucket(true) - .filter("tag1", termQuery("tag", "tag1")) - .filter("tag2", termQuery("tag", "tag2"))) + SearchResponse response = client().prepareSearch("idx").addAggregation( + filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + .otherBucket(true)) .execute().actionGet(); assertSearchResponse(response); @@ -327,12 +313,9 @@ public class FiltersIT extends ESIntegTestCase { } public void testOtherNamedBucket() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - filters("tags").otherBucket(true).otherBucketKey("foobar") - .filter("tag1", termQuery("tag", "tag1")) - .filter("tag2", termQuery("tag", "tag2"))) + SearchResponse response = client().prepareSearch("idx") + .addAggregation(filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), + new KeyedFilter("tag2", termQuery("tag", "tag2"))).otherBucket(true).otherBucketKey("foobar")) .execute().actionGet(); assertSearchResponse(response); @@ -358,11 +341,8 @@ public class FiltersIT extends ESIntegTestCase { public void testOtherNonKeyed() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters("tags").otherBucket(true) - .filter(termQuery("tag", "tag1")) - .filter(termQuery("tag", "tag2"))) - .execute().actionGet(); + .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true)).execute() + .actionGet(); assertSearchResponse(response); @@ -390,10 +370,8 @@ public class FiltersIT extends ESIntegTestCase { public void testOtherWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters("tags").otherBucket(true) - .filter("tag1", termQuery("tag", "tag1")) - .filter("tag2", termQuery("tag", "tag2")) + .addAggregation(filters("tags", new KeyedFilter("tag1", termQuery("tag", "tag1")), + new KeyedFilter("tag2", termQuery("tag", "tag2"))).otherBucket(true) .subAggregation(avg("avg_value").field("value"))) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 57c4c8bc6fc..f6ebbbc4dc7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -140,10 +141,9 @@ public class GeoDistanceIT extends ESIntegTestCase { public void testSimple() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoDistance("amsterdam_rings") + .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) .field("location") .unit(DistanceUnit.KILOMETERS) - .point("52.3760, 4.894") // coords of amsterdam .addUnboundedTo(500) .addRange(500, 1000) .addUnboundedFrom(1000)) @@ -188,10 +188,9 @@ public class GeoDistanceIT extends ESIntegTestCase { public void testSimpleWithCustomKeys() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoDistance("amsterdam_rings") + .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) .field("location") .unit(DistanceUnit.KILOMETERS) - .point("52.3760, 4.894") // coords of amsterdam .addUnboundedTo("ring1", 500) .addRange("ring2", 500, 1000) .addUnboundedFrom("ring3", 1000)) @@ -238,10 +237,9 @@ public class GeoDistanceIT extends ESIntegTestCase { client().admin().cluster().prepareHealth("idx_unmapped").setWaitForYellowStatus().execute().actionGet(); SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(geoDistance("amsterdam_rings") + .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) .field("location") .unit(DistanceUnit.KILOMETERS) - .point("52.3760, 4.894") // coords of amsterdam .addUnboundedTo(500) .addRange(500, 1000) .addUnboundedFrom(1000)) @@ -286,10 +284,9 @@ public class GeoDistanceIT extends ESIntegTestCase { public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(geoDistance("amsterdam_rings") + .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) .field("location") .unit(DistanceUnit.KILOMETERS) - .point("52.3760, 4.894") // coords of amsterdam .addUnboundedTo(500) .addRange(500, 1000) .addUnboundedFrom(1000)) @@ -334,10 +331,9 @@ public class GeoDistanceIT extends ESIntegTestCase { public void testWithSubAggregation() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoDistance("amsterdam_rings") + .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) .field("location") .unit(DistanceUnit.KILOMETERS) - .point("52.3760, 4.894") // coords of amsterdam .addUnboundedTo(500) .addRange(500, 1000) .addUnboundedFrom(1000) @@ -422,7 +418,7 @@ public class GeoDistanceIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0) - .subAggregation(geoDistance("geo_dist").field("location").point("52.3760, 
4.894").addRange("0-100", 0.0, 100.0))) + .subAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location").addRange("0-100", 0.0, 100.0))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); @@ -447,11 +443,10 @@ public class GeoDistanceIT extends ESIntegTestCase { public void testMultiValues() throws Exception { SearchResponse response = client().prepareSearch("idx-multi") - .addAggregation(geoDistance("amsterdam_rings") + .addAggregation(geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)) .field("location") .unit(DistanceUnit.KILOMETERS) .distanceType(org.elasticsearch.common.geo.GeoDistance.ARC) - .point("52.3760, 4.894") // coords of amsterdam .addUnboundedTo(500) .addRange(500, 1000) .addUnboundedFrom(1000)) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java index 9d579ad0425..a3ac9c49a44 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceRangeTests.java @@ -27,15 +27,13 @@ import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanc import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range; import org.elasticsearch.test.geo.RandomShapeGenerator; -import java.util.ArrayList; -import java.util.List; - public class GeoDistanceRangeTests extends BaseAggregationTestCase { @Override protected GeoDistanceFactory createTestAggregatorFactory() { int numRanges = randomIntBetween(1, 10); - List ranges = new ArrayList<>(numRanges); + GeoPoint origin = RandomShapeGenerator.randomPoint(getRandom()); + GeoDistanceFactory factory = new GeoDistanceFactory("foo", origin); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { @@ -45,10 +43,8 @@ public class GeoDistanceRangeTests extends BaseAggregationTestCase { @Override protected IPv4RangeAggregatorFactory createTestAggregatorFactory() { int numRanges = randomIntBetween(1, 10); - List ranges = new ArrayList<>(numRanges); + IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory("foo"); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { @@ -45,11 +42,11 @@ public class IPv4RangeTests extends BaseAggregationTestCase { @Override protected Factory createTestAggregatorFactory() { int numRanges = randomIntBetween(1, 10); - List ranges = new ArrayList<>(numRanges); + Factory factory = new Factory("foo"); for (int i = 0; i < numRanges; i++) { String key = null; if (randomBoolean()) { @@ -43,14 +40,13 @@ public class RangeTests extends BaseAggregationTestCase : (Double.isInfinite(from) ? randomIntBetween(Integer.MIN_VALUE, Integer.MAX_VALUE) : randomIntBetween((int) from, Integer.MAX_VALUE)); if (randomBoolean()) { - ranges.add(new Range(key, from, to)); + factory.addRange(new Range(key, from, to)); } else { String fromAsStr = Double.isInfinite(from) ? null : String.valueOf(from); String toAsStr = Double.isInfinite(to) ? 
null : String.valueOf(to); - ranges.add(new Range(key, fromAsStr, toAsStr)); + factory.addRange(new Range(key, fromAsStr, toAsStr)); } } - Factory factory = new Factory("foo", ranges); factory.field(INT_FIELD_NAME); if (randomBoolean()) { factory.format("###.##"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index 88000630437..438f9b49568 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -138,7 +138,7 @@ public class ReverseNestedIT extends ESIntegTestCase { public void testSimpleReverseNestedToRoot() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type1") - .addAggregation(nested("nested1").path("nested1") + .addAggregation(nested("nested1", "nested1") .subAggregation( terms("field2").field("nested1.field2") .subAggregation( @@ -326,10 +326,10 @@ public class ReverseNestedIT extends ESIntegTestCase { public void testSimpleNested1ToRootToNested2() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type2") - .addAggregation(nested("nested1").path("nested1") + .addAggregation(nested("nested1", "nested1") .subAggregation( reverseNested("nested1_to_root") - .subAggregation(nested("root_to_nested2").path("nested1.nested2")) + .subAggregation(nested("root_to_nested2", "nested1.nested2")) ) ) .get(); @@ -348,7 +348,7 @@ public class ReverseNestedIT extends ESIntegTestCase { public void testSimpleReverseNestedToNested1() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type2") - .addAggregation(nested("nested1").path("nested1.nested2") + .addAggregation(nested("nested1", "nested1.nested2") .subAggregation( terms("field2").field("nested1.nested2.field2").order(Terms.Order.term(true)) .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -470,7 +470,7 @@ public class ReverseNestedIT extends ESIntegTestCase { public void testNonExistingNestedField() throws Exception { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(nested("nested2").path("nested1.nested2").subAggregation(reverseNested("incorrect").path("nested3"))) + .addAggregation(nested("nested2", "nested1.nested2").subAggregation(reverseNested("incorrect").path("nested3"))) .execute().actionGet(); Nested nested = searchResponse.getAggregations().get("nested2"); @@ -558,11 +558,11 @@ public class ReverseNestedIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx3") .addAggregation( - nested("nested_0").path("category").subAggregation( + nested("nested_0", "category").subAggregation( terms("group_by_category").field("category.name").subAggregation( reverseNested("to_root").subAggregation( - nested("nested_1").path("sku").subAggregation( - filter("filter_by_sku").filter(termQuery("sku.sku_type", "bar1")).subAggregation( + nested("nested_1", "sku").subAggregation( + filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( count("sku_count").field("sku.sku_type") ) ) @@ -593,13 +593,13 @@ public class ReverseNestedIT extends ESIntegTestCase { response = client().prepareSearch("idx3") .addAggregation( - nested("nested_0").path("category").subAggregation( + nested("nested_0", "category").subAggregation( 
terms("group_by_category").field("category.name").subAggregation( reverseNested("to_root").subAggregation( - nested("nested_1").path("sku").subAggregation( - filter("filter_by_sku").filter(termQuery("sku.sku_type", "bar1")).subAggregation( - nested("nested_2").path("sku.colors").subAggregation( - filter("filter_sku_color").filter(termQuery("sku.colors.name", "red")).subAggregation( + nested("nested_1", "sku").subAggregation( + filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( + nested("nested_2", "sku.colors").subAggregation( + filter("filter_sku_color", termQuery("sku.colors.name", "red")).subAggregation( reverseNested("reverse_to_sku").path("sku").subAggregation( count("sku_count").field("sku.sku_type") ) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java index bcc40e873b4..d0ed8bc39f1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java @@ -23,11 +23,9 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.test.ESIntegTestCase; @@ -40,6 +38,8 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -124,8 +124,8 @@ public class SamplerIT extends ESIntegTestCase { } public void testSimpleSampler() throws Exception { - SamplerAggregationBuilder sampleAgg = new SamplerAggregationBuilder("sample").shardSize(100); - sampleAgg.subAggregation(new TermsBuilder("authors").field("author")); + SamplerAggregator.Factory sampleAgg = sampler("sample").shardSize(100); + sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")).setFrom(0).setSize(60).addAggregation(sampleAgg).execute().actionGet(); assertSearchResponse(response); @@ -141,8 +141,8 @@ public class SamplerIT extends ESIntegTestCase { } public void testUnmappedChildAggNoDiversity() throws Exception { - SamplerAggregationBuilder sampleAgg = new SamplerAggregationBuilder("sample").shardSize(100); - sampleAgg.subAggregation(new TermsBuilder("authors").field("author")); + SamplerAggregator.Factory sampleAgg = 
sampler("sample").shardSize(100); + sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped") .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) @@ -158,8 +158,8 @@ public class SamplerIT extends ESIntegTestCase { } public void testPartiallyUnmappedChildAggNoDiversity() throws Exception { - SamplerAggregationBuilder sampleAgg = new SamplerAggregationBuilder("sample").shardSize(100); - sampleAgg.subAggregation(new TermsBuilder("authors").field("author")); + SamplerAggregator.Factory sampleAgg = sampler("sample").shardSize(100); + sampleAgg.subAggregation(terms("authors").field("author")); SearchResponse response = client().prepareSearch("idx_unmapped", "test") .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java index d138c0ccd3e..8c6f30c8c1e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java @@ -91,7 +91,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(global("global") - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -104,8 +104,8 @@ public class ShardReduceIT extends ESIntegTestCase { public void testFilter() throws Exception { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(filter("filter").filter(QueryBuilders.matchAllQuery()) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .addAggregation(filter("filter", QueryBuilders.matchAllQuery()) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -119,7 +119,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(missing("missing").field("foobar") - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -133,9 +133,9 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(global("global") - .subAggregation(filter("filter").filter(QueryBuilders.matchAllQuery()) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery()) .subAggregation(missing("missing").field("foobar") - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))))) .execute().actionGet(); 
assertSearchResponse(response); @@ -150,8 +150,8 @@ public class ShardReduceIT extends ESIntegTestCase { public void testNested() throws Exception { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(nested("nested").path("nested") - .subAggregation(dateHistogram("histo").field("nested.date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .addAggregation(nested("nested", "nested") + .subAggregation(dateHistogram("histo").field("nested.date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -166,7 +166,7 @@ public class ShardReduceIT extends ESIntegTestCase { .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(terms("terms").field("term-s") .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -181,7 +181,7 @@ public class ShardReduceIT extends ESIntegTestCase { .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(terms("terms").field("term-l") .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -196,7 +196,7 @@ public class ShardReduceIT extends ESIntegTestCase { .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(terms("terms").field("term-d") .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -210,7 +210,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(range("range").field("value").addRange("r1", 0, 10) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -224,7 +224,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(dateRange("range").field("date").addRange("r1", "2014-01-01", "2014-01-10") - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -238,7 +238,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(ipRange("range").field("ip").addRange("r1", "10.0.0.1", "10.0.0.10") - 
.subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -252,7 +252,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(histogram("topHisto").field("value").interval(5) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -265,8 +265,8 @@ public class ShardReduceIT extends ESIntegTestCase { public void testDateHistogram() throws Exception { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) - .addAggregation(dateHistogram("topHisto").field("date").interval(DateHistogramInterval.MONTH) - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .addAggregation(dateHistogram("topHisto").field("date").dateHistogramInterval(DateHistogramInterval.MONTH) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); @@ -281,7 +281,7 @@ public class ShardReduceIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("idx") .setQuery(QueryBuilders.matchAllQuery()) .addAggregation(geohashGrid("grid").field("location") - .subAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).minDocCount(0))) + .subAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(0))) .execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java index 7582d75ca0b..214a8e6c61b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsIT.java @@ -27,14 +27,13 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms.Bucket; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory.ExecutionMode; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import 
org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; @@ -46,6 +45,8 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -116,7 +117,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint()) + .addAggregation(significantTerms("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint()) .minDocCount(2)) .execute() .actionGet(); @@ -132,8 +133,8 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "paul")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint()) - .minDocCount(1).exclude(excludeTerms)) + .addAggregation(significantTerms("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint()) + .minDocCount(1).includeExclude(new IncludeExclude(null, excludeTerms))) .execute() .actionGet(); assertSearchResponse(response); @@ -145,8 +146,8 @@ public class SignificantTermsIT extends ESIntegTestCase { public void testIncludeExclude() throws Exception { SearchResponse response = client().prepareSearch("test") .setQuery(new TermQueryBuilder("_all", "weller")) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()) - .exclude("weller")) + .addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()) + .includeExclude(new IncludeExclude(null, "weller"))) .get(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); @@ -164,8 +165,8 @@ public class SignificantTermsIT extends ESIntegTestCase { response = client().prepareSearch("test") .setQuery(new TermQueryBuilder("_all", "weller")) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()) - .include("weller")) + .addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()) + .includeExclude(new IncludeExclude("weller", null))) .get(); assertSearchResponse(response); topTerms = response.getAggregations().get("mySignificantTerms"); @@ -181,8 +182,8 @@ public class SignificantTermsIT extends ESIntegTestCase { String []incExcTerms={"weller","nosuchterm"}; SearchResponse response = client().prepareSearch("test") .setQuery(new TermQueryBuilder("_all", "weller")) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()) - .exclude(incExcTerms)) + 
.addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()) + .includeExclude(new IncludeExclude(null, incExcTerms))) .get(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); @@ -194,8 +195,8 @@ public class SignificantTermsIT extends ESIntegTestCase { response = client().prepareSearch("test") .setQuery(new TermQueryBuilder("_all", "weller")) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()) - .include(incExcTerms)) + .addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()) + .includeExclude(new IncludeExclude(incExcTerms, null))) .get(); assertSearchResponse(response); topTerms = response.getAggregations().get("mySignificantTerms"); @@ -212,7 +213,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint()) + .addAggregation(significantTerms("mySignificantTerms").field("fact_category").executionHint(randomExecutionHint()) .minDocCount(2)) .execute() .actionGet(); @@ -226,7 +227,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()) + .addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()) .minDocCount(2)) .execute() .actionGet(); @@ -240,7 +241,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()).significanceHeuristic(new GND.GNDBuilder(true)) + .addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()).significanceHeuristic(new GND(true)) .minDocCount(2)) .execute() .actionGet(); @@ -254,7 +255,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()).significanceHeuristic(new ChiSquare.ChiSquareBuilder(false,true)) + .addAggregation(significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()).significanceHeuristic(new ChiSquare(false,true)) .minDocCount(2)) .execute() .actionGet(); @@ -272,8 +273,8 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSize(60) .setExplain(true) .addAggregation( - new SignificantTermsBuilder("mySignificantTerms").field("description").executionHint(randomExecutionHint()) - .significanceHeuristic(new PercentageScore.PercentageScoreBuilder()).minDocCount(2)).execute().actionGet(); + significantTerms("mySignificantTerms").field("description").executionHint(randomExecutionHint()) + .significanceHeuristic(new 
PercentageScore()).minDocCount(2)).execute().actionGet(); assertSearchResponse(response); SignificantTerms topTerms = response.getAggregations().get("mySignificantTerms"); checkExpectedStringTermsFound(topTerms); @@ -288,7 +289,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description") + .addAggregation(significantTerms("mySignificantTerms").field("description") .minDocCount(2).backgroundFilter(QueryBuilders.termQuery("fact_category", 1))) .execute() .actionGet(); @@ -312,7 +313,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "weller")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description") + .addAggregation(significantTerms("mySignificantTerms").field("description") .minDocCount(1).backgroundFilter(QueryBuilders.termsQuery("description", "paul"))) .execute() .actionGet(); @@ -335,9 +336,9 @@ public class SignificantTermsIT extends ESIntegTestCase { { "craig", "kelly", "terje", "haakonsen", "burton" }}; SearchResponse response = client().prepareSearch("test") .setSearchType(SearchType.QUERY_AND_FETCH) - .addAggregation(new TermsBuilder("myCategories").field("fact_category").minDocCount(2) + .addAggregation(terms("myCategories").field("fact_category").minDocCount(2) .subAggregation( - new SignificantTermsBuilder("mySignificantTerms").field("description") + significantTerms("mySignificantTerms").field("description") .executionHint(randomExecutionHint()) .minDocCount(2))) .execute() @@ -362,7 +363,7 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms").field("description") + .addAggregation(significantTerms("mySignificantTerms").field("description") .executionHint(randomExecutionHint()) .minDocCount(2)) .execute() @@ -393,10 +394,10 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms") + .addAggregation(significantTerms("mySignificantTerms") .field("description") .executionHint(randomExecutionHint()) - .significanceHeuristic(new JLHScore.JLHScoreBuilder()) + .significanceHeuristic(new JLHScore()) .minDocCount(2)) .execute() .actionGet(); @@ -410,10 +411,10 @@ public class SignificantTermsIT extends ESIntegTestCase { .setSearchType(SearchType.QUERY_AND_FETCH) .setQuery(new TermQueryBuilder("_all", "terje")) .setFrom(0).setSize(60).setExplain(true) - .addAggregation(new SignificantTermsBuilder("mySignificantTerms") + .addAggregation(significantTerms("mySignificantTerms") .field("description") .executionHint(randomExecutionHint()) - .significanceHeuristic(new MutualInformation.MutualInformationBuilder(false, true)) + .significanceHeuristic(new MutualInformation(false, true)) .minDocCount(1)) .execute() .actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index eb4143017bf..33f17c328cd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -35,13 +35,11 @@ import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams; import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptWithParams; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory; -import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; @@ -52,7 +50,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; @@ -69,6 +66,9 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -99,11 +99,12 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation(new TermsBuilder("class") + .addAggregation( + terms("class") .field(CLASS_FIELD) - .subAggregation((new SignificantTermsBuilder("sig_terms")) + .subAggregation((significantTerms("sig_terms")) .field(TEXT_FIELD) - .significanceHeuristic(new SimpleHeuristic.SimpleHeuristicBuilder()) + .significanceHeuristic(new SimpleHeuristic()) .minDocCount(1) ) ) @@ -131,11 +132,12 @@ public class 
SignificantTermsSignificanceScoreIT extends ESIntegTestCase { // the reason is that this would trigger toXContent and we would like to check that this has no potential side effects response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation(new TermsBuilder("class") + .addAggregation( + terms("class") .field(CLASS_FIELD) - .subAggregation((new SignificantTermsBuilder("sig_terms")) + .subAggregation((significantTerms("sig_terms")) .field(TEXT_FIELD) - .significanceHeuristic(new SimpleHeuristic.SimpleHeuristicBuilder()) + .significanceHeuristic(new SimpleHeuristic()) .minDocCount(1) ) ) @@ -211,6 +213,22 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { return builder; } + @Override + public int hashCode() { + return 1; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + return true; + } + /** * @param subsetFreq The frequency of the term in the selected sample * @param subsetSize The size of the selected sample (typically number of docs) @@ -237,15 +255,6 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { return NAMES_FIELD.getAllNamesIncludedDeprecated(); } } - - public static class SimpleHeuristicBuilder implements SignificanceHeuristicBuilder { - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAMES_FIELD.getPreferredName()).endObject(); - return builder; - } - } } public void testXContentResponse() throws Exception { @@ -253,7 +262,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); SearchResponse response = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("sig_terms").field(TEXT_FIELD))) + .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))) .execute() .actionGet(); assertSearchResponse(response); @@ -313,10 +322,11 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { indexRandom(true, false, indexRequestBuilderList); SearchResponse response1 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation(new TermsBuilder("class") + .addAggregation( + terms("class") .field(CLASS_FIELD) .subAggregation( - new SignificantTermsBuilder("sig_terms") + significantTerms("sig_terms") .field(TEXT_FIELD) .minDocCount(1))) .execute() @@ -327,22 +337,22 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { String type = randomBoolean() ? 
"string" : "long"; String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; SharedSignificantTermsTestMethods.index01Docs(type, settings, this); - testBackgroundVsSeparateSet(new MutualInformation.MutualInformationBuilder(true, true), new MutualInformation.MutualInformationBuilder(true, false)); - testBackgroundVsSeparateSet(new ChiSquare.ChiSquareBuilder(true, true), new ChiSquare.ChiSquareBuilder(true, false)); - testBackgroundVsSeparateSet(new GND.GNDBuilder(true), new GND.GNDBuilder(false)); + testBackgroundVsSeparateSet(new MutualInformation(true, true), new MutualInformation(true, false)); + testBackgroundVsSeparateSet(new ChiSquare(true, true), new ChiSquare(true, false)); + testBackgroundVsSeparateSet(new GND(true), new GND(false)); } // compute significance score by // 1. terms agg on class and significant terms // 2. filter buckets and set the background to the other class and set is_background false // both should yield exact same result - public void testBackgroundVsSeparateSet(SignificanceHeuristicBuilder significanceHeuristicExpectingSuperset, SignificanceHeuristicBuilder significanceHeuristicExpectingSeparateSets) throws Exception { + public void testBackgroundVsSeparateSet(SignificanceHeuristic significanceHeuristicExpectingSuperset, SignificanceHeuristic significanceHeuristicExpectingSeparateSets) throws Exception { SearchResponse response1 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation(new TermsBuilder("class") + .addAggregation(terms("class") .field(CLASS_FIELD) .subAggregation( - new SignificantTermsBuilder("sig_terms") + significantTerms("sig_terms") .field(TEXT_FIELD) .minDocCount(1) .significanceHeuristic( @@ -351,16 +361,14 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { .actionGet(); assertSearchResponse(response1); SearchResponse response2 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE) - .addAggregation((new FilterAggregationBuilder("0")) - .filter(QueryBuilders.termQuery(CLASS_FIELD, "0")) - .subAggregation(new SignificantTermsBuilder("sig_terms") + .addAggregation(filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")) + .subAggregation(significantTerms("sig_terms") .field(TEXT_FIELD) .minDocCount(1) .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) .significanceHeuristic(significanceHeuristicExpectingSeparateSets))) - .addAggregation((new FilterAggregationBuilder("1")) - .filter(QueryBuilders.termQuery(CLASS_FIELD, "1")) - .subAggregation(new SignificantTermsBuilder("sig_terms") + .addAggregation(filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")) + .subAggregation(significantTerms("sig_terms") .field(TEXT_FIELD) .minDocCount(1) .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "0")) @@ -389,15 +397,15 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { public void testScoresEqualForPositiveAndNegative() throws Exception { indexEqualTestData(); - testScoresEqualForPositiveAndNegative(new MutualInformation.MutualInformationBuilder(true, true)); - testScoresEqualForPositiveAndNegative(new ChiSquare.ChiSquareBuilder(true, true)); + testScoresEqualForPositiveAndNegative(new MutualInformation(true, true)); + testScoresEqualForPositiveAndNegative(new ChiSquare(true, true)); } - public void testScoresEqualForPositiveAndNegative(SignificanceHeuristicBuilder heuristic) throws Exception { + public void testScoresEqualForPositiveAndNegative(SignificanceHeuristic heuristic) throws Exception { //check that results for both classes are 
the same with exclude negatives = false and classes are routing ids SearchResponse response = client().prepareSearch("test") - .addAggregation(new TermsBuilder("class").field("class").subAggregation(new SignificantTermsBuilder("mySignificantTerms") + .addAggregation(terms("class").field("class").subAggregation(significantTerms("mySignificantTerms") .field("text") .executionHint(randomExecutionHint()) .significanceHeuristic(heuristic) @@ -454,13 +462,14 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { public void testScriptScore() throws ExecutionException, InterruptedException, IOException { indexRandomFrequencies01(randomBoolean() ? "string" : "long"); - ScriptHeuristic.ScriptHeuristicBuilder scriptHeuristicBuilder = getScriptSignificanceHeuristicBuilder(); + ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic(); ensureYellow(); SearchResponse response = client().prepareSearch(INDEX_NAME) - .addAggregation(new TermsBuilder("class").field(CLASS_FIELD).subAggregation(new SignificantTermsBuilder("mySignificantTerms") + .addAggregation(terms("class").field(CLASS_FIELD) + .subAggregation(significantTerms("mySignificantTerms") .field(TEXT_FIELD) .executionHint(randomExecutionHint()) - .significanceHeuristic(scriptHeuristicBuilder) + .significanceHeuristic(scriptHeuristic) .minDocCount(1).shardSize(2).size(2))) .execute() .actionGet(); @@ -472,7 +481,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } } - private ScriptHeuristic.ScriptHeuristicBuilder getScriptSignificanceHeuristicBuilder() throws IOException { + private ScriptHeuristic getScriptSignificanceHeuristic() throws IOException { Script script = null; if (randomBoolean()) { Map params = null; @@ -482,9 +491,9 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } else { script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null); } - ScriptHeuristic.ScriptHeuristicBuilder builder = new ScriptHeuristic.ScriptHeuristicBuilder().setScript(script); + ScriptHeuristic scriptHeuristic = new ScriptHeuristic(script); - return builder; + return scriptHeuristic; } private void indexRandomFrequencies01(String type) throws ExecutionException, InterruptedException { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java index 8ad928e5ed5..d0c9e616f5f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsTests.java @@ -34,8 +34,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Scrip import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; - import java.util.SortedSet; import java.util.TreeSet; @@ -54,7 +52,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase @Override protected TermsAggregatorFactory createTestAggregatorFactory() { String name = randomAsciiOfLengthBetween(3, 20); - TermsAggregatorFactory factory = new TermsAggregatorFactory(name, ValuesSourceType.ANY, null); + TermsAggregatorFactory factory = 
new TermsAggregatorFactory(name, null); String field = randomAsciiOfLengthBetween(3, 20); int randomFieldBranch = randomInt(2); switch (randomFieldBranch) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 865a3513bd4..b2cd7b0f197 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHSc import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -67,6 +66,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; /** * @@ -209,10 +209,10 @@ public class SignificanceHeuristicTests extends ESTestCase { assertThat(parseFromString(heuristicParserMapper, searchContext, "\"chi_square\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new ChiSquare(includeNegatives, backgroundIsSuperset)))); // test with builders - assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore.JLHScoreBuilder()) instanceof JLHScore); - assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new GND.GNDBuilder(backgroundIsSuperset)) instanceof GND); - assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new MutualInformation.MutualInformationBuilder(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new MutualInformation(includeNegatives, backgroundIsSuperset))); - assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new ChiSquare.ChiSquareBuilder(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new ChiSquare(includeNegatives, backgroundIsSuperset))); + assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore()) instanceof JLHScore); + assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new GND(backgroundIsSuperset)) instanceof GND); + assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new MutualInformation(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new MutualInformation(includeNegatives, backgroundIsSuperset))); + assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new ChiSquare(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new ChiSquare(includeNegatives, backgroundIsSuperset))); // test 
exceptions String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}"; @@ -249,9 +249,9 @@ public class SignificanceHeuristicTests extends ESTestCase { } } - protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, SignificanceHeuristicBuilder significanceHeuristicBuilder) throws IOException { - SignificantTermsBuilder stBuilder = new SignificantTermsBuilder("testagg"); - stBuilder.significanceHeuristic(significanceHeuristicBuilder).field("text").minDocCount(200); + protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, SignificanceHeuristic significanceHeuristic) throws IOException { + SignificantTermsAggregatorFactory stBuilder = significantTerms("testagg"); + stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200); XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder(); stBuilder.internalXContent(stXContentBuilder, null); XContentParser stParser = JsonXContent.jsonXContent.createParser(stXContentBuilder.string()); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java index 4c67155b78d..6e0abf3fa08 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java @@ -26,9 +26,6 @@ import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.Factory; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; -import java.util.ArrayList; -import java.util.List; - public class FiltersTests extends BaseAggregationTestCase { @Override @@ -37,13 +34,13 @@ public class FiltersTests extends BaseAggregationTestCase filters = new ArrayList<>(size); + KeyedFilter[] filters = new KeyedFilter[size]; for (int i = 0; i < size; i++) { // NORELEASE make RandomQueryBuilder work outside of the // AbstractQueryTestCase // builder.query(RandomQueryBuilder.createQuery(getRandom())); - filters.add(new KeyedFilter(randomAsciiOfLengthBetween(1, 20), - QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20)))); + filters[i] = new KeyedFilter(randomAsciiOfLengthBetween(1, 20), + QueryBuilders.termQuery(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20))); } factory = new Factory(randomAsciiOfLengthBetween(1, 20), filters); } else { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java similarity index 82% rename from core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java index 8f651764edf..37b7fa2c5d1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java @@ -21,14 +21,13 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.script.Script; import 
org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregator; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregator.Factory; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorFactory; -public class HDRPercentileRanksTests extends BaseAggregationTestCase { +public class PercentileRanksTests extends BaseAggregationTestCase { @Override - protected Factory createTestAggregatorFactory() { - Factory factory = new Factory(randomAsciiOfLengthBetween(1, 20)); + protected PercentileRanksAggregatorFactory createTestAggregatorFactory() { + PercentileRanksAggregatorFactory factory = new PercentileRanksAggregatorFactory(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.keyed(randomBoolean()); } @@ -41,6 +40,9 @@ public class HDRPercentileRanksTests extends BaseAggregationTestCase { +public class PercentilesTests extends BaseAggregationTestCase { @Override - protected Factory createTestAggregatorFactory() { - Factory factory = new Factory(randomAsciiOfLengthBetween(1, 20)); + protected PercentilesAggregatorFactory createTestAggregatorFactory() { + PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory(randomAsciiOfLengthBetween(1, 20)); if (randomBoolean()) { factory.keyed(randomBoolean()); } @@ -38,11 +37,14 @@ public class HDRPercentilesTests extends BaseAggregationTestCase { - - @Override - protected Factory createTestAggregatorFactory() { - Factory factory = new Factory(randomAsciiOfLengthBetween(1, 20)); - if (randomBoolean()) { - factory.keyed(randomBoolean()); - } - int valuesSize = randomIntBetween(1, 20); - double[] values = new double[valuesSize]; - for (int i = 0; i < valuesSize; i++) { - values[i] = randomDouble() * 100; - } - factory.values(values); - if (randomBoolean()) { - factory.compression(randomDoubleBetween(10, 40000, true)); - } - String field = randomNumericField(); - int randomFieldBranch = randomInt(3); - switch (randomFieldBranch) { - case 0: - factory.field(field); - break; - case 1: - factory.field(field); - factory.script(new Script("_value + 1")); - break; - case 2: - factory.script(new Script("doc[" + field + "] + 1")); - break; - } - if (randomBoolean()) { - factory.missing("MISSING"); - } - return factory; - } - -} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesTests.java deleted file mode 100644 index 898bda797d5..00000000000 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesTests.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.aggregations.metrics; - -import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregator; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregator.Factory; - -public class TDigestPercentilesTests extends BaseAggregationTestCase { - - @Override - protected Factory createTestAggregatorFactory() { - Factory factory = new Factory(randomAsciiOfLengthBetween(1, 20)); - if (randomBoolean()) { - factory.keyed(randomBoolean()); - } - if (randomBoolean()) { - int percentsSize = randomIntBetween(1, 20); - double[] percents = new double[percentsSize]; - for (int i = 0; i < percentsSize; i++) { - percents[i] = randomDouble() * 100; - } - factory.percents(percents); - } - if (randomBoolean()) { - factory.compression(randomDoubleBetween(10, 40000, true)); - } - String field = randomNumericField(); - int randomFieldBranch = randomInt(3); - switch (randomFieldBranch) { - case 0: - factory.field(field); - break; - case 1: - factory.field(field); - factory.script(new Script("_value + 1")); - break; - case 2: - factory.script(new Script("doc[" + field + "] + 1")); - break; - } - if (randomBoolean()) { - factory.missing("MISSING"); - } - if (randomBoolean()) { - factory.format("###.00"); - } - return factory; - } - -} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 1788213e45c..6c1ff6a380b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -690,8 +690,7 @@ public class TopHitsIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("articles") .setQuery(matchQuery("title", "title")) .addAggregation( - nested("to-comments") - .path("comments") + nested("to-comments", "comments") .subAggregation( terms("users") .field("comments.user") @@ -741,10 +740,9 @@ public class TopHitsIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("articles") .setQuery(matchQuery("title", "title")) .addAggregation( - nested("to-comments") - .path("comments") + nested("to-comments", "comments") .subAggregation( - nested("to-reviewers").path("comments.reviewers").subAggregation( + nested("to-reviewers", "comments.reviewers").subAggregation( // Also need to sort on _doc because there are two reviewers with the same name topHits("top-reviewers").sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7) ) @@ -848,7 +846,7 @@ public class TopHitsIT extends ESIntegTestCase { .prepareSearch("articles") .setQuery(nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test"))) .addAggregation( - nested("to-comments").path("comments").subAggregation( + nested("to-comments", "comments").subAggregation( topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true) .fieldDataField("comments.user") .scriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())).fetchSource("message", null) @@ -897,8 +895,7 @@ public class TopHitsIT extends ESIntegTestCase { 
                 .interval(5)
                 .order(Histogram.Order.aggregation("to-comments", true))
                 .subAggregation(
-                        nested("to-comments")
-                                .path("comments")
+                        nested("to-comments", "comments")
                                 .subAggregation(topHits("comments")
                                         .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text"))))
                                         .sort("comments.id", SortOrder.ASC))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java
new file mode 100644
index 00000000000..eb08e6f85a2
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.metrics.percentiles;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+
+public class PercentilesMethodTests extends ESTestCase {
+
+    public void testValidOrdinals() {
+        assertThat(PercentilesMethod.TDIGEST.ordinal(), equalTo(0));
+        assertThat(PercentilesMethod.HDR.ordinal(), equalTo(1));
+    }
+
+    public void testwriteTo() throws Exception {
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            PercentilesMethod.TDIGEST.writeTo(out);
+            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+                assertThat(in.readVInt(), equalTo(0));
+            }
+        }
+
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            PercentilesMethod.HDR.writeTo(out);
+            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+                assertThat(in.readVInt(), equalTo(1));
+            }
+        }
+    }
+
+    public void testReadFrom() throws Exception {
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            out.writeVInt(0);
+            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+                assertThat(PercentilesMethod.TDIGEST.readFrom(in), equalTo(PercentilesMethod.TDIGEST));
+            }
+        }
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            out.writeVInt(1);
+            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+                assertThat(PercentilesMethod.TDIGEST.readFrom(in), equalTo(PercentilesMethod.HDR));
+            }
+        }
+    }
+
+    public void testInvalidReadFrom() throws Exception {
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE));
+            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+                PercentilesMethod.TDIGEST.readFrom(in);
+                fail("Expected IOException");
+            } catch(IOException e) {
+                assertThat(e.getMessage(),
containsString("Unknown PercentilesMethod ordinal [")); + } + + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java index ea0eb7fd93a..074ad419858 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java @@ -21,10 +21,12 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.test.ESIntegTestCase; @@ -92,8 +94,8 @@ public class AvgBucketIT extends ESIntegTestCase { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .addAggregation(avgBucket("avg_bucket").setBucketsPaths("histo>_count")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(avgBucket("avg_bucket", "histo>_count")).execute().actionGet(); assertSearchResponse(response); @@ -130,8 +132,8 @@ public class AvgBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(avgBucket("avg_bucket").setBucketsPaths("histo>_count"))).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(avgBucket("avg_bucket", "histo>_count"))).execute().actionGet(); assertSearchResponse(response); @@ -173,7 +175,7 @@ public class AvgBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(avgBucket("avg_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(avgBucket("avg_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -212,9 +214,9 @@ public class AvgBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(avgBucket("avg_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(avgBucket("avg_bucket", "histo>sum"))).execute().actionGet(); assertSearchResponse(response); @@ -265,9 +267,9 @@ public class 
AvgBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(avgBucket("avg_bucket").setBucketsPaths("histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(avgBucket("avg_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -311,8 +313,9 @@ public class AvgBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(avgBucket("avg_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(avgBucket("avg_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -337,9 +340,9 @@ public class AvgBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(avgBucket("avg_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(avgBucket("avg_terms_bucket").setBucketsPaths("terms>avg_histo_bucket")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(avgBucket("avg_histo_bucket", "histo>_count"))) + .addAggregation(avgBucket("avg_terms_bucket", "terms>avg_histo_bucket")).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumIT.java index 6f10e5d91fa..906aa3d4578 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -88,8 +89,8 @@ public class CumulativeSumIT extends ESIntegTestCase { public void testDocCount() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) - .subAggregation(cumulativeSum("cumulative_sum").setBucketsPaths("_count"))).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) + .subAggregation(cumulativeSum("cumulative_sum", "_count"))).execute().actionGet(); assertSearchResponse(response); @@ -119,9 +120,9 @@ public class 
CumulativeSumIT extends ESIntegTestCase { .prepareSearch("idx") .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(cumulativeSum("cumulative_sum").setBucketsPaths("sum"))).execute().actionGet(); + .subAggregation(cumulativeSum("cumulative_sum", "sum"))).execute().actionGet(); assertSearchResponse(response); @@ -153,7 +154,7 @@ public class CumulativeSumIT extends ESIntegTestCase { .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation(cumulativeSum("cumulative_sum").setBucketsPaths("sum"))).execute().actionGet(); + .subAggregation(cumulativeSum("cumulative_sum", "sum"))).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java index 793cd8419d6..0e0aa2dc0d6 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java @@ -28,9 +28,8 @@ public class CumulativeSumTests extends BasePipelineAggregationTestCaseget>sum"))).execute().actionGet(); + .subAggregation(derivative("deriv", "filters>get>sum"))).execute().actionGet(); fail("Expected an Exception but didn't get one"); } catch (Exception e) { Throwable cause = ExceptionsHelper.unwrapCause(e); if (cause == null) { throw e; - } else if (cause instanceof SearchPhaseExecutionException) { - ElasticsearchException[] rootCauses = ((SearchPhaseExecutionException) cause).guessRootCauses(); - // If there is more than one root cause then something - // unexpected happened and we should re-throw the original - // exception - if (rootCauses.length > 1) { - throw e; - } - ElasticsearchException rootCauseWrapper = rootCauses[0]; - Throwable rootCause = rootCauseWrapper.getCause(); - if (rootCause == null || !(rootCause instanceof IllegalArgumentException)) { - throw e; - } - } else { + } else if (!(cause instanceof IllegalArgumentException)) { throw e; } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java index b2cd1d4be62..74c58f6cdce 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java @@ -29,9 +29,8 @@ public class DerivativeTests extends BasePipelineAggregationTestCase_count")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count")).execute().actionGet(); assertSearchResponse(response); @@ -141,8 +143,8 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(extendedStatsBucket("extended_stats_bucket").setBucketsPaths("histo>_count"))).execute().actionGet(); + 
.extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count"))).execute().actionGet(); assertSearchResponse(response); @@ -193,7 +195,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(extendedStatsBucket("extended_stats_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(extendedStatsBucket("extended_stats_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -241,9 +243,9 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum"))).execute().actionGet(); assertSearchResponse(response); @@ -303,9 +305,9 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket").setBucketsPaths("histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -358,8 +360,9 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(extendedStatsBucket("extended_stats_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(extendedStatsBucket("extended_stats_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -385,10 +388,10 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket") - .setBucketsPaths("histo>sum").sigma(-1.0))).execute().actionGet(); + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") + .sigma(-1.0))).execute().actionGet(); fail("Illegal sigma was provided but no exception was thrown."); } catch (SearchPhaseExecutionException exception) { // All good 
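
For readers tracking the API change in the hunks above, a minimal sketch of the reworked extended_stats_bucket usage, written in the style of the surrounding integration tests. It assumes ESIntegTestCase's client() and the usual static helper imports; the index, field and interval values are placeholders, not taken from this patch:

    // Sketch only, not part of the patch.
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(histogram("histo").field("value").interval(5)
                    .extendedBounds(new ExtendedBounds(0L, 100L))       // bounds are now passed as an ExtendedBounds object
                    .subAggregation(sum("sum").field("value")))
            .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") // buckets path moves into the factory call
                    .sigma(2.0))                                         // optional; negative sigma fails the request, as tested above
            .execute().actionGet();
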
@@ -404,9 +407,9 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(extendedStatsBucket("avg_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(extendedStatsBucket("avg_terms_bucket").setBucketsPaths("terms>avg_histo_bucket.avg")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(extendedStatsBucket("avg_histo_bucket", "histo>_count"))) + .addAggregation(extendedStatsBucket("avg_terms_bucket", "terms>avg_histo_bucket.avg")).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java index 81b5735012e..82ed6b9d893 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java @@ -22,10 +22,12 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; @@ -96,8 +98,8 @@ public class MaxBucketIT extends ESIntegTestCase { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .addAggregation(maxBucket("max_bucket").setBucketsPaths("histo>_count")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(maxBucket("max_bucket", "histo>_count")).execute().actionGet(); assertSearchResponse(response); @@ -139,8 +141,8 @@ public class MaxBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>_count"))).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(maxBucket("max_bucket", "histo>_count"))).execute().actionGet(); assertSearchResponse(response); @@ -187,7 +189,7 @@ public class MaxBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - 
.addAggregation(maxBucket("max_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(maxBucket("max_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -231,9 +233,9 @@ public class MaxBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(maxBucket("max_bucket", "histo>sum"))).execute().actionGet(); assertSearchResponse(response); @@ -284,13 +286,12 @@ public class MaxBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation( - filter("filter") - .filter(termQuery("tag", "tag0")) + filter("filter", termQuery("tag", "tag0")) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(maxBucket("max_bucket", "histo>sum"))).execute().actionGet(); assertSearchResponse(response); @@ -337,9 +338,9 @@ public class MaxBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(maxBucket("max_bucket").setBucketsPaths("histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(maxBucket("max_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -387,8 +388,9 @@ public class MaxBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(maxBucket("max_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(maxBucket("max_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -414,9 +416,9 @@ public class MaxBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(maxBucket("max_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(maxBucket("max_terms_bucket").setBucketsPaths("terms>max_histo_bucket")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(maxBucket("max_histo_bucket", "histo>_count"))) + .addAggregation(maxBucket("max_terms_bucket", 
"terms>max_histo_bucket")).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java index f02a85f130d..3d4ecf124e7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java @@ -21,10 +21,12 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; @@ -93,8 +95,8 @@ public class MinBucketIT extends ESIntegTestCase { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .addAggregation(minBucket("min_bucket").setBucketsPaths("histo>_count")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(minBucket("min_bucket", "histo>_count")).execute().actionGet(); assertSearchResponse(response); @@ -136,8 +138,8 @@ public class MinBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(minBucket("min_bucket").setBucketsPaths("histo>_count"))).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(minBucket("min_bucket", "histo>_count"))).execute().actionGet(); assertSearchResponse(response); @@ -184,7 +186,7 @@ public class MinBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(minBucket("min_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(minBucket("min_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -228,9 +230,9 @@ public class MinBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(minBucket("min_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(minBucket("min_bucket", "histo>sum"))).execute().actionGet(); 
assertSearchResponse(response); @@ -286,9 +288,9 @@ public class MinBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(minBucket("min_bucket").setBucketsPaths("histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(minBucket("min_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -336,8 +338,9 @@ public class MinBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(minBucket("min_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(minBucket("min_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -363,9 +366,9 @@ public class MinBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(minBucket("min_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(minBucket("min_terms_bucket").setBucketsPaths("terms>min_histo_bucket")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(minBucket("min_histo_bucket", "histo>_count"))) + .addAggregation(minBucket("min_terms_bucket", "terms>min_histo_bucket")).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index c4dc267ec5b..4f03ccee437 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -23,8 +23,10 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucket; @@ -51,7 +53,7 @@ import static org.hamcrest.core.IsNull.notNullValue; public class PercentilesBucketIT extends ESIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; - private static final Double[] PERCENTS = {1.0, 25.0, 50.0, 75.0, 99.0}; + 
private static final double[] PERCENTS = {1.0, 25.0, 50.0, 75.0, 99.0}; static int numDocs; static int interval; static int minRandomValue; @@ -96,9 +98,8 @@ public class PercentilesBucketIT extends ESIntegTestCase { public void testDocCountopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .addAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("histo>_count") + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(percentilesBucket("percentiles_bucket", "histo>_count") .percents(PERCENTS)).execute().actionGet(); assertSearchResponse(response); @@ -139,9 +140,8 @@ public class PercentilesBucketIT extends ESIntegTestCase { .order(Terms.Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("histo>_count") + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") .percents(PERCENTS))).execute().actionGet(); assertSearchResponse(response); @@ -186,8 +186,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("terms>sum") + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .percents(PERCENTS)).execute().actionGet(); assertSearchResponse(response); @@ -224,8 +223,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -267,10 +265,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { .order(Terms.Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("histo>sum") + .subAggregation(percentilesBucket("percentiles_bucket", "histo>sum") .percents(PERCENTS))).execute().actionGet(); assertSearchResponse(response); @@ -324,10 +321,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { .order(Terms.Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("histo>sum") + .subAggregation(percentilesBucket("percentiles_bucket", 
"histo>sum") .gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) .percents(PERCENTS))) .execute().actionGet(); @@ -375,9 +371,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("terms>sum") + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .percents(PERCENTS)).execute().actionGet(); assertSearchResponse(response); @@ -398,9 +394,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { public void testWrongPercents() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("terms>sum") + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .percents(PERCENTS)).execute().actionGet(); assertSearchResponse(response); @@ -424,13 +420,12 @@ public class PercentilesBucketIT extends ESIntegTestCase { } public void testBadPercents() throws Exception { - Double[] badPercents = {-1.0, 110.0}; + double[] badPercents = {-1.0, 110.0}; try { client().prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("terms>sum") + .addAggregation(percentilesBucket("percentiles_bucket", "terms>sum") .percents(badPercents)).execute().actionGet(); fail("Illegal percent's were provided but no exception was thrown."); @@ -444,7 +439,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { } public void testBadPercents_asSubAgg() throws Exception { - Double[] badPercents = {-1.0, 110.0}; + double[] badPercents = {-1.0, 110.0}; try { client() @@ -455,9 +450,8 @@ public class PercentilesBucketIT extends ESIntegTestCase { .order(Terms.Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(percentilesBucket("percentiles_bucket") - .setBucketsPaths("histo>_count") + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(percentilesBucket("percentiles_bucket", "histo>_count") .percents(badPercents))).execute().actionGet(); fail("Illegal percent's were provided but no exception was thrown."); @@ -479,10 +473,9 @@ public class PercentilesBucketIT extends ESIntegTestCase { .order(Terms.Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(percentilesBucket("percentile_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(percentilesBucket("percentile_terms_bucket") - .setBucketsPaths("terms>percentile_histo_bucket.50") + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) 
maxRandomValue))) + .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count"))) + .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket.50") .percents(PERCENTS)).execute().actionGet(); assertSearchResponse(response); @@ -536,7 +529,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { } public void testNestedWithDecimal() throws Exception { - Double[] percent = {99.9}; + double[] percent = {99.9}; SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -545,12 +538,10 @@ public class PercentilesBucketIT extends ESIntegTestCase { .order(Terms.Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(percentilesBucket("percentile_histo_bucket") - .percents(percent) - .setBucketsPaths("histo>_count"))) - .addAggregation(percentilesBucket("percentile_terms_bucket") - .setBucketsPaths("terms>percentile_histo_bucket[99.9]") + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(percentilesBucket("percentile_histo_bucket", "histo>_count") + .percents(percent))) + .addAggregation(percentilesBucket("percentile_terms_bucket", "terms>percentile_histo_bucket[99.9]") .percents(percent)).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java index 03ec5b971ce..2d0306cc854 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java @@ -29,9 +29,8 @@ public class SerialDifferenceTests extends BasePipelineAggregationTestCase_count")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(statsBucket("stats_bucket", "histo>_count")).execute().actionGet(); assertSearchResponse(response); @@ -137,8 +139,8 @@ public class StatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(statsBucket("stats_bucket").setBucketsPaths("histo>_count"))).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(statsBucket("stats_bucket", "histo>_count"))).execute().actionGet(); assertSearchResponse(response); @@ -186,7 +188,7 @@ public class StatsBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(statsBucket("stats_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(statsBucket("stats_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -231,9 +233,9 @@ public class StatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) 
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(statsBucket("stats_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(statsBucket("stats_bucket", "histo>sum"))).execute().actionGet(); assertSearchResponse(response); @@ -290,9 +292,9 @@ public class StatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(statsBucket("stats_bucket").setBucketsPaths("histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(statsBucket("stats_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -342,8 +344,9 @@ public class StatsBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(statsBucket("stats_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(statsBucket("stats_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -368,9 +371,9 @@ public class StatsBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(statsBucket("avg_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(statsBucket("avg_terms_bucket").setBucketsPaths("terms>avg_histo_bucket.avg")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(statsBucket("avg_histo_bucket", "histo>_count"))) + .addAggregation(statsBucket("avg_terms_bucket", "terms>avg_histo_bucket.avg")).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java index ba13b553d89..fcbbf9b75cd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java @@ -21,10 +21,12 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import 
org.elasticsearch.test.ESIntegTestCase; @@ -92,8 +94,8 @@ public class SumBucketIT extends ESIntegTestCase { public void testDocCountTopLevel() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .addAggregation(sumBucket("sum_bucket").setBucketsPaths("histo>_count")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .addAggregation(sumBucket("sum_bucket", "histo>_count")).execute().actionGet(); assertSearchResponse(response); @@ -127,8 +129,8 @@ public class SumBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(sumBucket("sum_bucket").setBucketsPaths("histo>_count"))).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(sumBucket("sum_bucket", "histo>_count"))).execute().actionGet(); assertSearchResponse(response); @@ -167,7 +169,7 @@ public class SumBucketIT extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx") .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(sumBucket("sum_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(sumBucket("sum_bucket", "terms>sum")).execute().actionGet(); assertSearchResponse(response); @@ -203,9 +205,9 @@ public class SumBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(sumBucket("sum_bucket").setBucketsPaths("histo>sum"))).execute().actionGet(); + .subAggregation(sumBucket("sum_bucket", "histo>sum"))).execute().actionGet(); assertSearchResponse(response); @@ -253,9 +255,9 @@ public class SumBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(sumBucket("sum_bucket").setBucketsPaths("histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(sumBucket("sum_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -296,8 +298,9 @@ public class SumBucketIT extends ESIntegTestCase { public void testNoBuckets() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(sumBucket("sum_bucket").setBucketsPaths("terms>sum")).execute().actionGet(); + .addAggregation(terms("terms").field("tag").includeExclude(new IncludeExclude(null, "tag.*")) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(sumBucket("sum_bucket", "terms>sum")).execute().actionGet(); 
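
One more pattern worth spelling out from these hunks: regex filtering on the terms aggregation now goes through a single IncludeExclude object instead of the old include()/exclude() string setters. Judging from the replacements above, the first constructor argument is the include pattern and the second the exclude pattern, so (null, "tag.*") reproduces the old exclude("tag.*"). A brief sketch with placeholder names:

    // Sketch only, not part of the patch.
    terms("terms").field("tag")
            .includeExclude(new IncludeExclude(null, "tag.*"))           // exclude every term matching tag.*
            .subAggregation(sum("sum").field("value"));
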
assertSearchResponse(response); @@ -322,9 +325,9 @@ public class SumBucketIT extends ESIntegTestCase { .order(Order.term(true)) .subAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) minRandomValue, (long) maxRandomValue)) - .subAggregation(sumBucket("sum_histo_bucket").setBucketsPaths("histo>_count"))) - .addAggregation(sumBucket("sum_terms_bucket").setBucketsPaths("terms>sum_histo_bucket")).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) minRandomValue, (long) maxRandomValue))) + .subAggregation(sumBucket("sum_histo_bucket", "histo>_count"))) + .addAggregation(sumBucket("sum_terms_bucket", "terms>sum_histo_bucket")).execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java index 8cfea91c817..5da6238c8c2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java @@ -27,9 +27,8 @@ public abstract class AbstractBucketMetricsTestCase { @Override - protected Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - return new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + return new Factory(name, bucketsPath); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java index 03d7c69a63f..e4672730af4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extend public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase { @Override - protected Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - Factory factory = new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + Factory factory = new Factory(name, bucketsPath); if (randomBoolean()) { factory.sigma(randomDoubleBetween(0.0, 10.0, false)); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java index 74fc39ecad9..dd35fbb6225 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucke public class MaxBucketTests extends AbstractBucketMetricsTestCase { @Override - protected Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - return new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + return new Factory(name, 
bucketsPath); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java index bc8fd2a2597..745e0611cbd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucke public class MinBucketTests extends AbstractBucketMetricsTestCase { @Override - protected Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - return new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + return new Factory(name, bucketsPath); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java index 60785848d56..200cd261123 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.P public class PercentilesBucketTests extends AbstractBucketMetricsTestCase { @Override - protected Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - Factory factory = new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + Factory factory = new Factory(name, bucketsPath); if (randomBoolean()) { int numPercents = randomIntBetween(1, 20); double[] percents = new double[numPercents]; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java index 0aa8df0bf9d..22a0774b2a4 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsB public class StatsBucketTests extends AbstractBucketMetricsTestCase { @Override - protected Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - return new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + return new Factory(name, bucketsPath); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java index a7d6b5a9273..02448cd6491 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucke public class SumBucketTests extends AbstractBucketMetricsTestCase { @Override - protected 
Factory doCreateTestAggregatorFactory(String name, String[] bucketsPaths) { - return new Factory(name, bucketsPaths); + protected Factory doCreateTestAggregatorFactory(String name, String bucketsPath) { + return new Factory(name, bucketsPath); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 35ecf95113c..e23df88a385 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.collect.EvictingQueue; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket; @@ -409,18 +410,16 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts","_count") .window(windowSize) .modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .gapPolicy(gapPolicy)) + .subAggregation(movingAvg("movavg_values","the_metric") .window(windowSize) .modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); assertSearchResponse(response); @@ -459,18 +458,16 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) .modelBuilder(new LinearModel.LinearModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .gapPolicy(gapPolicy)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(windowSize) .modelBuilder(new LinearModel.LinearModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); assertSearchResponse(response); @@ -509,18 +506,16 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) 
.modelBuilder(new EwmaModel.EWMAModelBuilder().alpha(alpha)) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .gapPolicy(gapPolicy)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(windowSize) .modelBuilder(new EwmaModel.EWMAModelBuilder().alpha(alpha)) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); assertSearchResponse(response); @@ -559,18 +554,16 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta)) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .gapPolicy(gapPolicy)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(windowSize) .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta)) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); assertSearchResponse(response); @@ -609,22 +602,20 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType)) .gapPolicy(gapPolicy) - .minimize(false) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .minimize(false)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(windowSize) .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .alpha(alpha).beta(beta).gamma(gamma).period(period).seasonalityType(seasonalityType)) .gapPolicy(gapPolicy) - .minimize(false) - .setBucketsPaths("the_metric")) + .minimize(false)) ).execute().actionGet(); assertSearchResponse(response); @@ -669,8 +660,8 @@ public class MovAvgIT extends ESIntegTestCase { .interval(1) .subAggregation(avg("avg").field(VALUE_FIELD)) .subAggregation( - movingAvg("movavg_values").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy).predict(5).setBucketsPaths("avg"))).execute().actionGet(); + movingAvg("movavg_values", "avg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy).predict(5))).execute().actionGet(); assertSearchResponse(response); @@ -715,13 +706,12 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + 
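The moving-average hunks above all follow the same shape: the target path ("_count" or a sibling metric) is now the second argument to movingAvg(...) rather than a later setBucketsPaths(...) call. A condensed sketch under the same assumptions as the earlier example (placeholder names, imports as in MovAvgIT):

    public void testMovingAvgSketch() {
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .addAggregation(histogram("histo").field("tick").interval(1)
                        .extendedBounds(new ExtendedBounds(0L, 99L))
                        .subAggregation(avg("the_metric").field("value"))
                        // the second argument names the metric this moving average reads
                        .subAggregation(movingAvg("movavg_values", "the_metric")
                                .window(5)
                                .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(0.5).beta(0.5))
                                .gapPolicy(GapPolicy.INSERT_ZEROS)))
                .execute().actionGet();
        assertSearchResponse(response);
    }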
.subAggregation(movingAvg("movavg_counts", "the_metric") .window(0) .modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); fail("MovingAvg should not accept a window that is zero"); } catch (SearchPhaseExecutionException e) { @@ -736,11 +726,10 @@ public class MovAvgIT extends ESIntegTestCase { .addAggregation( range("histo").field(INTERVAL_FIELD).addRange(0, 10) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "the_metric") .window(0) .modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); fail("MovingAvg should not accept non-histogram as parent"); @@ -755,13 +744,12 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(-10) .modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); fail("MovingAvg should not accept a window that is negative"); @@ -779,11 +767,10 @@ public class MovAvgIT extends ESIntegTestCase { .addAggregation( histogram("histo").field("test").interval(interval) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) .modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); assertSearchResponse(response); @@ -802,11 +789,10 @@ public class MovAvgIT extends ESIntegTestCase { .addAggregation( histogram("histo").field("test").interval(interval) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) .modelBuilder(new SimpleModel.SimpleModelBuilder()) .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric") .predict(numPredictions)) ).execute().actionGet(); @@ -825,18 +811,17 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) .modelBuilder(randomModelBuilder()) .gapPolicy(gapPolicy) - .predict(0) - .setBucketsPaths("the_metric")) + .predict(0)) ).execute().actionGet(); fail("MovingAvg should not accept a prediction size that is zero"); - } catch (SearchPhaseExecutionException exception) { + } catch (IllegalArgumentException exception) { // All Good } } @@ -847,18 +832,17 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( 
histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) .modelBuilder(randomModelBuilder()) .gapPolicy(gapPolicy) - .predict(-10) - .setBucketsPaths("the_metric")) + .predict(-10)) ).execute().actionGet(); fail("MovingAvg should not accept a prediction size that is negative"); - } catch (SearchPhaseExecutionException exception) { + } catch (IllegalArgumentException exception) { // All Good } } @@ -869,20 +853,18 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(10) .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .gapPolicy(gapPolicy)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(windowSize) .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) - .gapPolicy(gapPolicy) - .setBucketsPaths("the_metric")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); } catch (SearchPhaseExecutionException e) { // All good @@ -899,14 +881,13 @@ public class MovAvgIT extends ESIntegTestCase { .field(INTERVAL_FIELD) .interval(1) .subAggregation(avg("avg").field(VALUE_FIELD)) - .subAggregation(derivative("deriv") - .setBucketsPaths("avg").gapPolicy(gapPolicy)) + .subAggregation(derivative("deriv", "avg").gapPolicy(gapPolicy)) .subAggregation( - movingAvg("avg_movavg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy).predict(12).setBucketsPaths("avg")) + movingAvg("avg_movavg", "avg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy).predict(12)) .subAggregation( - movingAvg("deriv_movavg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy).predict(12).setBucketsPaths("deriv")) + movingAvg("deriv_movavg", "deriv").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy).predict(12)) ).execute().actionGet(); assertSearchResponse(response); @@ -1008,13 +989,12 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(10) .modelBuilder(randomModelBuilder(100)) - .gapPolicy(gapPolicy) - .setBucketsPaths("_count")) + .gapPolicy(gapPolicy)) ).execute().actionGet(); } catch (SearchPhaseExecutionException e) { // All good @@ -1027,22 +1007,20 @@ public class MovAvgIT extends 
ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .period(period).seasonalityType(seasonalityType)) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .minimize(true)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(windowSize) .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .period(period).seasonalityType(seasonalityType)) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("the_metric")) + .minimize(true)) ).execute().actionGet(); assertSearchResponse(response); @@ -1115,20 +1093,18 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta)) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("_count")) - .subAggregation(movingAvg("movavg_values") + .minimize(true)) + .subAggregation(movingAvg("movavg_values", "the_metric") .window(numBuckets) .modelBuilder(new HoltLinearModel.HoltLinearModelBuilder().alpha(alpha).beta(beta)) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("the_metric")) + .minimize(true)) ).execute().actionGet(); assertSearchResponse(response); @@ -1171,14 +1147,13 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) .modelBuilder(new SimpleModel.SimpleModelBuilder()) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("_count")) + .minimize(true)) ).execute().actionGet(); fail("Simple Model cannot be minimized, but an exception was not thrown"); } catch (SearchPhaseExecutionException e) { @@ -1190,14 +1165,13 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) .modelBuilder(new LinearModel.LinearModelBuilder()) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("_count")) + .minimize(true)) ).execute().actionGet(); fail("Linear Model cannot be minimized, but an exception was not thrown"); } catch (SearchPhaseExecutionException e) { @@ -1221,14 +1195,13 @@ public 
class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(new ExtendedBounds(0L, (long) (interval * (numBuckets - 1)))) .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") + .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) .modelBuilder(builder) .gapPolicy(gapPolicy) - .minimize(true) - .setBucketsPaths("_count")) + .minimize(true)) ).execute().actionGet(); } catch (SearchPhaseExecutionException e) { fail("Model [" + builder.toString() + "] can be minimized, but an exception was thrown"); @@ -1236,43 +1209,6 @@ public class MovAvgIT extends ESIntegTestCase { } } - public void testUnrecognizedParams() { - MovAvgModelBuilder[] builders = new MovAvgModelBuilder[]{ - new SimpleModel.SimpleModelBuilder(), - new LinearModel.LinearModelBuilder(), - new EwmaModel.EWMAModelBuilder(), - new HoltLinearModel.HoltLinearModelBuilder(), - new HoltWintersModel.HoltWintersModelBuilder() - }; - Map badSettings = new HashMap<>(1); - badSettings.put("abc", 1.2); - - for (MovAvgModelBuilder builder : builders) { - try { - SearchResponse response = client() - .prepareSearch("idx").setTypes("type") - .addAggregation( - histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) - .subAggregation(metric) - .subAggregation(movingAvg("movavg_counts") - .window(10) - .modelBuilder(builder) - .gapPolicy(gapPolicy) - .settings(badSettings) - .setBucketsPaths("_count")) - ).execute().actionGet(); - } catch (SearchPhaseExecutionException e) { - // All good - } - } - - - - - } - - private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { if (!expectedBucketIter.hasNext()) { fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java index 6767a305577..d12466fed21 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java @@ -35,9 +35,8 @@ public class MovAvgTests extends BasePipelineAggregationTestCase histogram = response.getAggregations().get("histogram"); assertThat(histogram, notNullValue()); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java index 06119fd6a76..28549a12958 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java @@ -113,8 +113,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null)))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null) + , 
"field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -157,8 +157,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("_value0 + _value1 / _value2", ScriptType.INLINE, null, null)))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("_value0 + _value1 / _value2", ScriptType.INLINE, null, null), + "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -199,8 +199,8 @@ public class BucketScriptTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum").script( - new Script("_value0", ScriptType.INLINE, null, null)))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("_value0", ScriptType.INLINE, null, null), + "field2Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -241,7 +241,7 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPathsMap(bucketsPathsMap ).script( + bucketScript("seriesArithmetic", bucketsPathsMap, new Script("foo + bar + baz", ScriptType.INLINE, null, null)))).execute().actionGet(); assertSearchResponse(response); @@ -287,8 +287,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("(_value0 + _value1 + _value2) * factor", ScriptType.INLINE, null, params)))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("(_value0 + _value1 + _value2) * factor", ScriptType.INLINE, null, params), + "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -331,8 +331,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null)).gapPolicy(GapPolicy.INSERT_ZEROS))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null), + "field2Sum", "field3Sum", "field4Sum").gapPolicy(GapPolicy.INSERT_ZEROS))).execute().actionGet(); assertSearchResponse(response); @@ -377,8 +377,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("my_script", ScriptType.INDEXED, null, null)))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("my_script", ScriptType.INDEXED, null, null), + "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -421,8 +421,8 @@ public 
class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null)))) + bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null), + "field2Sum", "field3Sum", "field4Sum"))) .execute().actionGet(); assertSearchResponse(response); @@ -444,8 +444,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic").setBucketsPaths("field2Sum", "field3Sum", "field4Sum").script( - new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null)))).execute().actionGet(); + bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null), + "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java index a1faea0b5e5..c78e51330e4 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.script.groovy.GroovyScriptEngineService; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket; @@ -45,8 +46,8 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSelector; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.having; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -125,9 +126,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, - null, null)))).execute() + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? 
false : (_value0 + _value1 > 100)", + ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute() .actionGet(); assertSearchResponse(response); @@ -159,9 +159,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? true : (_value0 < 10000)", ScriptType.INLINE, null, - null)))).execute() + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? true : (_value0 < 10000)", + ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute() .actionGet(); assertSearchResponse(response); @@ -193,9 +192,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? false : (_value0 > 10000)", ScriptType.INLINE, null, - null)))).execute().actionGet(); + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 > 10000)", + ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -216,9 +214,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? false : (_value0 < _value1)", ScriptType.INLINE, null, - null)))).execute().actionGet(); + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 < _value1)", + ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -248,9 +245,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum") - .script(new Script("Double.isNaN(_value0) ? false : (_value0 > 100)", ScriptType.INLINE, - null, null)))).execute().actionGet(); + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 > 100)", + ScriptType.INLINE,null, null), "field2Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -282,9 +278,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPathsMap(bucketPathsMap).script( - new Script("Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", - ScriptType.INLINE, null, null)))).execute() + bucketSelector("bucketSelector", bucketPathsMap, new Script("Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", + ScriptType.INLINE, null, null)))).execute() .actionGet(); assertSearchResponse(response); @@ -318,9 +313,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? 
false : (_value0 + _value1 > threshold)", - ScriptType.INLINE, null, params)))).execute() + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)", + ScriptType.INLINE, null, params), "field2Sum", "field3Sum"))).execute() .actionGet(); assertSearchResponse(response); @@ -352,8 +346,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").gapPolicy(GapPolicy.INSERT_ZEROS) - .script(new Script("_value0 + _value1 > 100", ScriptType.INLINE, null, null)))) + bucketSelector("bucketSelector", new Script("_value0 + _value1 > 100", ScriptType.INLINE, null, null), + "field2Sum", "field3Sum").gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); @@ -385,8 +379,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("my_script", ScriptType.INDEXED, null, null)))).execute().actionGet(); + bucketSelector("bucketSelector", new Script("my_script", ScriptType.INDEXED, null, null), + "field2Sum", "field3Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -417,9 +411,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, - null, null)))).execute() + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", + ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute() .actionGet(); assertSearchResponse(response); @@ -440,9 +433,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation( - having("having").setBucketsPaths("field2Sum", "field3Sum").script( - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, - null, null)))).execute() + bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? 
false : (_value0 + _value1 > 100)", + ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute() .actionGet(); assertSearchResponse(response); @@ -467,8 +459,8 @@ public class BucketSelectorTests extends ESIntegTestCase { public void testEmptyBuckets() { SearchResponse response = client().prepareSearch("idx_with_gaps") .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(1) - .subAggregation(histogram("inner_histo").field(FIELD_1_NAME).interval(1).extendedBounds(1l, 4l).minDocCount(0) - .subAggregation(derivative("derivative").setBucketsPaths("_count").gapPolicy(GapPolicy.INSERT_ZEROS)))) + .subAggregation(histogram("inner_histo").field(FIELD_1_NAME).interval(1).extendedBounds(new ExtendedBounds(1l, 4l)) + .minDocCount(0).subAggregation(derivative("derivative", "_count").gapPolicy(GapPolicy.INSERT_ZEROS)))) .execute().actionGet(); assertSearchResponse(response); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java index ba4ca38d16e..6a06e4591d8 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; -import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeBuilder; +import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.min.Min; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -112,7 +112,7 @@ public class DateRangeTests extends ESIntegTestCase { } public void testDateMath() throws Exception { - DateRangeBuilder rangeBuilder = dateRange("range"); + DateRangeAggregatorFactory rangeBuilder = dateRange("range"); if (randomBoolean()) { rangeBuilder.field("date"); } else { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java index d0de4c7fd85..de6fc52783d 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java @@ -31,11 +31,13 @@ import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -313,8 +315,7 @@ public class 
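As the BucketSelectorTests hunks show, the having(...) helper is gone and bucketSelector(name, script, paths...) replaces it, with the static import changing accordingly. A sketch of the same pattern (placeholder fields, imports as in BucketSelectorTests):

    public void testBucketSelectorSketch() {
        SearchResponse response = client().prepareSearch("idx")
                .addAggregation(histogram("histo").field("value").interval(10)
                        .subAggregation(sum("field2Sum").field("field2"))
                        .subAggregation(sum("field3Sum").field("field3"))
                        // buckets are kept only when the script evaluates to true
                        .subAggregation(bucketSelector("bucketSelector",
                                new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)",
                                        ScriptType.INLINE, null, null),
                                "field2Sum", "field3Sum")))
                .execute().actionGet();
        assertSearchResponse(response);
    }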
DoubleTermsTests extends AbstractTermsTestCase { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .field(SINGLE_VALUED_FIELD_NAME) - .include(includes) - .exclude(excludes) + .includeExclude(new IncludeExclude(includes, excludes)) .collectMode(randomFrom(SubAggCollectionMode.values()))) .execute().actionGet(); assertSearchResponse(response); @@ -710,7 +711,7 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")) - .valueType(Terms.ValueType.DOUBLE) + .valueType(ValueType.DOUBLE) .subAggregation(sum("sum"))) .execute().actionGet(); @@ -884,7 +885,7 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .addAggregation( terms("num_tags").field("num_tag").collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter", asc)) - .subAggregation(filter("filter").filter(QueryBuilders.matchAllQuery()))).execute().actionGet(); + .subAggregation(filter("filter", QueryBuilders.matchAllQuery()))).execute().actionGet(); assertSearchResponse(response); @@ -924,8 +925,8 @@ public class DoubleTermsTests extends AbstractTermsTestCase { .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter1>filter2>max", asc)) .subAggregation( - filter("filter1").filter(QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2").filter(QueryBuilders.matchAllQuery()).subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation( max("max").field(SINGLE_VALUED_FIELD_NAME))))).execute().actionGet(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java index 584a8d2c284..89a03d62b09 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java @@ -35,8 +35,8 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; +import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; -import org.elasticsearch.search.aggregations.bucket.range.RangeBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.sum.Sum; @@ -122,7 +122,7 @@ public class EquivalenceTests extends ESIntegTestCase { } } - RangeBuilder query = range("range").field("values"); + RangeAggregator.Factory query = range("range").field("values"); for (int i = 0; i < ranges.length; ++i) { String key = Integer.toString(i); if (ranges[i][0] == Double.NEGATIVE_INFINITY) { @@ -143,7 +143,7 @@ public class EquivalenceTests extends ESIntegTestCase { if (ranges[i][1] != Double.POSITIVE_INFINITY){ filter = filter.to(ranges[i][1]); } - reqBuilder = reqBuilder.addAggregation(filter("filter" + i).filter(filter)); + reqBuilder = reqBuilder.addAggregation(filter("filter" + i, filter)); } 
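The terms hunks above replace the separate include(...)/exclude(...) setters with one includeExclude(new IncludeExclude(...)) call, and filter sub-aggregations take their query as a second argument. The numeric-array IncludeExclude constructor below is an assumption inferred from these tests; treat the exact overload as illustrative only (placeholder values, imports as in DoubleTermsTests):

    public void testTermsIncludeExcludeSketch() {
        double[] includes = new double[] { 1, 2, 3 };
        double[] excludes = new double[] { 2 };
        SearchResponse response = client().prepareSearch("idx").setTypes("type")
                .addAggregation(terms("terms")
                        .field("num_tag")
                        // include and exclude now travel together as one value object
                        .includeExclude(new IncludeExclude(includes, excludes))
                        .collectMode(randomFrom(SubAggCollectionMode.values()))
                        // the filter query is no longer set via a separate .filter(...) call
                        .subAggregation(filter("filter", QueryBuilders.matchAllQuery())
                                .subAggregation(max("max").field("value"))))
                .execute().actionGet();
        assertSearchResponse(response);
    }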
SearchResponse resp = reqBuilder.execute().actionGet(); @@ -354,7 +354,7 @@ public class EquivalenceTests extends ESIntegTestCase { indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value)); ensureYellow("idx"); // only one document let's make sure all shards have an active primary SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("filter").filter(QueryBuilders.matchAllQuery()) + .addAggregation(filter("filter", QueryBuilders.matchAllQuery()) .subAggregation(range("range") .field("f") .addUnboundedTo(6) diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java index 7e7e8cb76fa..c5bf370e028 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java @@ -77,7 +77,7 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { } } Arrays.sort(percents); - Loggers.getLogger(HDRPercentileRanksTests.class).info("Using percentiles={}", Arrays.toString(percents)); + Loggers.getLogger(HDRPercentileRanksTests.class).info("Using values={}", Arrays.toString(percents)); return percents; } @@ -85,8 +85,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { return randomIntBetween(0, 5); } - private void assertConsistent(double[] pcts, PercentileRanks percentiles, long minValue, long maxValue, int numberSigDigits) { - final List percentileList = iterableAsArrayList(percentiles); + private void assertConsistent(double[] pcts, PercentileRanks values, long minValue, long maxValue, int numberSigDigits) { + final List percentileList = iterableAsArrayList(values); assertEquals(pcts.length, percentileList.size()); for (int i = 0; i < pcts.length; ++i) { final Percentile percentile = percentileList.get(i); @@ -122,7 +122,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .minDocCount(0) .subAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits).percentiles(10, 15))).execute().actionGet(); + .numberOfSignificantValueDigits(sigDigits).values(10, 15))) + .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -145,7 +146,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").percentiles(0, 10, 15, 100)).execute().actionGet(); + .field("value").values(0, 10, 15, 100)) + .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l)); @@ -167,12 +169,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").percentiles(pcts)).execute().actionGet(); + .field("value").values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); + final PercentileRanks values = 
searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue, sigDigits); } @Override @@ -185,7 +188,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .addAggregation( global("global").subAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").percentiles(pcts))).execute().actionGet(); + .field("value").values(pcts))) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -196,10 +200,10 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { assertThat(global.getAggregations(), notNullValue()); assertThat(global.getAggregations().asMap().size(), equalTo(1)); - PercentileRanks percentiles = global.getAggregations().get("percentile_ranks"); - assertThat(percentiles, notNullValue()); - assertThat(percentiles.getName(), equalTo("percentile_ranks")); - assertThat((PercentileRanks) global.getProperty("percentile_ranks"), sameInstance(percentiles)); + PercentileRanks values = global.getAggregations().get("percentile_ranks"); + assertThat(values, notNullValue()); + assertThat(values.getName(), equalTo("percentile_ranks")); + assertThat((PercentileRanks) global.getProperty("percentile_ranks"), sameInstance(values)); } @@ -211,12 +215,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").percentiles(pcts)).execute().actionGet(); + .field("value").values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue, sigDigits); } @Override @@ -228,12 +233,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").percentiles(pcts)).execute().actionGet(); + .field("value").values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue, sigDigits); } @Override @@ -245,12 +251,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); + .field("value").script(new Script("_value - 1")).values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue - 
1, maxValue - 1, sigDigits); } @Override @@ -264,13 +271,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) + .field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params)).values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits); } @Override @@ -282,12 +289,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").percentiles(pcts)).execute().actionGet(); + .field("values").values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues, maxValues, sigDigits); } @Override @@ -299,12 +307,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); + .field("values").script(new Script("_value - 1")).values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits); } public void testMultiValuedFieldWithValueScriptReverse() throws Exception { @@ -315,12 +324,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").script(new Script("20 - _value")).percentiles(pcts)).execute().actionGet(); + .field("values").script(new Script("20 - _value")).values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, 20 - maxValues, 20 - minValues, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, 20 - maxValues, 20 - minValues, sigDigits); } @Override @@ -334,13 +344,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( 
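In the percentile-ranks hunks the inputs are now supplied with values(...) instead of percentiles(...), matching the renamed local variables; the result is still read back as a PercentileRanks aggregation. Sketch (placeholder field, imports as in HDRPercentileRanksTests):

    public void testPercentileRanksSketch() {
        double[] ranks = new double[] { 1, 5, 25, 50, 99 };
        SearchResponse response = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(percentileRanks("percentile_ranks")
                        .method(PercentilesMethod.HDR)
                        .numberOfSignificantValueDigits(3)
                        .field("value")
                        // values(...) replaces percentiles(...) as the input setter
                        .values(ranks))
                .execute().actionGet();
        PercentileRanks result = response.getAggregations().get("percentile_ranks");
        assertThat(result, notNullValue());
    }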
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) + .field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params)).values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits); } @Override @@ -352,12 +362,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['value'].value")).percentiles(pcts)).execute().actionGet(); + .script(new Script("doc['value'].value")).values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue, sigDigits); } @Override @@ -371,13 +382,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) + .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue - 1, maxValue - 1, sigDigits); } @Override @@ -389,12 +400,13 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet(); + .script(new Script("doc['values'].values")).values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues, maxValues, sigDigits); } @Override @@ -412,12 +424,14 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .numberOfSignificantValueDigits(sigDigits) .script(new Script( "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", - ScriptType.INLINE, null, 
params)).percentiles(pcts)).execute().actionGet(); + ScriptType.INLINE, null, params)) + .values(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1, sigDigits); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues - 1, maxValues - 1, sigDigits); } public void testOrderBySubAggregation() { @@ -430,7 +444,7 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { histogram("histo").field("value").interval(2l) .subAggregation( percentileRanks("percentile_ranks").method(PercentilesMethod.HDR) - .numberOfSignificantValueDigits(sigDigits).percentiles(99)) + .numberOfSignificantValueDigits(sigDigits).values(99)) .order(Order.aggregation("percentile_ranks", "99", asc))).execute().actionGet(); assertHitCount(searchResponse, 10); @@ -438,8 +452,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { Histogram histo = searchResponse.getAggregations().get("histo"); double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; for (Histogram.Bucket bucket : histo.getBuckets()) { - PercentileRanks percentiles = bucket.getAggregations().get("percentile_ranks"); - double p99 = percentiles.percent(99); + PercentileRanks values = bucket.getAggregations().get("percentile_ranks"); + double p99 = values.percent(99); if (asc) { assertThat(p99, greaterThanOrEqualTo(previous)); } else { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java index 7ea5c673e4e..1b1fa921207 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -556,7 +557,7 @@ public class HistogramTests extends ESIntegTestCase { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("filter>max", asc)) - .subAggregation(filter("filter").filter(matchAllQuery()) + .subAggregation(filter("filter", matchAllQuery()) .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) .execute().actionGet(); @@ -896,7 +897,7 @@ public class HistogramTests extends ESIntegTestCase { .prepareSearch("idx", "idx_unmapped") .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds((long) -1 * 2 * interval, (long) valueCounts.length * interval)).execute().actionGet(); + .extendedBounds(new ExtendedBounds((long) -1 * 2 * interval, (long) valueCounts.length * interval))).execute().actionGet(); assertSearchResponse(response); @@ -986,7 +987,7 @@ public class 
HistogramTests extends ESIntegTestCase { .field(SINGLE_VALUED_FIELD_NAME) .interval(interval) .minDocCount(0) - .extendedBounds(boundsMin, boundsMax)) + .extendedBounds(new ExtendedBounds(boundsMin, boundsMax))) .execute().actionGet(); if (invalidBoundsError) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java index 638d9238b45..0f79ca8fdfd 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java @@ -30,11 +30,13 @@ import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -289,8 +291,7 @@ public class LongTermsTests extends AbstractTermsTestCase { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation(terms("terms") .field(SINGLE_VALUED_FIELD_NAME) - .include(includes) - .exclude(excludes) + .includeExclude(new IncludeExclude(includes, excludes)) .collectMode(randomFrom(SubAggCollectionMode.values()))) .execute().actionGet(); assertSearchResponse(response); @@ -706,7 +707,7 @@ public class LongTermsTests extends AbstractTermsTestCase { .addAggregation(terms("terms") .collectMode(randomFrom(SubAggCollectionMode.values())) .script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")) - .valueType(Terms.ValueType.LONG) + .valueType(ValueType.LONG) .subAggregation(sum("sum"))) .execute().actionGet(); @@ -876,7 +877,7 @@ public class LongTermsTests extends AbstractTermsTestCase { .field("num_tag") .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter", asc)) - .subAggregation(filter("filter").filter(QueryBuilders.matchAllQuery())) +.subAggregation(filter("filter", QueryBuilders.matchAllQuery())) ).execute().actionGet(); @@ -913,8 +914,8 @@ public class LongTermsTests extends AbstractTermsTestCase { .field("num_tag") .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter1>filter2>max", asc)) - .subAggregation(filter("filter1").filter(QueryBuilders.matchAllQuery()) - .subAggregation(filter("filter2").filter(QueryBuilders.matchAllQuery()) + .subAggregation(filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()) .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) ).execute().actionGet(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index 9b3e1a342a8..a348577684a 100644 --- 
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -35,7 +35,8 @@ import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; +import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -110,17 +111,17 @@ public class MinDocCountTests extends AbstractTermsTestCase { private enum Script { NO { @Override - TermsBuilder apply(TermsBuilder builder, String field) { + TermsAggregatorFactory apply(TermsAggregatorFactory builder, String field) { return builder.field(field); } }, YES { @Override - TermsBuilder apply(TermsBuilder builder, String field) { + TermsAggregatorFactory apply(TermsAggregatorFactory builder, String field) { return builder.script(new org.elasticsearch.script.Script("doc['" + field + "'].values")); } }; - abstract TermsBuilder apply(TermsBuilder builder, String field); + abstract TermsAggregatorFactory apply(TermsAggregatorFactory builder, String field); } // check that terms2 is a subset of terms1 @@ -297,7 +298,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { .executionHint(randomExecutionHint()) .order(order) .size(size) - .include(include) + .includeExclude(include == null ? 
null : new IncludeExclude(include, null)) .shardSize(cardinality + randomInt(10)) .minDocCount(minDocCount)).request(); final SearchResponse response = client().search(request).get(); @@ -377,7 +378,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type") .setSize(0) .setQuery(QUERY) - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).order(order).minDocCount(0)) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).order(order).minDocCount(0)) .execute().actionGet(); final Histogram allHisto = allResponse.getAggregations().get("histo"); @@ -386,7 +387,7 @@ public class MinDocCountTests extends AbstractTermsTestCase { final SearchResponse response = client().prepareSearch("idx").setTypes("type") .setSize(0) .setQuery(QUERY) - .addAggregation(dateHistogram("histo").field("date").interval(DateHistogramInterval.DAY).order(order).minDocCount(minDocCount)) + .addAggregation(dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).order(order).minDocCount(minDocCount)) .execute().actionGet(); assertSubset(allHisto, (Histogram) response.getAggregations().get("histo"), minDocCount); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java index 5d78b99ded9..288d5ed1857 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RangeTests.java @@ -42,8 +42,8 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java index 55672a0ce47..f4b7fc7adf5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.messy.tests; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -36,6 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; +import 
org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; @@ -273,7 +275,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .setTypes("high_card_type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).include("val00.+")).execute().actionGet(); + .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude("val00.+", null))) + .execute().actionGet(); assertSearchResponse(response); @@ -297,7 +300,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .setTypes("high_card_type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).include("val00.+").exclude("(val000|val001)")) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .includeExclude(new IncludeExclude("val00.+", "(val000|val001)"))) .execute().actionGet(); assertSearchResponse(response); @@ -322,7 +326,9 @@ public class StringTermsTests extends AbstractTermsTestCase { .setTypes("high_card_type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).exclude("val0[1-9]+.+")).execute().actionGet(); + .collectMode(randomFrom(SubAggCollectionMode.values())) + .includeExclude(new IncludeExclude(null, new RegExp("val0[1-9]+.+")))) + .execute().actionGet(); assertSearchResponse(response); @@ -347,7 +353,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .setTypes("high_card_type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).include(incVals)).execute().actionGet(); + .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude(incVals, null))) + .execute().actionGet(); assertSearchResponse(response); @@ -374,7 +381,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .setTypes("high_card_type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).include(incVals).exclude(excVals)).execute() + .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude(incVals, excVals))) + .execute() .actionGet(); assertSearchResponse(response); @@ -397,7 +405,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .setTypes("high_card_type") .addAggregation( terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).exclude(excVals)).execute().actionGet(); + .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude(null, excVals))) + .execute().actionGet(); assertSearchResponse(response); @@ -907,7 +916,7 @@ public class StringTermsTests extends AbstractTermsTestCase { .prepareSearch("idx") .setTypes("type") .addAggregation( - filter("filter").filter(termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( + filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( 
terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())))) .execute().actionGet(); @@ -1014,7 +1023,7 @@ public class StringTermsTests extends AbstractTermsTestCase { .addAggregation( terms("tags").executionHint(randomExecutionHint()).field("tag") .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.aggregation("filter", asc)) - .subAggregation(filter("filter").filter(QueryBuilders.matchAllQuery()))).execute().actionGet(); + .subAggregation(filter("filter", QueryBuilders.matchAllQuery()))).execute().actionGet(); assertSearchResponse(response); @@ -1054,8 +1063,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter1>filter2>stats.max", asc)) .subAggregation( - filter("filter1").filter(QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2").filter(QueryBuilders.matchAllQuery()).subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation( stats("stats").field("i"))))).execute().actionGet(); assertSearchResponse(response); @@ -1117,8 +1126,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc)) .subAggregation( - filter("filter1").filter(QueryBuilders.matchAllQuery()).subAggregation( - filter(filter2Name).filter(QueryBuilders.matchAllQuery()).subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation( stats(statsName).field("i"))))).execute().actionGet(); assertSearchResponse(response); @@ -1180,8 +1189,8 @@ public class StringTermsTests extends AbstractTermsTestCase { .collectMode(randomFrom(SubAggCollectionMode.values())) .order(Terms.Order.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc)) .subAggregation( - filter("filter1").filter(QueryBuilders.matchAllQuery()).subAggregation( - filter(filter2Name).filter(QueryBuilders.matchAllQuery()).subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation( stats(statsName).field("i"))))).execute().actionGet(); assertSearchResponse(response); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java index 20916c33205..ae23856dc33 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksBuilder; +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorFactory; import java.util.Arrays; import java.util.Collection; @@ -78,19 +78,19 @@ public class TDigestPercentileRanksTests extends 
AbstractNumericTestCase { } } Arrays.sort(percents); - Loggers.getLogger(TDigestPercentileRanksTests.class).info("Using percentiles={}", Arrays.toString(percents)); + Loggers.getLogger(TDigestPercentileRanksTests.class).info("Using values={}", Arrays.toString(percents)); return percents; } - private static PercentileRanksBuilder randomCompression(PercentileRanksBuilder builder) { + private static PercentileRanksAggregatorFactory randomCompression(PercentileRanksAggregatorFactory builder) { if (randomBoolean()) { builder.compression(randomIntBetween(20, 120) + randomDouble()); } return builder; } - private void assertConsistent(double[] pcts, PercentileRanks percentiles, long minValue, long maxValue) { - final List percentileList = CollectionUtils.iterableAsArrayList(percentiles); + private void assertConsistent(double[] pcts, PercentileRanks values, long minValue, long maxValue) { + final List percentileList = CollectionUtils.iterableAsArrayList(values); assertEquals(pcts.length, percentileList.size()); for (int i = 0; i < pcts.length; ++i) { final Percentile percentile = percentileList.get(i); @@ -117,7 +117,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1l).minDocCount(0) .subAggregation(randomCompression(percentileRanks("percentile_ranks")) - .percentiles(10, 15))) + .values(10, 15))) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l)); @@ -139,7 +139,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) .field("value") - .percentiles(0, 10, 15, 100)) + .values(0, 10, 15, 100)) .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(0l)); @@ -160,13 +160,13 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) .field("value") - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue); } @Override @@ -177,7 +177,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( global("global").subAggregation( - randomCompression(percentileRanks("percentile_ranks")).field("value").percentiles(pcts))).execute() + randomCompression(percentileRanks("percentile_ranks")).field("value").values(pcts))).execute() .actionGet(); assertHitCount(searchResponse, 10); @@ -189,10 +189,10 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { assertThat(global.getAggregations(), notNullValue()); assertThat(global.getAggregations().asMap().size(), equalTo(1)); - PercentileRanks percentiles = global.getAggregations().get("percentile_ranks"); - assertThat(percentiles, notNullValue()); - assertThat(percentiles.getName(), equalTo("percentile_ranks")); - assertThat((PercentileRanks) global.getProperty("percentile_ranks"), sameInstance(percentiles)); + PercentileRanks values = global.getAggregations().get("percentile_ranks"); + assertThat(values, notNullValue()); + 
assertThat(values.getName(), equalTo("percentile_ranks")); + assertThat((PercentileRanks) global.getProperty("percentile_ranks"), sameInstance(values)); } @@ -202,13 +202,13 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) .field("value") - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue); } @Override @@ -218,13 +218,13 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) .field("value") - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue); } @Override @@ -233,14 +233,14 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.field("value").script(new Script("_value - 1")) - .percentiles(pcts)) + .field("value").script(new Script("_value - 1")) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue - 1, maxValue - 1); } @Override @@ -251,15 +251,15 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.field("value") + .field("value") .script(new Script("_value - dec", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue - 1, maxValue - 1); } @Override @@ -269,13 +269,13 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) .field("values") - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues, maxValues); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + 
assertConsistent(pcts, values, minValues, maxValues); } @Override @@ -284,14 +284,14 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.field("values").script(new Script("_value - 1")) - .percentiles(pcts)) + .field("values").script(new Script("_value - 1")) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues - 1, maxValues - 1); } public void testMultiValuedFieldWithValueScriptReverse() throws Exception { @@ -299,14 +299,14 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.field("values").script(new Script("_value * -1")) - .percentiles(pcts)) + .field("values").script(new Script("_value * -1")) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, -maxValues, -minValues); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, -maxValues, -minValues); } @Override @@ -317,15 +317,15 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.field("values") + .field("values") .script(new Script("_value - dec", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues - 1, maxValues - 1); } @Override @@ -334,14 +334,14 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.script(new Script("doc['value'].value")) - .percentiles(pcts)) + .script(new Script("doc['value'].value")) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue, maxValue); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue, maxValue); } @Override @@ -352,15 +352,15 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.script( + .script( new 
Script("doc['value'].value - dec", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValue - 1, maxValue - 1); } @Override @@ -369,14 +369,14 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks")) -.script(new Script("doc['values'].values")) - .percentiles(pcts)) + .script(new Script("doc['values'].values")) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues, maxValues); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues, maxValues); } @Override @@ -390,13 +390,13 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .script(new Script( "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); - final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks"); - assertConsistent(pcts, percentiles, minValues - 1, maxValues - 1); + final PercentileRanks values = searchResponse.getAggregations().get("percentile_ranks"); + assertConsistent(pcts, values, minValues - 1, maxValues - 1); } public void testOrderBySubAggregation() { @@ -405,7 +405,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { .setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value").interval(2l) - .subAggregation(randomCompression(percentileRanks("percentile_ranks").percentiles(99))) + .subAggregation(randomCompression(percentileRanks("percentile_ranks").values(99))) .order(Order.aggregation("percentile_ranks", "99", asc))) .execute().actionGet(); @@ -414,8 +414,8 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { Histogram histo = searchResponse.getAggregations().get("histo"); double previous = asc ? 
Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY; for (Histogram.Bucket bucket : histo.getBuckets()) { - PercentileRanks percentiles = bucket.getAggregations().get("percentile_ranks"); - double p99 = percentiles.percent(99); + PercentileRanks values = bucket.getAggregations().get("percentile_ranks"); + double p99 = values.percent(99); if (asc) { assertThat(p99, greaterThanOrEqualTo(previous)); } else { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java index d14638a386b..da1bfc022d2 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java @@ -31,8 +31,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesBuilder; - +import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorFactory; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -81,7 +80,7 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { return percentiles; } - private static PercentilesBuilder randomCompression(PercentilesBuilder builder) { + private static PercentilesAggregatorFactory randomCompression(PercentilesAggregatorFactory builder) { if (randomBoolean()) { builder.compression(randomIntBetween(20, 120) + randomDouble()); }
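
Reviewer note: the test updates above all exercise the same client-side surface changes introduced by this patch. Below is a minimal, self-contained sketch of the new bucket-aggregation call shapes shown in these hunks (filter now takes its query directly, include/exclude collapse into a single IncludeExclude, extended bounds become an ExtendedBounds object, and calendar intervals move to dateHistogramInterval). Index and field names are illustrative, not taken from the tests, and this is a sketch against the transitional factory API rather than a drop-in test:

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;

public class BucketAggregationMigrationSketch {

    // "idx", "tag", "value" and "date" are illustrative names, not taken from the tests above.
    static SearchResponse newStyleRequest(Client client) {
        return client.prepareSearch("idx")
                .setQuery(matchAllQuery())
                // was: filter("f").filter(matchAllQuery())
                .addAggregation(filter("f", matchAllQuery()))
                // was: terms("t").field("tag").include("val00.+").exclude("(val000|val001)")
                .addAggregation(terms("t").field("tag")
                        .includeExclude(new IncludeExclude("val00.+", "(val000|val001)")))
                // was: histogram("h").field("value").interval(2L).extendedBounds(0L, 100L)
                .addAggregation(histogram("h").field("value").interval(2L)
                        .extendedBounds(new ExtendedBounds(0L, 100L)))
                // was: dateHistogram("d").field("date").interval(DateHistogramInterval.DAY)
                .addAggregation(dateHistogram("d").field("date")
                        .dateHistogramInterval(DateHistogramInterval.DAY))
                .execute().actionGet();
    }
}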
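
Along the same lines, the HDR and TDigest percentile-ranks tests swap PercentileRanksBuilder for PercentileRanksAggregatorFactory and feed the input points through values(...) instead of percentiles(...). A minimal sketch, again with illustrative index and field names and assuming the transitional factory API from this patch:

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentileRanks;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;

public class PercentileRanksMigrationSketch {

    // was: PercentileRanksBuilder; fluent setters such as compression(...) keep their names.
    static PercentileRanksAggregatorFactory withCompression(PercentileRanksAggregatorFactory builder) {
        builder.compression(100.0);
        return builder;
    }

    // "idx" and "value" are illustrative; input points now go through values(...) rather than percentiles(...).
    static double rankOf99(Client client) {
        SearchResponse response = client.prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(percentileRanks("ranks")
                        .method(PercentilesMethod.HDR)      // HDR selected explicitly, as in the tests above
                        .numberOfSignificantValueDigits(3)
                        .field("value")
                        .values(1, 25, 50, 75, 99))
                .execute().actionGet();
        PercentileRanks ranks = response.getAggregations().get("ranks");
        return ranks.percent(99);
    }
}

Reading the result is unchanged: the assertions in these tests still fetch the aggregation with getAggregations().get(name) and query it with percent(rank), exactly as before the rename.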