Removes aggregation builders in favor of AggregatorFactory implementations

parent 3b35754f59
commit 11bafa18e1
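The client-facing entry points (SearchRequestBuilder, PercolateRequestBuilder,
PercolateSourceBuilder) now accept AggregatorFactory and
PipelineAggregatorFactory arguments in place of the removed
AbstractAggregationBuilder hierarchy. An illustrative migration sketch,
assuming QueryBuilders.termQuery from core and an existing Client instance
(neither appears in this diff):

    // Factories returned by AggregationBuilders carry their required
    // arguments as constructor parameters and nest via subAggregation().
    AggregatorFactory<?> agg = AggregationBuilders
            .filter("open_issues", QueryBuilders.termQuery("state", "open"))
            .subAggregation(AggregationBuilders.terms("labels"));
    client.prepareSearch("issues")
            .addAggregation(agg) // SearchRequestBuilder#addAggregation(AggregatorFactory<?>)
            .get();
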
@@ -26,7 +26,8 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
 
@@ -152,16 +153,26 @@ public class PercolateRequestBuilder extends BroadcastOperationRequestBuilder<Pe
     }
 
     /**
-     * Delegates to {@link PercolateSourceBuilder#addAggregation(AbstractAggregationBuilder)}
+     * Delegates to
+     * {@link PercolateSourceBuilder#addAggregation(AggregatorFactory)}
      */
-    public PercolateRequestBuilder addAggregation(AbstractAggregationBuilder aggregationBuilder) {
+    public PercolateRequestBuilder addAggregation(AggregatorFactory<?> aggregationBuilder) {
         sourceBuilder().addAggregation(aggregationBuilder);
         return this;
     }
 
     /**
-     * Sets the percolate request definition directly on the request.
-     * This will overwrite any definitions set by any of the delegate methods.
+     * Delegates to
+     * {@link PercolateSourceBuilder#addAggregation(PipelineAggregatorFactory)}
      */
+    public PercolateRequestBuilder addAggregation(PipelineAggregatorFactory aggregationBuilder) {
+        sourceBuilder().addAggregation(aggregationBuilder);
+        return this;
+    }
+
+    /**
+     * Sets the percolate request definition directly on the request. This will
+     * overwrite any definitions set by any of the delegate methods.
+     */
     public PercolateRequestBuilder setSource(PercolateSourceBuilder source) {
         sourceBuilder = source;

@@ -29,7 +29,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -51,7 +52,8 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
     private List<SortBuilder> sorts;
     private Boolean trackScores;
     private HighlightBuilder highlightBuilder;
-    private List<AbstractAggregationBuilder> aggregations;
+    private List<AggregatorFactory<?>> aggregationFactorys;
+    private List<PipelineAggregatorFactory> pipelineAggregationFactorys;
 
     /**
      * Sets the document to run the percolate queries against.
@@ -123,11 +125,22 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
     /**
      * Add an aggregation definition.
      */
-    public PercolateSourceBuilder addAggregation(AbstractAggregationBuilder aggregationBuilder) {
-        if (aggregations == null) {
-            aggregations = new ArrayList<>();
+    public PercolateSourceBuilder addAggregation(AggregatorFactory<?> aggregationBuilder) {
+        if (aggregationFactorys == null) {
+            aggregationFactorys = new ArrayList<>();
         }
-        aggregations.add(aggregationBuilder);
+        aggregationFactorys.add(aggregationBuilder);
+        return this;
+    }
+
+    /**
+     * Add an aggregation definition.
+     */
+    public PercolateSourceBuilder addAggregation(PipelineAggregatorFactory aggregationBuilder) {
+        if (pipelineAggregationFactorys == null) {
+            pipelineAggregationFactorys = new ArrayList<>();
+        }
+        pipelineAggregationFactorys.add(aggregationBuilder);
         return this;
     }
 
@@ -159,11 +172,18 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
         if (highlightBuilder != null) {
             highlightBuilder.toXContent(builder, params);
         }
-        if (aggregations != null) {
+        if (aggregationFactorys != null || pipelineAggregationFactorys != null) {
             builder.field("aggregations");
             builder.startObject();
-            for (AbstractAggregationBuilder aggregation : aggregations) {
-                aggregation.toXContent(builder, params);
+            if (aggregationFactorys != null) {
+                for (AggregatorFactory<?> aggregation : aggregationFactorys) {
+                    aggregation.toXContent(builder, params);
+                }
+            }
+            if (pipelineAggregationFactorys != null) {
+                for (PipelineAggregatorFactory aggregation : pipelineAggregationFactorys) {
+                    aggregation.toXContent(builder, params);
+                }
             }
             builder.endObject();
         }

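Note on the PercolateSourceBuilder change above: regular and pipeline
aggregations are now collected in two separate lists but rendered under the
single "aggregations" object. A usage sketch of the two overloads;
somePipelineFactory() is a hypothetical stand-in for any concrete
PipelineAggregatorFactory:

    PercolateSourceBuilder source = new PercolateSourceBuilder()
            .addAggregation(AggregationBuilders.terms("by_tag")) // AggregatorFactory<?> overload
            .addAggregation(somePipelineFactory());              // PipelineAggregatorFactory overload
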
@@ -28,8 +28,8 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.Template;
 import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
@@ -364,10 +364,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 
     /**
      * Adds an aggregation to the search operation.
-     *
-     * NORELEASE REMOVE WHEN AGG REFACTORING IS COMPLETE
      */
-    public SearchRequestBuilder addAggregation(AbstractAggregationBuilder aggregation) {
+    public SearchRequestBuilder addAggregation(AggregatorFactory<?> aggregation) {
         sourceBuilder().aggregation(aggregation);
         return this;
     }
@@ -375,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
     /**
      * Adds an aggregation to the search operation.
      */
-    public SearchRequestBuilder addAggregation(AggregatorFactory aggregation) {
+    public SearchRequestBuilder addAggregation(PipelineAggregatorFactory aggregation) {
         sourceBuilder().aggregation(aggregation);
         return this;
     }

@@ -436,7 +436,6 @@ public class SearchModule extends AbstractModule {
         AggregationBinaryParseElement aggBinaryParseElement = new AggregationBinaryParseElement(aggregatorParsers, indicesQueriesRegistry);
         AggregationPhase aggPhase = new AggregationPhase(aggParseElement, aggBinaryParseElement);
         bind(AggregatorParsers.class).toInstance(aggregatorParsers);
-        ;
         bind(AggregationParseElement.class).toInstance(aggParseElement);
         bind(AggregationPhase.class).toInstance(aggPhase);
     }

@@ -1,45 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.aggregations;
-
-import org.elasticsearch.common.xcontent.ToXContent;
-
-/**
- * Base structure for aggregation builders.
- */
-public abstract class AbstractAggregationBuilder implements ToXContent {
-
-    private final String name;
-    protected final String type;
-
-    /**
-     * Sole constructor, typically used by sub-classes.
-     */
-    protected AbstractAggregationBuilder(String name, String type) {
-        this.name = name;
-        this.type = type;
-    }
-
-    /**
-     * Return the name of the aggregation that is being built.
-     */
-    public String getName() {
-        return name;
-    }
-}

@@ -1,194 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations;
-
-import org.elasticsearch.ElasticsearchGenerationException;
-import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A base class for all bucket aggregation builders. NORELEASE REMOVE WHEN AGG
- * REFACTORING IS COMPLETE
- */
-@Deprecated
-public abstract class AggregationBuilder<B extends AggregationBuilder<B>> extends AbstractAggregationBuilder {
-
-    private List<AbstractAggregationBuilder> aggregations;
-    private List<AggregatorFactory> aggregatorFactories;
-    private List<PipelineAggregatorFactory> pipelineAggregatorFactories;
-    private BytesReference aggregationsBinary;
-    private Map<String, Object> metaData;
-
-    /**
-     * Sole constructor, typically used by sub-classes.
-     */
-    protected AggregationBuilder(String name, String type) {
-        super(name, type);
-    }
-
-    /**
-     * Add a sub aggregation to this aggregation. NORELEASE REMOVE THIS WHEN AGG
-     * REFACTOR IS COMPLETE
-     */
-    @SuppressWarnings("unchecked")
-    public B subAggregation(AbstractAggregationBuilder aggregation) {
-        if (aggregations == null) {
-            aggregations = new ArrayList<>();
-        }
-        aggregations.add(aggregation);
-        return (B) this;
-    }
-
-    /**
-     * Add a sub aggregation to this aggregation.
-     */
-    @SuppressWarnings("unchecked")
-    public B subAggregation(AggregatorFactory aggregation) {
-        if (aggregatorFactories == null) {
-            aggregatorFactories = new ArrayList<>();
-        }
-        aggregatorFactories.add(aggregation);
-        return (B) this;
-    }
-
-    /**
-     * Add a sub aggregation to this aggregation.
-     */
-    @SuppressWarnings("unchecked")
-    public B subAggregation(PipelineAggregatorFactory aggregation) {
-        if (pipelineAggregatorFactories == null) {
-            pipelineAggregatorFactories = new ArrayList<>();
-        }
-        pipelineAggregatorFactories.add(aggregation);
-        return (B) this;
-    }
-
-    /**
-     * Sets a raw (xcontent / json) sub addAggregation.
-     */
-    @Deprecated
-    public B subAggregation(byte[] aggregationsBinary) {
-        return subAggregation(aggregationsBinary, 0, aggregationsBinary.length);
-    }
-
-    /**
-     * Sets a raw (xcontent / json) sub addAggregation.
-     */
-    @Deprecated
-    public B subAggregation(byte[] aggregationsBinary, int aggregationsBinaryOffset, int aggregationsBinaryLength) {
-        return subAggregation(new BytesArray(aggregationsBinary, aggregationsBinaryOffset, aggregationsBinaryLength));
-    }
-
-    /**
-     * Sets a raw (xcontent / json) sub addAggregation.
-     */
-    @Deprecated
-    @SuppressWarnings("unchecked")
-    public B subAggregation(BytesReference aggregationsBinary) {
-        this.aggregationsBinary = aggregationsBinary;
-        return (B) this;
-    }
-
-    /**
-     * Sets a raw (xcontent / json) sub addAggregation.
-     */
-    @Deprecated
-    public B subAggregation(XContentBuilder aggs) {
-        return subAggregation(aggs.bytes());
-    }
-
-    /**
-     * Sets a raw (xcontent / json) sub addAggregation.
-     */
-    @Deprecated
-    public B subAggregation(Map<String, Object> aggs) {
-        try {
-            XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
-            builder.map(aggs);
-            return subAggregation(builder);
-        } catch (IOException e) {
-            throw new ElasticsearchGenerationException("Failed to generate [" + aggs + "]", e);
-        }
-    }
-
-    /**
-     * Sets the meta data to be included in the aggregation response
-     */
-    public B setMetaData(Map<String, Object> metaData) {
-        this.metaData = metaData;
-        return (B)this;
-    }
-
-    @Override
-    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject(getName());
-
-        if (this.metaData != null) {
-            builder.field("meta", this.metaData);
-        }
-        builder.field(type);
-        internalXContent(builder, params);
-
-        if (aggregations != null || aggregatorFactories != null || pipelineAggregatorFactories != null || aggregationsBinary != null) {
-
-            if (aggregations != null || aggregatorFactories != null || pipelineAggregatorFactories != null) {
-                builder.startObject("aggregations");
-                if (aggregations != null) {
-                    for (AbstractAggregationBuilder subAgg : aggregations) {
-                        subAgg.toXContent(builder, params);
-                    }
-                }
-                if (aggregatorFactories != null) {
-                    for (AggregatorFactory subAgg : aggregatorFactories) {
-                        subAgg.toXContent(builder, params);
-                    }
-                }
-                if (pipelineAggregatorFactories != null) {
-                    for (PipelineAggregatorFactory subAgg : pipelineAggregatorFactories) {
-                        subAgg.toXContent(builder, params);
-                    }
-                }
-                builder.endObject();
-            }
-
-            if (aggregationsBinary != null) {
-                if (XContentFactory.xContentType(aggregationsBinary) == builder.contentType()) {
-                    builder.rawField("aggregations", aggregationsBinary);
-                } else {
-                    builder.field("aggregations_binary", aggregationsBinary);
-                }
-            }
-
-        }
-
-        return builder.endObject();
-    }
-
-    protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException;
-}

@@ -19,36 +19,39 @@
 package org.elasticsearch.search.aggregations;
 
 import org.elasticsearch.common.geo.GeoDistance;
+import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.search.aggregations.bucket.children.Children;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenBuilder;
+import org.elasticsearch.search.aggregations.bucket.children.ParentToChildrenAggregator;
 import org.elasticsearch.search.aggregations.bucket.filter.Filter;
-import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregator;
 import org.elasticsearch.search.aggregations.bucket.filters.Filters;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
-import org.elasticsearch.search.aggregations.bucket.global.GlobalBuilder;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramBuilder;
+import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.histogram.HistogramBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator.DateHistogramFactory;
 import org.elasticsearch.search.aggregations.bucket.missing.Missing;
-import org.elasticsearch.search.aggregations.bucket.missing.MissingBuilder;
+import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregator;
 import org.elasticsearch.search.aggregations.bucket.nested.Nested;
-import org.elasticsearch.search.aggregations.bucket.nested.NestedBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregator;
 import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested;
-import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregator;
 import org.elasticsearch.search.aggregations.bucket.range.Range;
-import org.elasticsearch.search.aggregations.bucket.range.RangeBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorFactory;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.GeoDistanceFactory;
+import org.elasticsearch.search.aggregations.bucket.range.ipv4.IPv4RangeAggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
-import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregator;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
-import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
+import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
 import org.elasticsearch.search.aggregations.metrics.avg.Avg;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator;
 import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
@@ -62,9 +65,9 @@ import org.elasticsearch.search.aggregations.metrics.max.MaxAggregator;
 import org.elasticsearch.search.aggregations.metrics.min.Min;
 import org.elasticsearch.search.aggregations.metrics.min.MinAggregator;
 import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksBuilder;
+import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorFactory;
 import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesBuilder;
+import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorFactory;
 import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric;
 import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregator;
 import org.elasticsearch.search.aggregations.metrics.stats.Stats;
@@ -138,134 +141,151 @@ public class AggregationBuilders {
     /**
      * Create a new {@link Filter} aggregation with the given name.
      */
-    public static FilterAggregationBuilder filter(String name) {
-        return new FilterAggregationBuilder(name);
+    public static FilterAggregator.Factory filter(String name, QueryBuilder<?> filter) {
+        return new FilterAggregator.Factory(name, filter);
     }
 
     /**
      * Create a new {@link Filters} aggregation with the given name.
      */
-    public static FiltersAggregationBuilder filters(String name) {
-        return new FiltersAggregationBuilder(name);
+    public static FiltersAggregator.Factory filters(String name, KeyedFilter... filters) {
+        return new FiltersAggregator.Factory(name, filters);
+    }
+
+    /**
+     * Create a new {@link Filters} aggregation with the given name.
+     */
+    public static FiltersAggregator.Factory filters(String name, QueryBuilder<?>... filters) {
+        return new FiltersAggregator.Factory(name, filters);
     }
 
     /**
      * Create a new {@link Sampler} aggregation with the given name.
      */
-    public static SamplerAggregationBuilder sampler(String name) {
-        return new SamplerAggregationBuilder(name);
+    public static SamplerAggregator.Factory sampler(String name) {
+        return new SamplerAggregator.Factory(name);
+    }
+
+    /**
+     * Create a new {@link Sampler} aggregation with the given name.
+     */
+    public static SamplerAggregator.DiversifiedFactory diversifiedSampler(String name) {
+        return new SamplerAggregator.DiversifiedFactory(name);
     }
 
     /**
      * Create a new {@link Global} aggregation with the given name.
     */
-    public static GlobalBuilder global(String name) {
-        return new GlobalBuilder(name);
+    public static GlobalAggregator.Factory global(String name) {
+        return new GlobalAggregator.Factory(name);
     }
 
     /**
      * Create a new {@link Missing} aggregation with the given name.
      */
-    public static MissingBuilder missing(String name) {
-        return new MissingBuilder(name);
+    public static MissingAggregator.Factory missing(String name) {
+        return new MissingAggregator.Factory(name, null);
     }
 
     /**
      * Create a new {@link Nested} aggregation with the given name.
      */
-    public static NestedBuilder nested(String name) {
-        return new NestedBuilder(name);
+    public static NestedAggregator.Factory nested(String name, String path) {
+        return new NestedAggregator.Factory(name, path);
     }
 
     /**
      * Create a new {@link ReverseNested} aggregation with the given name.
      */
-    public static ReverseNestedBuilder reverseNested(String name) {
-        return new ReverseNestedBuilder(name);
+    public static ReverseNestedAggregator.Factory reverseNested(String name) {
+        return new ReverseNestedAggregator.Factory(name);
     }
 
     /**
      * Create a new {@link Children} aggregation with the given name.
      */
-    public static ChildrenBuilder children(String name) {
-        return new ChildrenBuilder(name);
+    public static ParentToChildrenAggregator.Factory children(String name, String childType) {
+        return new ParentToChildrenAggregator.Factory(name, childType);
     }
 
     /**
      * Create a new {@link GeoDistance} aggregation with the given name.
      */
-    public static GeoDistanceBuilder geoDistance(String name) {
-        return new GeoDistanceBuilder(name);
+    public static GeoDistanceFactory geoDistance(String name, GeoPoint origin) {
+        return new GeoDistanceFactory(name, origin);
     }
 
     /**
      * Create a new {@link Histogram} aggregation with the given name.
      */
-    public static HistogramBuilder histogram(String name) {
-        return new HistogramBuilder(name);
+    public static HistogramAggregator.Factory<?> histogram(String name) {
+        return new HistogramAggregator.Factory<>(name);
     }
 
     /**
      * Create a new {@link GeoHashGrid} aggregation with the given name.
      */
-    public static GeoHashGridBuilder geohashGrid(String name) {
-        return new GeoHashGridBuilder(name);
+    public static GeoHashGridParser.GeoGridFactory geohashGrid(String name) {
+        return new GeoHashGridParser.GeoGridFactory(name);
     }
 
     /**
      * Create a new {@link SignificantTerms} aggregation with the given name.
      */
-    public static SignificantTermsBuilder significantTerms(String name) {
-        return new SignificantTermsBuilder(name);
+    public static SignificantTermsAggregatorFactory significantTerms(String name) {
+        return new SignificantTermsAggregatorFactory(name, null);
    }
 
     /**
-     * Create a new {@link DateHistogramBuilder} aggregation with the given name.
+     * Create a new {@link DateHistogramFactory} aggregation with the given
+     * name.
      */
-    public static DateHistogramBuilder dateHistogram(String name) {
-        return new DateHistogramBuilder(name);
+    public static DateHistogramFactory dateHistogram(String name) {
+        return new DateHistogramFactory(name);
     }
 
     /**
      * Create a new {@link Range} aggregation with the given name.
      */
-    public static RangeBuilder range(String name) {
-        return new RangeBuilder(name);
+    public static RangeAggregator.Factory range(String name) {
+        return new RangeAggregator.Factory(name);
     }
 
     /**
-     * Create a new {@link DateRangeBuilder} aggregation with the given name.
+     * Create a new {@link DateRangeAggregatorFactory} aggregation with the
+     * given name.
     */
-    public static DateRangeBuilder dateRange(String name) {
-        return new DateRangeBuilder(name);
+    public static DateRangeAggregatorFactory dateRange(String name) {
+        return new DateRangeAggregatorFactory(name);
     }
 
     /**
-     * Create a new {@link IPv4RangeBuilder} aggregation with the given name.
+     * Create a new {@link IPv4RangeAggregatorFactory} aggregation with the
+     * given name.
     */
-    public static IPv4RangeBuilder ipRange(String name) {
-        return new IPv4RangeBuilder(name);
+    public static IPv4RangeAggregatorFactory ipRange(String name) {
+        return new IPv4RangeAggregatorFactory(name);
     }
 
     /**
      * Create a new {@link Terms} aggregation with the given name.
      */
-    public static TermsBuilder terms(String name) {
-        return new TermsBuilder(name);
+    public static TermsAggregatorFactory terms(String name) {
+        return new TermsAggregatorFactory(name, null);
    }
 
     /**
      * Create a new {@link Percentiles} aggregation with the given name.
      */
-    public static PercentilesBuilder percentiles(String name) {
-        return new PercentilesBuilder(name);
+    public static PercentilesAggregatorFactory percentiles(String name) {
+        return new PercentilesAggregatorFactory(name);
    }
 
     /**
      * Create a new {@link PercentileRanks} aggregation with the given name.
      */
-    public static PercentileRanksBuilder percentileRanks(String name) {
-        return new PercentileRanksBuilder(name);
+    public static PercentileRanksAggregatorFactory percentileRanks(String name) {
+        return new PercentileRanksAggregatorFactory(name);
    }
 
     /**

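Note on AggregationBuilders: several factory methods gained parameters because
compulsory settings moved from builder setters into the factory constructors.
A before/after sketch; the "before" setter names come from the deleted builder
classes shown later in this diff, and QueryBuilders.termQuery is assumed from
core:

    // before: AggregationBuilders.children("answers").childType("answer")
    ParentToChildrenAggregator.Factory childrenAgg = AggregationBuilders.children("answers", "answer");
    // before: a FilterAggregationBuilder with a separate filter(...) setter
    FilterAggregator.Factory filterAgg = AggregationBuilders.filter("open", QueryBuilders.termQuery("state", "open"));
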
@@ -68,7 +68,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
          * @return an empty {@link AggregatorFactory} instance for this parser
          * that can be used for deserialization
          */
-        AggregatorFactory[] getFactoryPrototypes();
+        AggregatorFactory<?> getFactoryPrototypes();
     }
 
     /**

@@ -46,25 +46,25 @@ import java.util.Set;
  */
 public class AggregatorFactories extends ToXContentToBytes implements Writeable<AggregatorFactories> {
 
-    public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory[0],
+    public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory<?>[0],
             new ArrayList<PipelineAggregatorFactory>());
 
-    private AggregatorFactory parent;
-    private AggregatorFactory[] factories;
+    private AggregatorFactory<?> parent;
+    private AggregatorFactory<?>[] factories;
     private List<PipelineAggregatorFactory> pipelineAggregatorFactories;
 
     public static Builder builder() {
         return new Builder();
     }
 
-    private AggregatorFactories(AggregatorFactory[] factories,
+    private AggregatorFactories(AggregatorFactory<?>[] factories,
             List<PipelineAggregatorFactory> pipelineAggregators) {
         this.factories = factories;
         this.pipelineAggregatorFactories = pipelineAggregators;
     }
 
     public void init(AggregationContext context) {
-        for (AggregatorFactory factory : factories) {
+        for (AggregatorFactory<?> factory : factories) {
             factory.init(context);
         }
     }
@@ -82,7 +82,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
      * buckets.
      */
     public Aggregator[] createSubAggregators(Aggregator parent) throws IOException {
-        Aggregator[] aggregators = new Aggregator[count()];
+        Aggregator[] aggregators = new Aggregator[countAggregators()];
         for (int i = 0; i < factories.length; ++i) {
             // TODO: sometimes even sub aggregations always get called with bucket 0, eg. if
             // you have a terms agg under a top-level filter agg. We should have a way to
@@ -105,19 +105,30 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
         return aggregators;
     }
 
-    public int count() {
+    /**
+     * @return the number of sub-aggregator factories not including pipeline
+     *         aggregator factories
+     */
+    public int countAggregators() {
         return factories.length;
     }
 
-    void setParent(AggregatorFactory parent) {
+    /**
+     * @return the number of pipeline aggregator factories
+     */
+    public int countPipelineAggregators() {
+        return pipelineAggregatorFactories.size();
+    }
+
+    void setParent(AggregatorFactory<?> parent) {
         this.parent = parent;
-        for (AggregatorFactory factory : factories) {
+        for (AggregatorFactory<?> factory : factories) {
             factory.parent = parent;
         }
     }
 
     public void validate() {
-        for (AggregatorFactory factory : factories) {
+        for (AggregatorFactory<?> factory : factories) {
             factory.validate();
         }
         for (PipelineAggregatorFactory factory : pipelineAggregatorFactories) {
@@ -128,11 +139,21 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
     public static class Builder {
 
         private final Set<String> names = new HashSet<>();
-        private final List<AggregatorFactory> factories = new ArrayList<>();
+        private final List<AggregatorFactory<?>> factories = new ArrayList<>();
         private final List<PipelineAggregatorFactory> pipelineAggregatorFactories = new ArrayList<>();
         private boolean skipResolveOrder;
 
-        public Builder addAggregator(AggregatorFactory factory) {
+        public Builder addAggregators(AggregatorFactories factories) {
+            for (AggregatorFactory<?> factory : factories.factories) {
+                addAggregator(factory);
+            }
+            for (PipelineAggregatorFactory factory : factories.pipelineAggregatorFactories) {
+                addPipelineAggregator(factory);
+            }
+            return this;
+        }
+
+        public Builder addAggregator(AggregatorFactory<?> factory) {
             if (!names.add(factory.name)) {
                 throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
             }
@@ -163,17 +184,17 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
             } else {
                 orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorFactories, this.factories);
             }
-            return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), orderedpipelineAggregators);
+            return new AggregatorFactories(factories.toArray(new AggregatorFactory<?>[factories.size()]), orderedpipelineAggregators);
         }
 
         private List<PipelineAggregatorFactory> resolvePipelineAggregatorOrder(List<PipelineAggregatorFactory> pipelineAggregatorFactories,
-                List<AggregatorFactory> aggFactories) {
+                List<AggregatorFactory<?>> aggFactories) {
             Map<String, PipelineAggregatorFactory> pipelineAggregatorFactoriesMap = new HashMap<>();
             for (PipelineAggregatorFactory factory : pipelineAggregatorFactories) {
                 pipelineAggregatorFactoriesMap.put(factory.getName(), factory);
             }
-            Map<String, AggregatorFactory> aggFactoriesMap = new HashMap<>();
-            for (AggregatorFactory aggFactory : aggFactories) {
+            Map<String, AggregatorFactory<?>> aggFactoriesMap = new HashMap<>();
+            for (AggregatorFactory<?> aggFactory : aggFactories) {
                 aggFactoriesMap.put(aggFactory.name, aggFactory);
             }
             List<PipelineAggregatorFactory> orderedPipelineAggregatorrs = new LinkedList<>();
@@ -187,7 +208,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
             return orderedPipelineAggregatorrs;
         }
 
-        private void resolvePipelineAggregatorOrder(Map<String, AggregatorFactory> aggFactoriesMap,
+        private void resolvePipelineAggregatorOrder(Map<String, AggregatorFactory<?>> aggFactoriesMap,
                 Map<String, PipelineAggregatorFactory> pipelineAggregatorFactoriesMap,
                 List<PipelineAggregatorFactory> orderedPipelineAggregators, List<PipelineAggregatorFactory> unmarkedFactories, Set<PipelineAggregatorFactory> temporarilyMarked,
                 PipelineAggregatorFactory factory) {
@@ -202,7 +223,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
             if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) {
                 continue;
             } else if (aggFactoriesMap.containsKey(firstAggName)) {
-                AggregatorFactory aggFactory = aggFactoriesMap.get(firstAggName);
+                AggregatorFactory<?> aggFactory = aggFactoriesMap.get(firstAggName);
                 for (int i = 1; i < bucketsPathElements.size(); i++) {
                     PathElement pathElement = bucketsPathElements.get(i);
                     String aggName = pathElement.name;
@@ -211,9 +232,9 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
                     } else {
                         // Check the non-pipeline sub-aggregator
                         // factories
-                        AggregatorFactory[] subFactories = aggFactory.factories.factories;
+                        AggregatorFactory<?>[] subFactories = aggFactory.factories.factories;
                         boolean foundSubFactory = false;
-                        for (AggregatorFactory subFactory : subFactories) {
+                        for (AggregatorFactory<?> subFactory : subFactories) {
                             if (aggName.equals(subFactory.name)) {
                                 aggFactory = subFactory;
                                 foundSubFactory = true;
@@ -254,8 +275,8 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
             }
         }
 
-        AggregatorFactory[] getAggregatorFactories() {
-            return this.factories.toArray(new AggregatorFactory[this.factories.size()]);
+        AggregatorFactory<?>[] getAggregatorFactories() {
+            return this.factories.toArray(new AggregatorFactory<?>[this.factories.size()]);
         }
 
         List<PipelineAggregatorFactory> getPipelineAggregatorFactories() {
@@ -266,9 +287,9 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
     @Override
     public AggregatorFactories readFrom(StreamInput in) throws IOException {
         int factoriesSize = in.readVInt();
-        AggregatorFactory[] factoriesList = new AggregatorFactory[factoriesSize];
+        AggregatorFactory<?>[] factoriesList = new AggregatorFactory<?>[factoriesSize];
         for (int i = 0; i < factoriesSize; i++) {
-            AggregatorFactory factory = in.readAggregatorFactory();
+            AggregatorFactory<?> factory = in.readAggregatorFactory();
             factoriesList[i] = factory;
         }
         int pipelineFactoriesSize = in.readVInt();
@@ -285,7 +306,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeVInt(this.factories.length);
-        for (AggregatorFactory factory : factories) {
+        for (AggregatorFactory<?> factory : factories) {
             out.writeAggregatorFactory(factory);
         }
         out.writeVInt(this.pipelineAggregatorFactories.size());
@@ -298,7 +319,7 @@ public class AggregatorFactories extends ToXContentToBytes implements Writeable<
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
         if (factories != null) {
-            for (AggregatorFactory subAgg : factories) {
+            for (AggregatorFactory<?> subAgg : factories) {
                 subAgg.toXContent(builder, params);
             }
         }

@@ -31,6 +31,7 @@ import org.elasticsearch.common.util.ObjectArray;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.internal.SearchContext.Lifetime;
 
@@ -96,6 +97,30 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         return (AF) this;
     }
 
+    /**
+     * Add a sub aggregation to this aggregation.
+     */
+    @SuppressWarnings("unchecked")
+    public AF subAggregation(AggregatorFactory<?> aggregation) {
+        AggregatorFactories.Builder builder = AggregatorFactories.builder();
+        builder.addAggregators(factories);
+        builder.addAggregator(aggregation);
+        factories = builder.build();
+        return (AF) this;
+    }
+
+    /**
+     * Add a sub aggregation to this aggregation.
+     */
+    @SuppressWarnings("unchecked")
+    public AF subAggregation(PipelineAggregatorFactory aggregation) {
+        AggregatorFactories.Builder builder = AggregatorFactories.builder();
+        builder.addAggregators(factories);
+        builder.addPipelineAggregator(aggregation);
+        factories = builder.build();
+        return (AF) this;
+    }
+
     public String name() {
         return name;
     }
@@ -134,8 +159,9 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
     public void doValidate() {
     }
 
-    public void setMetaData(Map<String, Object> metaData) {
+    public AF setMetaData(Map<String, Object> metaData) {
         this.metaData = metaData;
+        return (AF) this;
     }
 
     @Override
@@ -148,10 +174,7 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         return factory;
     }
 
-    // NORELEASE make this abstract when agg refactor complete
-    protected AggregatorFactory<AF> doReadFrom(String name, StreamInput in) throws IOException {
-        return null;
-    }
+    protected abstract AggregatorFactory<AF> doReadFrom(String name, StreamInput in) throws IOException;
 
     @Override
     public final void writeTo(StreamOutput out) throws IOException {
@@ -161,9 +184,7 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         out.writeMap(metaData);
     }
 
-    // NORELEASE make this abstract when agg refactor complete
-    protected void doWriteTo(StreamOutput out) throws IOException {
-    }
+    protected abstract void doWriteTo(StreamOutput out) throws IOException;
 
     @Override
     public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
@@ -175,7 +196,7 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         builder.field(type.name());
         internalXContent(builder, params);
 
-        if (factories != null && factories.count() > 0) {
+        if (factories != null && (factories.countAggregators() + factories.countPipelineAggregators()) > 0) {
             builder.field("aggregations");
             factories.toXContent(builder, params);
 
@@ -184,10 +205,7 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         return builder.endObject();
     }
 
-    // NORELEASE make this method abstract when agg refactor complete
-    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
-        return builder;
-    }
+    protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException;
 
     @Override
     public String getWriteableName() {
@@ -327,12 +345,7 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         return Objects.hash(factories, metaData, name, type, doHashCode());
     }
 
-    // NORELEASE make this method abstract here when agg refactor complete (so
-    // that subclasses are forced to implement it)
-    protected int doHashCode() {
-        throw new UnsupportedOperationException(
-                "This method should be implemented by a sub-class and should not rely on this method. When agg re-factoring is complete this method will be made abstract.");
-    }
+    protected abstract int doHashCode();
 
     @Override
     public boolean equals(Object obj) {
@@ -352,11 +365,6 @@ public abstract class AggregatorFactory<AF extends AggregatorFactory<AF>> extend
         return doEquals(obj);
     }
 
-    // NORELEASE make this method abstract here when agg refactor complete (so
-    // that subclasses are forced to implement it)
-    protected boolean doEquals(Object obj) {
-        throw new UnsupportedOperationException(
-                "This method should be implemented by a sub-class and should not rely on this method. When agg re-factoring is complete this method will be made abstract.");
-    }
+    protected abstract boolean doEquals(Object obj);
 
 }

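Note on AggregatorFactory: setMetaData now returns the factory itself (AF)
instead of void, and the new subAggregation overloads rebuild the internal
AggregatorFactories via its Builder, so factories compose fluently. A sketch
grounded in the signatures above (Collections.singletonMap is from the JDK):

    AggregatorFactory<?> agg = AggregationBuilders.terms("tags")
            .setMetaData(Collections.singletonMap("owner", "search-team"))
            .subAggregation(AggregationBuilders.missing("no_tag"));
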
@@ -60,23 +60,15 @@ public class AggregatorParsers {
         Map<String, Aggregator.Parser> aggParsersBuilder = new HashMap<>(aggParsers.size());
         for (Aggregator.Parser parser : aggParsers) {
             aggParsersBuilder.put(parser.type(), parser);
-            AggregatorFactory[] factoryPrototypes = parser.getFactoryPrototypes();
-            // NORELEASE remove this check when agg refactoring complete
-            if (factoryPrototypes != null) {
-                for (AggregatorFactory factoryPrototype : factoryPrototypes) {
-                    namedWriteableRegistry.registerPrototype(AggregatorFactory.class, factoryPrototype);
-                }
-            }
+            AggregatorFactory<?> factoryPrototype = parser.getFactoryPrototypes();
+            namedWriteableRegistry.registerPrototype(AggregatorFactory.class, factoryPrototype);
         }
         this.aggParsers = unmodifiableMap(aggParsersBuilder);
         Map<String, PipelineAggregator.Parser> pipelineAggregatorParsersBuilder = new HashMap<>(pipelineAggregatorParsers.size());
         for (PipelineAggregator.Parser parser : pipelineAggregatorParsers) {
             pipelineAggregatorParsersBuilder.put(parser.type(), parser);
             PipelineAggregatorFactory factoryPrototype = parser.getFactoryPrototype();
-            // NORELEASE remove this check when agg refactoring complete
-            if (factoryPrototype != null) {
-                namedWriteableRegistry.registerPrototype(PipelineAggregatorFactory.class, factoryPrototype);
-            }
+            namedWriteableRegistry.registerPrototype(PipelineAggregatorFactory.class, factoryPrototype);
         }
         this.pipelineAggregatorParsers = unmodifiableMap(pipelineAggregatorParsersBuilder);
     }

@@ -1,112 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService.ScriptType;
-
-import java.io.IOException;
-import java.util.Map;
-
-/**
- * A base class for all bucket aggregation builders that are based on values (either script generated or field data values)
- */
-public abstract class ValuesSourceAggregationBuilder<B extends ValuesSourceAggregationBuilder<B>> extends AggregationBuilder<B> {
-
-    private Script script;
-    private String field;
-    @Deprecated
-    private String scriptString;
-    @Deprecated
-    private String lang;
-    @Deprecated
-    private Map<String, Object> params;
-    private Object missing;
-
-    /**
-     * Constructs a new builder.
-     *
-     * @param name The name of the aggregation.
-     * @param type The type of the aggregation.
-     */
-    protected ValuesSourceAggregationBuilder(String name, String type) {
-        super(name, type);
-    }
-
-    /**
-     * Sets the field from which the values will be extracted.
-     *
-     * @param field The name of the field
-     * @return This builder (fluent interface support)
-     */
-    @SuppressWarnings("unchecked")
-    public B field(String field) {
-        this.field = field;
-        return (B) this;
-    }
-
-    /**
-     * Sets the script which generates the values. If the script is configured along with the field (as in {@link #field(String)}), then
-     * this script will be treated as a {@code value script}. A <i>value script</i> will be applied on the values that are extracted from
-     * the field data (you can refer to that value in the script using the {@code _value} reserved variable). If only the script is configured
-     * (and the no field is configured next to it), then the script will be responsible to generate the values that will be aggregated.
-     *
-     * @param script The configured script.
-     * @return This builder (fluent interface support)
-     */
-    @SuppressWarnings("unchecked")
-    public B script(Script script) {
-        this.script = script;
-        return (B) this;
-    }
-
-    /**
-     * Configure the value to use when documents miss a value.
-     */
-    public B missing(Object missingValue) {
-        this.missing = missingValue;
-        return (B) this;
-    }
-
-    @Override
-    protected final XContentBuilder internalXContent(XContentBuilder builder, Params builderParams) throws IOException {
-        builder.startObject();
-        if (field != null) {
-            builder.field("field", field);
-        }
-
-        if (script == null) {
-            if (scriptString != null) {
-                builder.field("script", new Script(scriptString, ScriptType.INLINE, lang, params));
-            }
-        } else {
-            builder.field("script", script);
-        }
-        if (missing != null) {
-            builder.field("missing", missing);
-        }
-
-        doInternalXContent(builder, builderParams);
-        return builder.endObject();
-    }
-
-    protected abstract XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException;
-}

@@ -1,58 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.aggregations.bucket.children;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-
-import java.io.IOException;
-
-/**
- * Builder for the {@link Children} aggregation.
- */
-public class ChildrenBuilder extends AggregationBuilder<ChildrenBuilder> {
-
-    private String childType;
-
-    /**
-     * Sole constructor.
-     */
-    public ChildrenBuilder(String name) {
-        super(name, InternalChildren.TYPE.name());
-    }
-
-    /**
-     * Set the type of children documents. This parameter is compulsory.
-     */
-    public ChildrenBuilder childType(String childType) {
-        this.childType = childType;
-        return this;
-    }
-
-    @Override
-    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (childType == null) {
-            throw new SearchSourceBuilderException("child_type must be set on children aggregation [" + getName() + "]");
-        }
-        builder.field("type", childType);
-        return builder.endObject();
-    }
-}

@@ -67,7 +67,7 @@ public class ChildrenParser implements Aggregator.Parser {
     }
 
     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new ParentToChildrenAggregator.Factory(null, null) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new ParentToChildrenAggregator.Factory(null, null);
     }
 }

@@ -1,60 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.filter;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-
-import java.io.IOException;
-
-/**
- * Builder for the {@link Filter} aggregation.
- */
-public class FilterAggregationBuilder extends AggregationBuilder<FilterAggregationBuilder> {
-
-    private QueryBuilder filter;
-
-    /**
-     * Sole constructor.
-     */
-    public FilterAggregationBuilder(String name) {
-        super(name, InternalFilter.TYPE.name());
-    }
-
-    /**
-     * Set the filter to use, only documents that match this filter will fall
-     * into the bucket defined by this {@link Filter} aggregation.
-     */
-    public FilterAggregationBuilder filter(QueryBuilder filter) {
-        this.filter = filter;
-        return this;
-    }
-
-    @Override
-    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
-        if (filter == null) {
-            throw new SearchSourceBuilderException("filter must be set on filter aggregation [" + getName() + "]");
-        }
-        filter.toXContent(builder, params);
-        return builder;
-    }
-}

@@ -52,8 +52,8 @@ public class FilterParser implements Aggregator.Parser {
     }
 
     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new FilterAggregator.Factory(null, null) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new FilterAggregator.Factory(null, null);
     }
 
 }

@@ -1,126 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.filters;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Builder for the {@link Filters} aggregation.
 */
public class FiltersAggregationBuilder extends AggregationBuilder<FiltersAggregationBuilder> {

    private Map<String, QueryBuilder> keyedFilters = null;
    private List<QueryBuilder> nonKeyedFilters = null;
    private Boolean otherBucket;
    private String otherBucketKey;

    /**
     * Sole constructor.
     */
    public FiltersAggregationBuilder(String name) {
        super(name, InternalFilters.TYPE.name());
    }

    /**
     * Add a new filter with the given key.
     * NOTE: if a filter was already defined for this key, then this filter will replace it.
     * NOTE: the same {@link FiltersAggregationBuilder} cannot have both keyed and non-keyed filters
     */
    public FiltersAggregationBuilder filter(String key, QueryBuilder filter) {
        if (keyedFilters == null) {
            keyedFilters = new LinkedHashMap<>();
        }
        keyedFilters.put(key, filter);
        return this;
    }

    /**
     * Add a new filter with no key.
     * NOTE: the same {@link FiltersAggregationBuilder} cannot have both keyed and non-keyed filters.
     */
    public FiltersAggregationBuilder filter(QueryBuilder filter) {
        if (nonKeyedFilters == null) {
            nonKeyedFilters = new ArrayList<>();
        }
        nonKeyedFilters.add(filter);
        return this;
    }

    /**
     * Include a bucket for documents not matching any filter
     */
    public FiltersAggregationBuilder otherBucket(boolean otherBucket) {
        this.otherBucket = otherBucket;
        return this;
    }

    /**
     * The key to use for the bucket for documents not matching any filter. Will
     * implicitly enable the other bucket if set.
     */
    public FiltersAggregationBuilder otherBucketKey(String otherBucketKey) {
        this.otherBucketKey = otherBucketKey;
        return this;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (keyedFilters == null && nonKeyedFilters == null) {
            throw new SearchSourceBuilderException("At least one filter must be set on filter aggregation [" + getName() + "]");
        }
        if (keyedFilters != null && nonKeyedFilters != null) {
            throw new SearchSourceBuilderException("Cannot add both keyed and non-keyed filters to filters aggregation");
        }

        if (keyedFilters != null) {
            builder.startObject(FiltersParser.FILTERS_FIELD.getPreferredName());
            for (Map.Entry<String, QueryBuilder> entry : keyedFilters.entrySet()) {
                builder.field(entry.getKey());
                entry.getValue().toXContent(builder, params);
            }
            builder.endObject();
        }
        if (nonKeyedFilters != null) {
            builder.startArray(FiltersParser.FILTERS_FIELD.getPreferredName());
            for (QueryBuilder filterBuilder : nonKeyedFilters) {
                filterBuilder.toXContent(builder, params);
            }
            builder.endArray();

        }
        if (otherBucketKey != null) {
            builder.field(FiltersParser.OTHER_BUCKET_KEY_FIELD.getPreferredName(), otherBucketKey);
        }
        if (otherBucket != null) {
            builder.field(FiltersParser.OTHER_BUCKET_FIELD.getPreferredName(), otherBucket);
        }
        return builder.endObject();
    }
}
@@ -45,6 +45,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

@@ -210,7 +211,11 @@ public class FiltersAggregator extends BucketsAggregator {
         * @param filters
         *            the KeyedFilters to use with this aggregation.
         */
        public Factory(String name, List<KeyedFilter> filters) {
        public Factory(String name, KeyedFilter... filters) {
            this(name, Arrays.asList(filters));
        }

        private Factory(String name, List<KeyedFilter> filters) {
            super(name, InternalFilters.TYPE);
            this.filters = filters;
            this.keyed = true;
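For illustration: the public constructor now takes varargs while the List-based constructor becomes a private delegate, so call sites can declare the filters inline. A minimal usage sketch (the KeyedFilter constructor signature is assumed from its usage in FiltersParser below, and the query variables are placeholders):

    FiltersAggregator.Factory factory = new FiltersAggregator.Factory("status",
            new FiltersAggregator.KeyedFilter("errors", errorsQuery),     // errorsQuery: any QueryBuilder
            new FiltersAggregator.KeyedFilter("warnings", warningsQuery));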
@@ -32,7 +32,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
@@ -128,7 +127,7 @@ public class FiltersParser implements Aggregator.Parser {

        FiltersAggregator.Factory factory;
        if (keyedFilters != null) {
            factory = new FiltersAggregator.Factory(aggregationName, keyedFilters);
            factory = new FiltersAggregator.Factory(aggregationName, keyedFilters.toArray(new FiltersAggregator.KeyedFilter[keyedFilters.size()]));
        } else {
            factory = new FiltersAggregator.Factory(aggregationName, nonKeyedFilters.toArray(new QueryBuilder<?>[nonKeyedFilters.size()]));
        }
@@ -142,8 +141,8 @@ public class FiltersParser implements Aggregator.Parser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new FiltersAggregator.Factory(null, Collections.emptyList()) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new FiltersAggregator.Factory(null, new FiltersAggregator.KeyedFilter[0]);
    }

}
@@ -1,97 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.geogrid;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;

import java.io.IOException;

/**
 * Creates an aggregation based on bucketing points into GeoHashes
 */
public class GeoHashGridBuilder extends AggregationBuilder<GeoHashGridBuilder> {


    private String field;
    private int precision = GeoHashGridParams.DEFAULT_PRECISION;
    private int requiredSize = GeoHashGridParams.DEFAULT_MAX_NUM_CELLS;
    private int shardSize = 0;

    /**
     * Sole constructor.
     */
    public GeoHashGridBuilder(String name) {
        super(name, InternalGeoHashGrid.TYPE.name());
    }

    /**
     * Set the field to use to get geo points.
     */
    public GeoHashGridBuilder field(String field) {
        this.field = field;
        return this;
    }

    /**
     * Set the geohash precision to use for this aggregation. The higher the
     * precision, the more fine-grained this aggregation will be.
     */
    public GeoHashGridBuilder precision(int precision) {
        this.precision = GeoHashGridParams.checkPrecision(precision);
        return this;
    }

    /**
     * Set the number of buckets to return.
     */
    public GeoHashGridBuilder size(int requiredSize) {
        this.requiredSize = requiredSize;
        return this;
    }

    /**
     * Expert: Set the number of buckets to get on each shard to improve
     * accuracy.
     */
    public GeoHashGridBuilder shardSize(int shardSize) {
        this.shardSize = shardSize;
        return this;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (field != null) {
            builder.field("field", field);
        }
        if (precision != GeoHashGridParams.DEFAULT_PRECISION) {
            builder.field(GeoHashGridParams.FIELD_PRECISION.getPreferredName(), precision);
        }
        if (requiredSize != GeoHashGridParams.DEFAULT_MAX_NUM_CELLS) {
            builder.field(GeoHashGridParams.FIELD_SIZE.getPreferredName(), requiredSize);
        }
        if (shardSize != 0) {
            builder.field(GeoHashGridParams.FIELD_SHARD_SIZE.getPreferredName(), shardSize);
        }

        return builder.endObject();
    }

}
@@ -72,8 +72,8 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser {
        return InternalGeoHashGrid.TYPE.name();
    }
    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new GeoGridFactory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new GeoGridFactory(null);
    }

    @Override
@@ -1,43 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.global;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;

import java.io.IOException;

/**
 * Builder for the {@link Global} aggregation.
 */
public class GlobalBuilder extends AggregationBuilder<GlobalBuilder> {

    /**
     * Sole constructor.
     */
    public GlobalBuilder(String name) {
        super(name, InternalGlobal.TYPE.name());
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject().endObject();
    }
}
@@ -42,8 +42,8 @@ public class GlobalParser implements Aggregator.Parser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new GlobalAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new GlobalAggregator.Factory(null);
    }

}
@@ -1,186 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;
import org.joda.time.DateTime;

import java.io.IOException;

/**
 * Builder for the {@code DateHistogram} aggregation.
 */
public class DateHistogramBuilder extends ValuesSourceAggregationBuilder<DateHistogramBuilder> {

    private Object interval;
    private Histogram.Order order;
    private Long minDocCount;
    private Object extendedBoundsMin;
    private Object extendedBoundsMax;
    private String timeZone;
    private String format;
    private String offset;

    /**
     * Sole constructor.
     */
    public DateHistogramBuilder(String name) {
        super(name, InternalDateHistogram.TYPE.name());
    }

    /**
     * Set the interval in milliseconds.
     */
    public DateHistogramBuilder interval(long interval) {
        this.interval = interval;
        return this;
    }

    /**
     * Set the interval.
     */
    public DateHistogramBuilder interval(DateHistogramInterval interval) {
        this.interval = interval;
        return this;
    }

    /**
     * Set the order by which the buckets will be returned.
     */
    public DateHistogramBuilder order(Histogram.Order order) {
        this.order = order;
        return this;
    }

    /**
     * Set the minimum document count per bucket. Buckets with less documents
     * than this min value will not be returned.
     */
    public DateHistogramBuilder minDocCount(long minDocCount) {
        this.minDocCount = minDocCount;
        return this;
    }

    /**
     * Set the timezone in which to translate dates before computing buckets.
     */
    public DateHistogramBuilder timeZone(String timeZone) {
        this.timeZone = timeZone;
        return this;
    }

    /**
     * @param offset sets the offset of time intervals in this histogram
     * @return the current builder
     */
    public DateHistogramBuilder offset(String offset) {
        this.offset = offset;
        return this;
    }

    /**
     * Set the format to use for dates.
     */
    public DateHistogramBuilder format(String format) {
        this.format = format;
        return this;
    }

    /**
     * Set extended bounds for the histogram. In case the lower value in the
     * histogram would be greater than <code>min</code> or the upper value would
     * be less than <code>max</code>, empty buckets will be generated.
     */
    public DateHistogramBuilder extendedBounds(Long min, Long max) {
        extendedBoundsMin = min;
        extendedBoundsMax = max;
        return this;
    }

    /**
     * Set extended bounds for the histogram. In case the lower value in the
     * histogram would be greater than <code>min</code> or the upper value would
     * be less than <code>max</code>, empty buckets will be generated.
     */
    public DateHistogramBuilder extendedBounds(String min, String max) {
        extendedBoundsMin = min;
        extendedBoundsMax = max;
        return this;
    }

    /**
     * Set extended bounds for the histogram. In case the lower value in the
     * histogram would be greater than <code>min</code> or the upper value would
     * be less than <code>max</code>, empty buckets will be generated.
     */
    public DateHistogramBuilder extendedBounds(DateTime min, DateTime max) {
        extendedBoundsMin = min;
        extendedBoundsMax = max;
        return this;
    }

    @Override
    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
        if (interval == null) {
            throw new SearchSourceBuilderException("[interval] must be defined for histogram aggregation [" + getName() + "]");
        }
        if (interval instanceof Number) {
            interval = TimeValue.timeValueMillis(((Number) interval).longValue()).toString();
        }
        builder.field("interval", interval);

        if (minDocCount != null) {
            builder.field("min_doc_count", minDocCount);
        }

        if (order != null) {
            builder.field("order");
            order.toXContent(builder, params);
        }

        if (timeZone != null) {
            builder.field("time_zone", timeZone);
        }

        if (offset != null) {
            builder.field("offset", offset);
        }

        if (format != null) {
            builder.field("format", format);
        }

        if (extendedBoundsMin != null || extendedBoundsMax != null) {
            builder.startObject(ExtendedBounds.EXTENDED_BOUNDS_FIELD.getPreferredName());
            if (extendedBoundsMin != null) {
                builder.field("min", extendedBoundsMin);
            }
            if (extendedBoundsMax != null) {
                builder.field("max", extendedBoundsMax);
            }
            builder.endObject();
        }

        return builder;
    }
}
@@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Objects;

/**
 * The interval the date histogram is based on.
@@ -74,6 +75,23 @@ public class DateHistogramInterval implements Writeable<DateHistogramInterval> {
        return expression;
    }

    @Override
    public int hashCode() {
        return Objects.hash(expression);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        DateHistogramInterval other = (DateHistogramInterval) obj;
        return Objects.equals(expression, other.expression);
    }

    @Override
    public DateHistogramInterval readFrom(StreamInput in) throws IOException {
        return new DateHistogramInterval(in.readString());
@@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregator.DateHistogramFactory;
import org.elasticsearch.search.aggregations.support.ValueType;
@@ -97,15 +96,11 @@ public class DateHistogramParser extends HistogramParser {

    @Override
    protected long parseStringOffset(String offset) throws IOException {
        if (offset.charAt(0) == '-') {
            return -TimeValue.parseTimeValue(offset.substring(1), null, getClass().getSimpleName() + ".parseOffset").millis();
        }
        int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
        return TimeValue.parseTimeValue(offset.substring(beginIndex), null, getClass().getSimpleName() + ".parseOffset").millis();
        return DateHistogramFactory.parseStringOffset(offset);
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { HistogramAggregator.DateHistogramFactory.PROTOTYPE };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return HistogramAggregator.DateHistogramFactory.PROTOTYPE;
    }
}
@@ -59,6 +59,11 @@ public class ExtendedBounds implements ToXContent {
        this.max = max;
    }

    public ExtendedBounds(String minAsStr, String maxAsStr) {
        this.minAsStr = minAsStr;
        this.maxAsStr = maxAsStr;
    }

    void processAndValidate(String aggName, SearchContext context, ValueParser parser) {
        assert parser != null;
        if (minAsStr != null) {
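The new two-argument constructor keeps the bounds as raw strings; they are only resolved in processAndValidate(...) once a value parser is available on the shard. A small sketch, assuming the strings are whatever date expressions the parser accepts (the literal values below are illustrative):

    // Bounds stay unparsed until processAndValidate runs at search time.
    ExtendedBounds bounds = new ExtendedBounds("2015-01-01", "2015-12-31");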
@@ -90,6 +95,8 @@ public class ExtendedBounds implements ToXContent {
        } else {
            out.writeBoolean(false);
        }
        out.writeOptionalString(minAsStr);
        out.writeOptionalString(maxAsStr);
    }

    static ExtendedBounds readFrom(StreamInput in) throws IOException {
@@ -100,6 +107,8 @@ public class ExtendedBounds implements ToXContent {
        if (in.readBoolean()) {
            bounds.max = in.readLong();
        }
        bounds.minAsStr = in.readOptionalString();
        bounds.maxAsStr = in.readOptionalString();
        return bounds;
    }

@@ -139,9 +148,13 @@ public class ExtendedBounds implements ToXContent {
        builder.startObject(EXTENDED_BOUNDS_FIELD.getPreferredName());
        if (min != null) {
            builder.field(MIN_FIELD.getPreferredName(), min);
        } else {
            builder.field(MIN_FIELD.getPreferredName(), minAsStr);
        }
        if (max != null) {
            builder.field(MAX_FIELD.getPreferredName(), max);
        } else {
            builder.field(MAX_FIELD.getPreferredName(), maxAsStr);
        }
        builder.endObject();
        return builder;
@@ -162,7 +162,7 @@ public class HistogramAggregator extends BucketsAggregator {
        Releasables.close(bucketOrds);
    }

    public static class Factory<AF extends Factory<AF>> extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, Factory<AF>> {
    public static class Factory<AF extends Factory<AF>> extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, AF> {

        public static final Factory PROTOTYPE = new Factory("");

@@ -286,7 +286,8 @@ public class HistogramAggregator extends BucketsAggregator {
        @Override
        protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {

            builder.field(Rounding.Interval.INTERVAL_FIELD.getPreferredName(), interval);
            builder.field(Rounding.Interval.INTERVAL_FIELD.getPreferredName());
            doXContentInterval(builder, params);
            builder.field(Rounding.OffsetRounding.OFFSET_FIELD.getPreferredName(), offset);

            if (order != null) {
@@ -305,6 +306,11 @@ public class HistogramAggregator extends BucketsAggregator {
            return builder;
        }

        protected XContentBuilder doXContentInterval(XContentBuilder builder, Params params) throws IOException {
            builder.value(interval);
            return builder;
        }

        @Override
        public String getWriteableName() {
            return InternalHistogram.TYPE.name();
@@ -413,6 +419,20 @@ public class HistogramAggregator extends BucketsAggregator {
            return this;
        }

        public DateHistogramFactory offset(String offset) {
            return offset(parseStringOffset(offset));
        }

        protected static long parseStringOffset(String offset) {
            if (offset.charAt(0) == '-') {
                return -TimeValue.parseTimeValue(offset.substring(1), null, DateHistogramFactory.class.getSimpleName() + ".parseOffset")
                        .millis();
            }
            int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
            return TimeValue.parseTimeValue(offset.substring(beginIndex), null, DateHistogramFactory.class.getSimpleName() + ".parseOffset")
                    .millis();
        }

        public DateHistogramInterval dateHistogramInterval() {
            return dateHistogramInterval;
        }
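For reference, the offset grammar accepted by parseStringOffset is an optional sign followed by a time value; the sign only decides whether the parsed milliseconds are negated. A few worked examples (the millisecond values follow from TimeValue's standard units):

    parseStringOffset("1h");   // 3600000
    parseStringOffset("+1h");  // 3600000, the leading '+' is skipped
    parseStringOffset("-30m"); // -1800000, parsed then negated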
@@ -420,6 +440,7 @@ public class HistogramAggregator extends BucketsAggregator {
        @Override
        protected Rounding createRounding() {
            TimeZoneRounding.Builder tzRoundingBuilder;
            if (dateHistogramInterval != null) {
                DateTimeUnit dateTimeUnit = DATE_FIELD_UNITS.get(dateHistogramInterval.toString());
                if (dateTimeUnit != null) {
                    tzRoundingBuilder = TimeZoneRounding.builder(dateTimeUnit);
@@ -428,6 +449,10 @@ public class HistogramAggregator extends BucketsAggregator {
                    tzRoundingBuilder = TimeZoneRounding.builder(TimeValue.parseTimeValue(dateHistogramInterval.toString(), null, getClass()
                            .getSimpleName() + ".interval"));
                }
            } else {
                // the interval is an integer time value in millis?
                tzRoundingBuilder = TimeZoneRounding.builder(TimeValue.timeValueMillis(interval()));
            }
            if (timeZone() != null) {
                tzRoundingBuilder.timeZone(timeZone());
            }
@@ -454,6 +479,16 @@ public class HistogramAggregator extends BucketsAggregator {
            return InternalDateHistogram.TYPE.name();
        }

        @Override
        protected XContentBuilder doXContentInterval(XContentBuilder builder, Params params) throws IOException {
            if (dateHistogramInterval == null) {
                super.doXContentInterval(builder, params);
            } else {
                builder.value(dateHistogramInterval.toString());
            }
            return builder;
        }

        @Override
        protected DateHistogramFactory createFactoryFromStream(String name, StreamInput in)
                throws IOException {
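Note: the doXContentInterval hook introduced above lets the date variant render the interval in its original expression form while the base factory keeps emitting the numeric value. Roughly, the rendered field differs as follows (a sketch of the intended output, not literal test output):

    "interval" : 3600000   // base Factory: numeric interval
    "interval" : "1d"      // DateHistogramFactory with a DateHistogramInterval set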
@@ -1,134 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;

import java.io.IOException;

/**
 * Builder for the {@link Histogram} aggregation.
 */
public class HistogramBuilder extends ValuesSourceAggregationBuilder<HistogramBuilder> {

    private Long interval;
    private Histogram.Order order;
    private Long minDocCount;
    private Long extendedBoundsMin;
    private Long extendedBoundsMax;
    private Long offset;

    /**
     * Constructs a new histogram aggregation builder.
     *
     * @param name The name of the aggregation (will serve as the unique identifier for the aggregation result in the response)
     */
    public HistogramBuilder(String name) {
        super(name, InternalHistogram.TYPE.name());
    }

    /**
     * Sets the interval for the histogram.
     *
     * @param interval The interval for the histogram
     * @return This builder
     */
    public HistogramBuilder interval(long interval) {
        this.interval = interval;
        return this;
    }

    /**
     * Sets the order by which the buckets will be returned.
     *
     * @param order The order by which the buckets will be returned
     * @return This builder
     */
    public HistogramBuilder order(Histogram.Order order) {
        this.order = order;
        return this;
    }

    /**
     * Sets the minimum document count per bucket. Buckets with less documents than this min value will not be returned.
     *
     * @param minDocCount The minimum document count per bucket
     * @return This builder
     */
    public HistogramBuilder minDocCount(long minDocCount) {
        this.minDocCount = minDocCount;
        return this;
    }

    /**
     * Set extended bounds for the histogram. In case the lower value in the
     * histogram would be greater than <code>min</code> or the upper value would
     * be less than <code>max</code>, empty buckets will be generated.
     */
    public HistogramBuilder extendedBounds(Long min, Long max) {
        extendedBoundsMin = min;
        extendedBoundsMax = max;
        return this;
    }

    /**
     * Set the offset to apply to shift bucket boundaries.
     */
    public HistogramBuilder offset(long offset) {
        this.offset = offset;
        return this;
    }

    @Override
    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
        if (interval == null) {
            throw new SearchSourceBuilderException("[interval] must be defined for histogram aggregation [" + getName() + "]");
        }
        builder.field("interval", interval);

        if (order != null) {
            builder.field("order");
            order.toXContent(builder, params);
        }

        if (offset != null) {
            builder.field("offset", offset);
        }

        if (minDocCount != null) {
            builder.field("min_doc_count", minDocCount);
        }

        if (extendedBoundsMin != null || extendedBoundsMax != null) {
            builder.startObject(ExtendedBounds.EXTENDED_BOUNDS_FIELD.getPreferredName());
            if (extendedBoundsMin != null) {
                builder.field("min", extendedBoundsMin);
            }
            if (extendedBoundsMax != null) {
                builder.field("max", extendedBoundsMax);
            }
            builder.endObject();
        }
        return builder;
    }

}
@@ -163,7 +163,7 @@ public class HistogramParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { HistogramAggregator.Factory.PROTOTYPE };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return HistogramAggregator.Factory.PROTOTYPE;
    }
}
@@ -1,57 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.missing;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;

import java.io.IOException;

/**
 * Builder for the {@link Missing} aggregation.
 */
public class MissingBuilder extends AggregationBuilder<MissingBuilder> {

    private String field;

    /**
     * Sole constructor.
     */
    public MissingBuilder(String name) {
        super(name, InternalMissing.TYPE.name());
    }

    /**
     * Set the field to count missing values on.
     */
    public MissingBuilder field(String field) {
        this.field = field;
        return this;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (field != null) {
            builder.field("field", field);
        }
        return builder.endObject();
    }
}
@@ -53,7 +53,7 @@ public class MissingParser extends AnyValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new MissingAggregator.Factory(null, null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new MissingAggregator.Factory(null, null);
    }
}
@@ -1,61 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.nested;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;

import java.io.IOException;

/**
 * Builder for the {@link Nested} aggregation.
 */
public class NestedBuilder extends AggregationBuilder<NestedBuilder> {

    private String path;

    /**
     * Sole constructor.
     */
    public NestedBuilder(String name) {
        super(name, InternalNested.TYPE.name());
    }

    /**
     * Set the path to use for this nested aggregation. The path must match
     * the path to a nested object in the mappings. This parameter is
     * compulsory.
     */
    public NestedBuilder path(String path) {
        this.path = path;
        return this;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (path == null) {
            throw new SearchSourceBuilderException("nested path must be set on nested aggregation [" + getName() + "]");
        }
        builder.field("path", path);
        return builder.endObject();
    }
}
@@ -66,7 +66,7 @@ public class NestedParser implements Aggregator.Parser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new NestedAggregator.Factory(null, null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new NestedAggregator.Factory(null, null);
    }
}
@@ -1,59 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.nested;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;

import java.io.IOException;

/**
 * Builder for the {@link ReverseNested} aggregation.
 */
public class ReverseNestedBuilder extends AggregationBuilder<ReverseNestedBuilder> {

    private String path;

    /**
     * Sole constructor.
     */
    public ReverseNestedBuilder(String name) {
        super(name, InternalReverseNested.TYPE.name());
    }

    /**
     * Set the path to use for this nested aggregation. The path must match
     * the path to a nested object in the mappings. If it is not specified
     * then this aggregation will go back to the root document.
     */
    public ReverseNestedBuilder path(String path) {
        this.path = path;
        return this;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (path != null) {
            builder.field("path", path);
        }
        return builder.endObject();
    }
}
@@ -65,7 +65,7 @@ public class ReverseNestedParser implements Aggregator.Parser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new ReverseNestedAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new ReverseNestedAggregator.Factory(null);
    }
}
@@ -1,81 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.range;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 *
 */
public abstract class AbstractRangeBuilder<B extends AbstractRangeBuilder<B>> extends ValuesSourceAggregationBuilder<B> {

    protected static class Range implements ToXContent {

        private String key;
        private Object from;
        private Object to;

        public Range(String key, Object from, Object to) {
            this.key = key;
            this.from = from;
            this.to = to;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            if (key != null) {
                builder.field("key", key);
            }
            if (from != null) {
                builder.field("from", from);
            }
            if (to != null) {
                builder.field("to", to);
            }
            return builder.endObject();
        }
    }

    protected List<Range> ranges = new ArrayList<>();

    protected AbstractRangeBuilder(String name, String type) {
        super(name, type);
    }

    @Override
    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
        if (ranges.isEmpty()) {
            throw new SearchSourceBuilderException("at least one range must be defined for range aggregation [" + getName() + "]");
        }
        builder.startArray("ranges");
        for (Range range : ranges) {
            range.toXContent(builder, params);
        }
        return builder.endArray();
    }
}
@@ -75,8 +75,8 @@ public class RangeAggregator extends BucketsAggregator {
        protected double to = Double.POSITIVE_INFINITY;
        protected String toAsStr;

        public Range(String key, double from, double to) {
            this(key, from, null, to, null);
        public Range(String key, Double from, Double to) {
            this(key, from == null ? Double.NEGATIVE_INFINITY : from, null, to == null ? Double.POSITIVE_INFINITY : to, null);
        }

        public Range(String key, String from, String to) {
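Boxing the bounds to Double makes "unbounded" representable as null and normalizes it at construction time. Worked examples of the new constructor (the key names are illustrative):

    new Range("cheap", null, 50.0);      // from becomes Double.NEGATIVE_INFINITY
    new Range("mid", 50.0, 100.0);       // both bounds kept as given
    new Range("expensive", 100.0, null); // to becomes Double.POSITIVE_INFINITY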
@@ -396,20 +396,25 @@ public class RangeAggregator extends BucketsAggregator {
        }
    }

    public static class Factory<AF extends Factory<AF>> extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, AF> {
    public static abstract class AbstractFactory<AF extends AbstractFactory<AF, R>, R extends Range>
            extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, AF> {

        private final InternalRange.Factory rangeFactory;
        private final List<? extends Range> ranges;
        private List<R> ranges = new ArrayList<>();
        private boolean keyed = false;

        public Factory(String name, List<? extends Range> ranges) {
            this(name, InternalRange.FACTORY, ranges);
        }

        protected Factory(String name, InternalRange.Factory rangeFactory, List<? extends Range> ranges) {
        protected AbstractFactory(String name, InternalRange.Factory rangeFactory) {
            super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
            this.rangeFactory = rangeFactory;
            this.ranges = ranges;
        }

        public AF addRange(R range) {
            ranges.add(range);
            return (AF) this;
        }

        public List<R> ranges() {
            return ranges;
        }

        public AF keyed(boolean keyed) {
@@ -443,19 +448,12 @@ public class RangeAggregator extends BucketsAggregator {
        @Override
        protected AF innerReadFrom(String name, ValuesSourceType valuesSourceType,
                ValueType targetValueType, StreamInput in) throws IOException {
            Factory<AF> factory = createFactoryFromStream(name, in);
            AbstractFactory<AF, R> factory = createFactoryFromStream(name, in);
            factory.keyed = in.readBoolean();
            return (AF) factory;
        }

        protected Factory<AF> createFactoryFromStream(String name, StreamInput in) throws IOException {
            int size = in.readVInt();
            List<Range> ranges = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                ranges.add(Range.PROTOTYPE.readFrom(in));
            }
            return new Factory<AF>(name, ranges);
        }
        protected abstract AbstractFactory<AF, R> createFactoryFromStream(String name, StreamInput in) throws IOException;

        @Override
        protected void innerWriteTo(StreamOutput out) throws IOException {
@@ -473,10 +471,93 @@ public class RangeAggregator extends BucketsAggregator {

        @Override
        protected boolean innerEquals(Object obj) {
            Factory other = (Factory) obj;
            AbstractFactory<AF, R> other = (AbstractFactory<AF, R>) obj;
            return Objects.equals(ranges, other.ranges)
                    && Objects.equals(keyed, other.keyed);
        }
    }

    public static class Factory extends AbstractFactory<Factory, Range> {

        public Factory(String name) {
            super(name, InternalRange.FACTORY);
        }

        /**
         * Add a new range to this aggregation.
         *
         * @param key
         *            the key to use for this range in the response
         * @param from
         *            the lower bound on the distances, inclusive
         * @param to
         *            the upper bound on the distances, exclusive
         */
        public Factory addRange(String key, double from, double to) {
            addRange(new Range(key, from, to));
            return this;
        }

        /**
         * Same as {@link #addRange(String, double, double)} but the key will be
         * automatically generated based on <code>from</code> and
         * <code>to</code>.
         */
        public Factory addRange(double from, double to) {
            return addRange(null, from, to);
        }

        /**
         * Add a new range with no lower bound.
         *
         * @param key
         *            the key to use for this range in the response
         * @param to
         *            the upper bound on the distances, exclusive
         */
        public Factory addUnboundedTo(String key, double to) {
            addRange(new Range(key, null, to));
            return this;
        }

        /**
         * Same as {@link #addUnboundedTo(String, double)} but the key will be
         * computed automatically.
         */
        public Factory addUnboundedTo(double to) {
            return addUnboundedTo(null, to);
        }

        /**
         * Add a new range with no upper bound.
         *
         * @param key
         *            the key to use for this range in the response
         * @param from
         *            the lower bound on the distances, inclusive
         */
        public Factory addUnboundedFrom(String key, double from) {
            addRange(new Range(key, from, null));
            return this;
        }

        /**
         * Same as {@link #addUnboundedFrom(String, double)} but the key will be
         * computed automatically.
         */
        public Factory addUnboundedFrom(double from) {
            return addUnboundedFrom(null, from);
        }

        @Override
        protected Factory createFactoryFromStream(String name, StreamInput in) throws IOException {
            int size = in.readVInt();
            Factory factory = new Factory(name);
            for (int i = 0; i < size; i++) {
                factory.addRange(Range.PROTOTYPE.readFrom(in));
            }
            return factory;
        }
    }

}
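Taken together, the concrete Factory replaces the removed RangeBuilder with the same fluent surface, built only from the methods added above. A usage sketch (the aggregation name and bounds are illustrative):

    RangeAggregator.Factory prices = new RangeAggregator.Factory("price_ranges")
            .addUnboundedTo("cheap", 50)        // *..50)
            .addRange("mid", 50, 100)           // [50..100)
            .addUnboundedFrom("expensive", 100) // [100..*
            .keyed(true);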
@@ -1,115 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.range;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;

/**
 * Builder for the {@link org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder.Range} aggregation.
 */
public class RangeBuilder extends AbstractRangeBuilder<RangeBuilder> {

    private String format;

    /**
     * Sole constructor.
     */
    public RangeBuilder(String name) {
        super(name, InternalRange.TYPE.name());
    }

    /**
     * Add a new range to this aggregation.
     *
     * @param key the key to use for this range in the response
     * @param from the lower bound on the distances, inclusive
     * @param to the upper bound on the distances, exclusive
     */
    public RangeBuilder addRange(String key, double from, double to) {
        ranges.add(new Range(key, from, to));
        return this;
    }

    /**
     * Same as {@link #addRange(String, double, double)} but the key will be
     * automatically generated based on <code>from</code> and <code>to</code>.
     */
    public RangeBuilder addRange(double from, double to) {
        return addRange(null, from, to);
    }

    /**
     * Add a new range with no lower bound.
     *
     * @param key the key to use for this range in the response
     * @param to the upper bound on the distances, exclusive
     */
    public RangeBuilder addUnboundedTo(String key, double to) {
        ranges.add(new Range(key, null, to));
        return this;
    }

    /**
     * Same as {@link #addUnboundedTo(String, double)} but the key will be
     * computed automatically.
     */
    public RangeBuilder addUnboundedTo(double to) {
        return addUnboundedTo(null, to);
    }

    /**
     * Add a new range with no upper bound.
     *
     * @param key the key to use for this range in the response
     * @param from the lower bound on the distances, inclusive
     */
    public RangeBuilder addUnboundedFrom(String key, double from) {
        ranges.add(new Range(key, from, null));
        return this;
    }

    /**
     * Same as {@link #addUnboundedFrom(String, double)} but the key will be
     * computed automatically.
     */
    public RangeBuilder addUnboundedFrom(double from) {
        return addUnboundedFrom(null, from);
    }

    /**
     * Set the format to use to display values.
     */
    public RangeBuilder format(String format) {
        this.format = format;
        return this;
    }


    @Override
    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
        super.doInternalXContent(builder, params);
        if (format != null) {
            builder.field("format", format);
        }
        return builder;
    }

}
@@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

@@ -53,10 +52,13 @@ public class RangeParser extends NumericValuesSourceParser {
    }

    @Override
    protected RangeAggregator.Factory<?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
    protected RangeAggregator.AbstractFactory<?, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
        RangeAggregator.Factory factory = new RangeAggregator.Factory(aggregationName);
        List<? extends Range> ranges = (List<? extends Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
        RangeAggregator.Factory factory = new RangeAggregator.Factory(aggregationName, ranges);
        for (Range range : ranges) {
            factory.addRange(range);
        }
        Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
        if (keyed != null) {
            factory.keyed(keyed);
@@ -92,7 +94,7 @@ public class RangeParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new RangeAggregator.Factory(null, Collections.emptyList()) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new RangeAggregator.Factory(null);
    }
}
@@ -20,17 +20,17 @@
package org.elasticsearch.search.aggregations.bucket.range.date;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Factory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.AbstractFactory;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.joda.time.DateTime;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class DateRangeAggregatorFactory extends Factory<DateRangeAggregatorFactory> {
public class DateRangeAggregatorFactory extends AbstractFactory<DateRangeAggregatorFactory, RangeAggregator.Range> {

    public DateRangeAggregatorFactory(String name, List<Range> ranges) {
        super(name, InternalDateRange.FACTORY, ranges);
    public DateRangeAggregatorFactory(String name) {
        super(name, InternalDateRange.FACTORY);
    }

    @Override
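Note: the self-referential bound AbstractFactory<AF extends AbstractFactory<AF, R>, R extends Range> is what lets inherited methods like addRange(R) and keyed(boolean) return the concrete subtype, so date-specific call sites keep their type through chained calls. A stripped-down sketch of the same pattern (the names here are illustrative only, not code from this commit):

    abstract class Base<SELF extends Base<SELF>> {
        @SuppressWarnings("unchecked")
        SELF keyed(boolean keyed) {
            return (SELF) this; // returns the subtype, not Base
        }
    }

    class DateFactory extends Base<DateFactory> {
        DateFactory addRange(String from, String to) {
            return this;
        }
    }

    // new DateFactory().keyed(true).addRange("2015-01-01", "2015-02-01") compiles:
    // keyed() already yields DateFactory, so the chain stays type-safe.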
@@ -38,14 +38,217 @@ public class DateRangeAggregatorFactory extends Factory<DateRangeAggregatorFactory> {
         return InternalDateRange.TYPE.name();
     }

+    /**
+     * Add a new range to this aggregation.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the dates, inclusive
+     * @param to
+     *            the upper bound on the dates, exclusive
+     */
+    public DateRangeAggregatorFactory addRange(String key, String from, String to) {
+        addRange(new Range(key, from, to));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but the key will be
+     * automatically generated based on <code>from</code> and <code>to</code>.
+     */
+    public DateRangeAggregatorFactory addRange(String from, String to) {
+        return addRange(null, from, to);
+    }
+
+    /**
+     * Add a new range with no lower bound.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param to
+     *            the upper bound on the dates, exclusive
+     */
+    public DateRangeAggregatorFactory addUnboundedTo(String key, String to) {
+        addRange(new Range(key, null, to));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedTo(String, String)} but the key will be
+     * computed automatically.
+     */
+    public DateRangeAggregatorFactory addUnboundedTo(String to) {
+        return addUnboundedTo(null, to);
+    }
+
+    /**
+     * Add a new range with no upper bound.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the dates, inclusive
+     */
+    public DateRangeAggregatorFactory addUnboundedFrom(String key, String from) {
+        addRange(new Range(key, from, null));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedFrom(String, String)} but the key will be
+     * computed automatically.
+     */
+    public DateRangeAggregatorFactory addUnboundedFrom(String from) {
+        return addUnboundedFrom(null, from);
+    }
+
+    /**
+     * Add a new range to this aggregation.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the dates, inclusive
+     * @param to
+     *            the upper bound on the dates, exclusive
+     */
+    public DateRangeAggregatorFactory addRange(String key, double from, double to) {
+        addRange(new Range(key, from, to));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addRange(String, double, double)} but the key will be
+     * automatically generated based on <code>from</code> and <code>to</code>.
+     */
+    public DateRangeAggregatorFactory addRange(double from, double to) {
+        return addRange(null, from, to);
+    }
+
+    /**
+     * Add a new range with no lower bound.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param to
+     *            the upper bound on the dates, exclusive
+     */
+    public DateRangeAggregatorFactory addUnboundedTo(String key, double to) {
+        addRange(new Range(key, null, to));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedTo(String, double)} but the key will be
+     * computed automatically.
+     */
+    public DateRangeAggregatorFactory addUnboundedTo(double to) {
+        return addUnboundedTo(null, to);
+    }
+
+    /**
+     * Add a new range with no upper bound.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the dates, inclusive
+     */
+    public DateRangeAggregatorFactory addUnboundedFrom(String key, double from) {
+        addRange(new Range(key, from, null));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedFrom(String, double)} but the key will be
+     * computed automatically.
+     */
+    public DateRangeAggregatorFactory addUnboundedFrom(double from) {
+        return addUnboundedFrom(null, from);
+    }
+
+    /**
+     * Add a new range to this aggregation.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the dates, inclusive
+     * @param to
+     *            the upper bound on the dates, exclusive
+     */
+    public DateRangeAggregatorFactory addRange(String key, DateTime from, DateTime to) {
+        addRange(new Range(key, convertDateTime(from), convertDateTime(to)));
+        return this;
+    }
+
+    private Double convertDateTime(DateTime dateTime) {
+        if (dateTime == null) {
+            return null;
+        } else {
+            return (double) dateTime.getMillis();
+        }
+    }
+
+    /**
+     * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
+     * automatically generated based on <code>from</code> and <code>to</code>.
+     */
+    public DateRangeAggregatorFactory addRange(DateTime from, DateTime to) {
+        return addRange(null, from, to);
+    }
+
+    /**
+     * Add a new range with no lower bound.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param to
+     *            the upper bound on the dates, exclusive
+     */
+    public DateRangeAggregatorFactory addUnboundedTo(String key, DateTime to) {
+        addRange(new Range(key, null, convertDateTime(to)));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
+     * computed automatically.
+     */
+    public DateRangeAggregatorFactory addUnboundedTo(DateTime to) {
+        return addUnboundedTo(null, to);
+    }
+
+    /**
+     * Add a new range with no upper bound.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the dates, inclusive
+     */
+    public DateRangeAggregatorFactory addUnboundedFrom(String key, DateTime from) {
+        addRange(new Range(key, convertDateTime(from), null));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
+     * computed automatically.
+     */
+    public DateRangeAggregatorFactory addUnboundedFrom(DateTime from) {
+        return addUnboundedFrom(null, from);
+    }
+
     @Override
     protected DateRangeAggregatorFactory createFactoryFromStream(String name, StreamInput in) throws IOException {
         int size = in.readVInt();
-        List<Range> ranges = new ArrayList<>(size);
+        DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory(name);
         for (int i = 0; i < size; i++) {
-            ranges.add(Range.PROTOTYPE.readFrom(in));
+            factory.addRange(Range.PROTOTYPE.readFrom(in));
         }
-        return new DateRangeAggregatorFactory(name, ranges);
+        return factory;
     }

 }
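Taken together, these additions move the fluent range-building API from the deleted DateRangeBuilder (next hunk) onto the factory itself. A minimal usage sketch, using only methods added above; attaching the factory to a request and selecting the field are assumed to work as on any values-source factory.

// Usage sketch only, not part of the commit.
DateRangeAggregatorFactory quarters = new DateRangeAggregatorFactory("quarters");
quarters.addUnboundedTo("2015-01-01");            // key generated from the bounds
quarters.addRange("2015-01-01", "2015-04-01");
quarters.addUnboundedFrom("2015-04-01");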
@@ -1,114 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.aggregations.bucket.range.date;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
-
-import java.io.IOException;
-
-/**
- * Builder for the {@code DateRange} aggregation.
- */
-public class DateRangeBuilder extends AbstractRangeBuilder<DateRangeBuilder> {
-
-    private String format;
-
-    /**
-     * Sole constructor.
-     */
-    public DateRangeBuilder(String name) {
-        super(name, InternalDateRange.TYPE.name());
-    }
-
-    /**
-     * Add a new range to this aggregation.
-     *
-     * @param key the key to use for this range in the response
-     * @param from the lower bound on the distances, inclusive
-     * @param to the upper bound on the distances, exclusive
-     */
-    public DateRangeBuilder addRange(String key, Object from, Object to) {
-        ranges.add(new Range(key, from, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addRange(String, Object, Object)} but the key will be
-     * automatically generated based on <code>from</code> and <code>to</code>.
-     */
-    public DateRangeBuilder addRange(Object from, Object to) {
-        return addRange(null, from, to);
-    }
-
-    /**
-     * Add a new range with no lower bound.
-     *
-     * @param key the key to use for this range in the response
-     * @param to the upper bound on the distances, exclusive
-     */
-    public DateRangeBuilder addUnboundedTo(String key, Object to) {
-        ranges.add(new Range(key, null, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedTo(String, Object)} but the key will be
-     * computed automatically.
-     */
-    public DateRangeBuilder addUnboundedTo(Object to) {
-        return addUnboundedTo(null, to);
-    }
-
-    /**
-     * Add a new range with no upper bound.
-     *
-     * @param key the key to use for this range in the response
-     * @param from the lower bound on the distances, inclusive
-     */
-    public DateRangeBuilder addUnboundedFrom(String key, Object from) {
-        ranges.add(new Range(key, from, null));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedFrom(String, Object)} but the key will be
-     * computed automatically.
-     */
-    public DateRangeBuilder addUnboundedFrom(Object from) {
-        return addUnboundedFrom(null, from);
-    }
-
-    /**
-     * Set the format to use to display values.
-     */
-    public DateRangeBuilder format(String format) {
-        this.format = format;
-        return this;
-    }
-
-    @Override
-    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
-        super.doInternalXContent(builder, params);
-        if (format != null) {
-            builder.field("format", format);
-        }
-        return builder;
-    }
-}
@@ -26,7 +26,6 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

-import java.util.Collections;
 import java.util.List;
 import java.util.Map;

@@ -47,8 +46,11 @@ public class DateRangeParser extends RangeParser {
     @Override
     protected DateRangeAggregatorFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory(aggregationName);
         List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
-        DateRangeAggregatorFactory factory = new DateRangeAggregatorFactory(aggregationName, ranges);
+        for (Range range : ranges) {
+            factory.addRange(range);
+        }
         Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
         if (keyed != null) {
             factory.keyed(keyed);
@@ -57,7 +59,7 @@ public class DateRangeParser extends RangeParser {
     }

     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new DateRangeAggregatorFactory(null, Collections.emptyList()) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new DateRangeAggregatorFactory(null);
     }
 }
@@ -1,260 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.range.geodistance;
-
-import org.elasticsearch.common.geo.GeoDistance;
-import org.elasticsearch.common.geo.GeoPoint;
-import org.elasticsearch.common.unit.DistanceUnit;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * Builder for the {@link GeoDistance} aggregation.
- */
-public class GeoDistanceBuilder extends AggregationBuilder<GeoDistanceBuilder> {
-
-    /**
-     * A range of values.
-     */
-    public static class Range implements ToXContent {
-
-        private String key;
-        private Double from;
-        private Double to;
-
-        /**
-         * Create a new range.
-         * @param key  the identifier of this range
-         * @param from the lower bound (inclusive)
-         * @param to   the upper bound (exclusive)
-         */
-        public Range(String key, Double from, Double to) {
-            this.key = key;
-            this.from = from;
-            this.to = to;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            if (from != null) {
-                builder.field("from", from.doubleValue());
-            }
-            if (to != null) {
-                builder.field("to", to.doubleValue());
-            }
-            if (key != null) {
-                builder.field("key", key);
-            }
-            return builder.endObject();
-        }
-
-    }
-
-    private String field;
-    private DistanceUnit unit;
-    private GeoDistance distanceType;
-    private GeoPoint point;
-
-    private List<Range> ranges = new ArrayList<>();
-
-    /**
-     * Sole constructor.
-     */
-    public GeoDistanceBuilder(String name) {
-        super(name, InternalGeoDistance.TYPE.name());
-    }
-
-    /**
-     * Set the field to use to compute distances.
-     */
-    public GeoDistanceBuilder field(String field) {
-        this.field = field;
-        return this;
-    }
-
-    /**
-     * Set the unit to use for distances, default is kilometers.
-     */
-    public GeoDistanceBuilder unit(DistanceUnit unit) {
-        this.unit = unit;
-        return this;
-    }
-
-    /**
-     * Set the {@link GeoDistance distance type} to use, defaults to
-     * {@link GeoDistance#SLOPPY_ARC}.
-     */
-    public GeoDistanceBuilder distanceType(GeoDistance distanceType) {
-        this.distanceType = distanceType;
-        return this;
-    }
-
-    /**
-     * Set the point to calculate distances from using a
-     * <code>lat,lon</code> notation or geohash.
-     */
-    public GeoDistanceBuilder point(String latLon) {
-        return point(GeoPoint.parseFromLatLon(latLon));
-    }
-
-    /**
-     * Set the point to calculate distances from.
-     */
-    public GeoDistanceBuilder point(GeoPoint point) {
-        this.point = point;
-        return this;
-    }
-
-    /**
-     * Set the point to calculate distances from using its geohash.
-     */
-    public GeoDistanceBuilder geohash(String geohash) {
-        if (this.point == null) {
-            this.point = new GeoPoint();
-        }
-        this.point.resetFromGeoHash(geohash);
-        return this;
-    }
-
-    /**
-     * Set the latitude of the point to calculate distances from.
-     */
-    public GeoDistanceBuilder lat(double lat) {
-        if (this.point == null) {
-            point = new GeoPoint();
-        }
-        point.resetLat(lat);
-        return this;
-    }
-
-    /**
-     * Set the longitude of the point to calculate distances from.
-     */
-    public GeoDistanceBuilder lon(double lon) {
-        if (this.point == null) {
-            point = new GeoPoint();
-        }
-        point.resetLon(lon);
-        return this;
-    }
-
-    /**
-     * Add a new range to this aggregation.
-     *
-     * @param key the key to use for this range in the response
-     * @param from the lower bound on the distances, inclusive
-     * @param to the upper bound on the distances, exclusive
-     */
-    public GeoDistanceBuilder addRange(String key, double from, double to) {
-        ranges.add(new Range(key, from, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addRange(String, double, double)} but the key will be
-     * automatically generated based on <code>from</code> and <code>to</code>.
-     */
-    public GeoDistanceBuilder addRange(double from, double to) {
-        return addRange(null, from, to);
-    }
-
-    /**
-     * Add a new range with no lower bound.
-     *
-     * @param key the key to use for this range in the response
-     * @param to the upper bound on the distances, exclusive
-     */
-    public GeoDistanceBuilder addUnboundedTo(String key, double to) {
-        ranges.add(new Range(key, null, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedTo(String, double)} but the key will be
-     * computed automatically.
-     */
-    public GeoDistanceBuilder addUnboundedTo(double to) {
-        return addUnboundedTo(null, to);
-    }
-
-    /**
-     * Add a new range with no upper bound.
-     *
-     * @param key the key to use for this range in the response
-     * @param from the lower bound on the distances, inclusive
-     */
-    public GeoDistanceBuilder addUnboundedFrom(String key, double from) {
-        ranges.add(new Range(key, from, null));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedFrom(String, double)} but the key will be
-     * computed automatically.
-     */
-    public GeoDistanceBuilder addUnboundedFrom(double from) {
-        return addUnboundedFrom(null, from);
-    }
-
-    @Override
-    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (ranges.isEmpty()) {
-            throw new SearchSourceBuilderException("at least one range must be defined for geo_distance aggregation [" + getName() + "]");
-        }
-        if (point == null) {
-            throw new SearchSourceBuilderException("center point must be defined for geo_distance aggregation [" + getName() + "]");
-        }
-
-        if (field != null) {
-            builder.field("field", field);
-        }
-
-        if (unit != null) {
-            builder.field("unit", unit);
-        }
-
-        if (distanceType != null) {
-            builder.field("distance_type", distanceType.name().toLowerCase(Locale.ROOT));
-        }
-
-        builder.startObject("center")
-                .field("lat", point.lat())
-                .field("lon", point.lon())
-                .endObject();
-
-        builder.startArray("ranges");
-        for (Range range : ranges) {
-            range.toXContent(builder, params);
-        }
-        builder.endArray();
-
-        return builder.endObject();
-    }
-
-}
@@ -50,7 +50,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;

@@ -77,20 +76,20 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {

     public static class Range extends RangeAggregator.Range {

-        static final Range PROTOTYPE = new Range(null, -1, -1);
+        static final Range PROTOTYPE = new Range(null, null, null);

-        public Range(String key, double from, double to) {
+        public Range(String key, Double from, Double to) {
             super(key(key, from, to), from, to);
         }

-        private static String key(String key, double from, double to) {
+        private static String key(String key, Double from, Double to) {
             if (key != null) {
                 return key;
             }
             StringBuilder sb = new StringBuilder();
-            sb.append(from == 0 ? "*" : from);
+            sb.append((from == null || from == 0) ? "*" : from);
             sb.append("-");
-            sb.append(Double.isInfinite(to) ? "*" : to);
+            sb.append((to == null || Double.isInfinite(to)) ? "*" : to);
             return sb.toString();
         }

@@ -115,8 +114,11 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
     protected GeoDistanceFactory createFactory(
             String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map<ParseField, Object> otherOptions) {
         GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD);
+        GeoDistanceFactory factory = new GeoDistanceFactory(aggregationName, origin);
         List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
-        GeoDistanceFactory factory = new GeoDistanceFactory(aggregationName, origin, ranges);
+        for (Range range : ranges) {
+            factory.addRange(range);
+        }
         Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
         if (keyed != null) {
             factory.keyed(keyed);
@@ -199,20 +201,94 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {

         private final GeoPoint origin;
         private final InternalRange.Factory rangeFactory;
-        private final List<Range> ranges;
+        private List<Range> ranges = new ArrayList<>();
         private DistanceUnit unit = DistanceUnit.DEFAULT;
         private GeoDistance distanceType = GeoDistance.DEFAULT;
         private boolean keyed = false;

-        public GeoDistanceFactory(String name, GeoPoint origin, List<Range> ranges) {
-            this(name, origin, InternalGeoDistance.FACTORY, ranges);
+        public GeoDistanceFactory(String name, GeoPoint origin) {
+            this(name, origin, InternalGeoDistance.FACTORY);
         }

-        private GeoDistanceFactory(String name, GeoPoint origin, InternalRange.Factory rangeFactory, List<Range> ranges) {
+        private GeoDistanceFactory(String name, GeoPoint origin, InternalRange.Factory rangeFactory) {
             super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
             this.origin = origin;
             this.rangeFactory = rangeFactory;
-            this.ranges = ranges;
         }

+        public GeoDistanceFactory addRange(Range range) {
+            ranges.add(range);
+            return this;
+        }
+
+        /**
+         * Add a new range to this aggregation.
+         *
+         * @param key
+         *            the key to use for this range in the response
+         * @param from
+         *            the lower bound on the distances, inclusive
+         * @param to
+         *            the upper bound on the distances, exclusive
+         */
+        public GeoDistanceFactory addRange(String key, double from, double to) {
+            ranges.add(new Range(key, from, to));
+            return this;
+        }
+
+        /**
+         * Same as {@link #addRange(String, double, double)} but the key will be
+         * automatically generated based on <code>from</code> and
+         * <code>to</code>.
+         */
+        public GeoDistanceFactory addRange(double from, double to) {
+            return addRange(null, from, to);
+        }
+
+        /**
+         * Add a new range with no lower bound.
+         *
+         * @param key
+         *            the key to use for this range in the response
+         * @param to
+         *            the upper bound on the distances, exclusive
+         */
+        public GeoDistanceFactory addUnboundedTo(String key, double to) {
+            ranges.add(new Range(key, null, to));
+            return this;
+        }
+
+        /**
+         * Same as {@link #addUnboundedTo(String, double)} but the key will be
+         * computed automatically.
+         */
+        public GeoDistanceFactory addUnboundedTo(double to) {
+            return addUnboundedTo(null, to);
+        }
+
+        /**
+         * Add a new range with no upper bound.
+         *
+         * @param key
+         *            the key to use for this range in the response
+         * @param from
+         *            the lower bound on the distances, inclusive
+         */
+        public GeoDistanceFactory addUnboundedFrom(String key, double from) {
+            addRange(new Range(key, from, null));
+            return this;
+        }
+
+        /**
+         * Same as {@link #addUnboundedFrom(String, double)} but the key will be
+         * computed automatically.
+         */
+        public GeoDistanceFactory addUnboundedFrom(double from) {
+            return addUnboundedFrom(null, from);
+        }
+
         public List<Range> range() {
             return ranges;
         }

         @Override
@@ -280,11 +356,10 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
                 String name, ValuesSourceType valuesSourceType, ValueType targetValueType, StreamInput in) throws IOException {
             GeoPoint origin = new GeoPoint(in.readDouble(), in.readDouble());
             int size = in.readVInt();
-            List<Range> ranges = new ArrayList<>(size);
+            GeoDistanceFactory factory = new GeoDistanceFactory(name, origin);
             for (int i = 0; i < size; i++) {
-                ranges.add(Range.PROTOTYPE.readFrom(in));
+                factory.addRange(Range.PROTOTYPE.readFrom(in));
             }
-            GeoDistanceFactory factory = new GeoDistanceFactory(name, origin, ranges);
             factory.keyed = in.readBoolean();
             factory.distanceType = GeoDistance.readGeoDistanceFrom(in);
             factory.unit = DistanceUnit.readDistanceUnit(in);
@@ -361,8 +436,8 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
     }

     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new GeoDistanceFactory(null, null, Collections.emptyList()) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new GeoDistanceFactory(null, null);
     }

 }
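The geo_distance factory follows the same pattern: origin at construction, ranges added afterwards. A minimal usage sketch, with made-up coordinates; GeoDistanceFactory is the nested factory shown above.

// Usage sketch only, not part of the commit.
GeoPoint origin = new GeoPoint(52.3760, 4.8940);
GeoDistanceFactory rings = new GeoDistanceFactory("rings", origin);
rings.addUnboundedTo(100.0);    // within 100 units of the origin (default unit)
rings.addRange(100.0, 300.0);
rings.addUnboundedFrom(300.0);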
@@ -27,17 +27,15 @@ import org.elasticsearch.common.network.Cidrs;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Factory;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.AbstractFactory;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Objects;

-public class IPv4RangeAggregatorFactory extends Factory<IPv4RangeAggregatorFactory> {
+public class IPv4RangeAggregatorFactory extends AbstractFactory<IPv4RangeAggregatorFactory, IPv4RangeAggregatorFactory.Range> {

-    public IPv4RangeAggregatorFactory(String name, List<Range> ranges) {
-        super(name, InternalIPv4Range.FACTORY, ranges);
+    public IPv4RangeAggregatorFactory(String name) {
+        super(name, InternalIPv4Range.FACTORY);
     }

     @Override
@@ -45,14 +43,87 @@ public class IPv4RangeAggregatorFactory extends Factory<IPv4RangeAggregatorFactory> {
         return InternalIPv4Range.TYPE.name();
     }

+    /**
+     * Add a new range to this aggregation.
+     *
+     * @param key
+     *            the key to use for this range in the response
+     * @param from
+     *            the lower bound on the addresses, inclusive
+     * @param to
+     *            the upper bound on the addresses, exclusive
+     */
+    public IPv4RangeAggregatorFactory addRange(String key, String from, String to) {
+        addRange(new Range(key, from, to));
+        return this;
+    }
+
+    /**
+     * Add a range based on a CIDR mask.
+     */
+    public IPv4RangeAggregatorFactory addMaskRange(String key, String mask) {
+        return addRange(new Range(key, mask));
+    }
+
+    /**
+     * Same as {@link #addMaskRange(String, String)} but uses the mask itself as
+     * a key.
+     */
+    public IPv4RangeAggregatorFactory addMaskRange(String mask) {
+        return addRange(new Range(mask, mask));
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but the key will be
+     * automatically generated.
+     */
+    public IPv4RangeAggregatorFactory addRange(String from, String to) {
+        return addRange(null, from, to);
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but there will be no
+     * lower bound.
+     */
+    public IPv4RangeAggregatorFactory addUnboundedTo(String key, String to) {
+        addRange(new Range(key, null, to));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedTo(String, String)} but the key will be
+     * generated automatically.
+     */
+    public IPv4RangeAggregatorFactory addUnboundedTo(String to) {
+        return addUnboundedTo(null, to);
+    }
+
+    /**
+     * Same as {@link #addRange(String, String, String)} but there will be no
+     * upper bound.
+     */
+    public IPv4RangeAggregatorFactory addUnboundedFrom(String key, String from) {
+        addRange(new Range(key, from, null));
+        return this;
+    }
+
+    /**
+     * Same as {@link #addUnboundedFrom(String, String)} but the key will be
+     * generated automatically.
+     */
+    public IPv4RangeAggregatorFactory addUnboundedFrom(String from) {
+        return addUnboundedFrom(null, from);
+    }
+
     @Override
     protected IPv4RangeAggregatorFactory createFactoryFromStream(String name, StreamInput in) throws IOException {
         int size = in.readVInt();
-        List<Range> ranges = new ArrayList<>(size);
+        IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory(name);
         for (int i = 0; i < size; i++) {
-            ranges.add(Range.PROTOTYPE.readFrom(in));
+            factory.addRange(Range.PROTOTYPE.readFrom(in));
         }
-        return new IPv4RangeAggregatorFactory(name, ranges);
+        return factory;
     }

     public static class Range extends RangeAggregator.Range {
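A minimal usage sketch of the new ip_range factory, using only methods added above; the mask values are examples only.

// Usage sketch only, not part of the commit.
IPv4RangeAggregatorFactory subnets = new IPv4RangeAggregatorFactory("subnets");
subnets.addMaskRange("10.0.0.0/25");               // mask doubles as the bucket key
subnets.addMaskRange("upper-half", "10.0.0.128/25");
subnets.addUnboundedFrom("11.0.0.0");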
@@ -1,110 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.range.ipv4;
-
-import org.elasticsearch.common.network.Cidrs;
-import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
-import org.elasticsearch.search.builder.SearchSourceBuilderException;
-
-/**
- * Builder for the {@code IPv4Range} aggregation.
- */
-public class IPv4RangeBuilder extends AbstractRangeBuilder<IPv4RangeBuilder> {
-
-    /**
-     * Sole constructor.
-     */
-    public IPv4RangeBuilder(String name) {
-        super(name, InternalIPv4Range.TYPE.name());
-    }
-
-    /**
-     * Add a new range to this aggregation.
-     *
-     * @param key the key to use for this range in the response
-     * @param from the lower bound on the distances, inclusive
-     * @param to the upper bound on the distances, exclusive
-     */
-    public IPv4RangeBuilder addRange(String key, String from, String to) {
-        ranges.add(new Range(key, from, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addMaskRange(String, String)} but uses the mask itself as a key.
-     */
-    public IPv4RangeBuilder addMaskRange(String mask) {
-        return addMaskRange(mask, mask);
-    }
-
-    /**
-     * Add a range based on a CIDR mask.
-     */
-    public IPv4RangeBuilder addMaskRange(String key, String mask) {
-        long[] fromTo;
-        try {
-            fromTo = Cidrs.cidrMaskToMinMax(mask);
-        } catch (IllegalArgumentException e) {
-            throw new SearchSourceBuilderException("invalid CIDR mask [" + mask + "] in ip_range aggregation [" + getName() + "]", e);
-        }
-        ranges.add(new Range(key, fromTo[0] == 0 ? null : fromTo[0], fromTo[1] == InternalIPv4Range.MAX_IP ? null : fromTo[1]));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addRange(String, String, String)} but the key will be
-     * automatically generated.
-     */
-    public IPv4RangeBuilder addRange(String from, String to) {
-        return addRange(null, from, to);
-    }
-
-    /**
-     * Same as {@link #addRange(String, String, String)} but there will be no lower bound.
-     */
-    public IPv4RangeBuilder addUnboundedTo(String key, String to) {
-        ranges.add(new Range(key, null, to));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedTo(String, String)} but the key will be
-     * generated automatically.
-     */
-    public IPv4RangeBuilder addUnboundedTo(String to) {
-        return addUnboundedTo(null, to);
-    }
-
-    /**
-     * Same as {@link #addRange(String, String, String)} but there will be no upper bound.
-     */
-    public IPv4RangeBuilder addUnboundedFrom(String key, String from) {
-        ranges.add(new Range(key, from, null));
-        return this;
-    }
-
-    /**
-     * Same as {@link #addUnboundedFrom(String, String)} but the key will be
-     * generated automatically.
-     */
-    public IPv4RangeBuilder addUnboundedFrom(String from) {
-        return addUnboundedFrom(null, from);
-    }
-}
@@ -29,7 +29,6 @@ import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;

@@ -55,9 +54,12 @@ public class IpRangeParser extends RangeParser {
     @Override
     protected IPv4RangeAggregatorFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory(aggregationName);
         List<IPv4RangeAggregatorFactory.Range> ranges = (List<IPv4RangeAggregatorFactory.Range>) otherOptions
                 .get(RangeAggregator.RANGES_FIELD);
-        IPv4RangeAggregatorFactory factory = new IPv4RangeAggregatorFactory(aggregationName, ranges);
+        for (IPv4RangeAggregatorFactory.Range range : ranges) {
+            factory.addRange(range);
+        }
         Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
         if (keyed != null) {
             factory.keyed(keyed);
@@ -66,8 +68,8 @@ public class IpRangeParser extends RangeParser {
     }

     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new IPv4RangeAggregatorFactory(null, Collections.emptyList()) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new IPv4RangeAggregatorFactory(null);
     }

 }
@@ -1,79 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.sampler;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder;
-
-import java.io.IOException;
-
-/**
- * Builder for the {@link Sampler} aggregation.
- */
-public class DiversifiedSamplerAggregationBuilder extends ValuesSourceAggregationBuilder<DiversifiedSamplerAggregationBuilder> {
-
-    private int shardSize = SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE;
-
-    int maxDocsPerValue = SamplerAggregator.DiversifiedFactory.MAX_DOCS_PER_VALUE_DEFAULT;
-    String executionHint = null;
-
-    /**
-     * Sole constructor.
-     */
-    public DiversifiedSamplerAggregationBuilder(String name) {
-        super(name, SamplerAggregator.DiversifiedFactory.TYPE.name());
-    }
-
-    /**
-     * Set the max num docs to be returned from each shard.
-     */
-    public DiversifiedSamplerAggregationBuilder shardSize(int shardSize) {
-        this.shardSize = shardSize;
-        return this;
-    }
-
-    public DiversifiedSamplerAggregationBuilder maxDocsPerValue(int maxDocsPerValue) {
-        this.maxDocsPerValue = maxDocsPerValue;
-        return this;
-    }
-
-    public DiversifiedSamplerAggregationBuilder executionHint(String executionHint) {
-        this.executionHint = executionHint;
-        return this;
-    }
-
-    @Override
-    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
-        if (shardSize != SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE) {
-            builder.field(SamplerAggregator.SHARD_SIZE_FIELD.getPreferredName(), shardSize);
-        }
-
-        if (maxDocsPerValue != SamplerAggregator.DiversifiedFactory.MAX_DOCS_PER_VALUE_DEFAULT) {
-            builder.field(SamplerAggregator.MAX_DOCS_PER_VALUE_FIELD.getPreferredName(), maxDocsPerValue);
-        }
-        if (executionHint != null) {
-            builder.field(SamplerAggregator.EXECUTION_HINT_FIELD.getPreferredName(), executionHint);
-        }
-
-        return builder;
-    }
-
-}
@@ -47,8 +47,7 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser {
     @Override
     protected SamplerAggregator.DiversifiedFactory createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory(aggregationName, valuesSourceType,
-                targetValueType);
+        SamplerAggregator.DiversifiedFactory factory = new SamplerAggregator.DiversifiedFactory(aggregationName);
         Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD);
         if (shardSize != null) {
             factory.shardSize(shardSize);
@@ -88,8 +87,8 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser {
     }

     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new SamplerAggregator.DiversifiedFactory(null, null, null) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new SamplerAggregator.DiversifiedFactory(null);
    }

 }
@@ -1,59 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.sampler;
-
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder;
-
-import java.io.IOException;
-
-/**
- * Builder for the {@link Sampler} aggregation.
- */
-public class SamplerAggregationBuilder extends ValuesSourceAggregationBuilder<SamplerAggregationBuilder> {
-
-    private int shardSize = SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE;
-
-    /**
-     * Sole constructor.
-     */
-    public SamplerAggregationBuilder(String name) {
-        super(name, InternalSampler.TYPE.name());
-    }
-
-    /**
-     * Set the max num docs to be returned from each shard.
-     */
-    public SamplerAggregationBuilder shardSize(int shardSize) {
-        this.shardSize = shardSize;
-        return this;
-    }
-
-    @Override
-    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {
-        if (shardSize != SamplerAggregator.Factory.DEFAULT_SHARD_SAMPLE_SIZE) {
-            builder.field(SamplerAggregator.SHARD_SIZE_FIELD.getPreferredName(), shardSize);
-        }
-
-        return builder;
-    }
-
-}
@@ -264,8 +264,8 @@ public class SamplerAggregator extends SingleBucketAggregator {
         private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
         private String executionHint = null;

-        public DiversifiedFactory(String name, ValuesSourceType valueSourceType, ValueType valueType) {
-            super(name, TYPE, valueSourceType, valueType);
+        public DiversifiedFactory(String name) {
+            super(name, TYPE, ValuesSourceType.ANY, null);
         }

         /**
@@ -372,7 +372,7 @@ public class SamplerAggregator extends SingleBucketAggregator {
         @Override
         protected DiversifiedFactory innerReadFrom(String name, ValuesSourceType valuesSourceType,
                 ValueType targetValueType, StreamInput in) throws IOException {
-            DiversifiedFactory factory = new DiversifiedFactory(name, valuesSourceType, targetValueType);
+            DiversifiedFactory factory = new DiversifiedFactory(name);
            factory.shardSize = in.readVInt();
            factory.maxDocsPerValue = in.readVInt();
            factory.executionHint = in.readOptionalString();
@@ -68,8 +68,8 @@ public class SamplerParser implements Aggregator.Parser {
     }

     @Override
-    public AggregatorFactory[] getFactoryPrototypes() {
-        return new AggregatorFactory[] { new SamplerAggregator.Factory(null) };
+    public AggregatorFactory<?> getFactoryPrototypes() {
+        return new SamplerAggregator.Factory(null);
     }

 }
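Since DiversifiedFactory now fixes its values-source type to ANY internally, only a name is required at construction. A usage sketch; shardSize(...) is shown in the parser hunk above, while the maxDocsPerValue and executionHint setters are assumptions carried over from the deleted builder.

// Usage sketch only, not part of the commit.
SamplerAggregator.DiversifiedFactory sample = new SamplerAggregator.DiversifiedFactory("sample");
sample.shardSize(200);        // cap the number of docs sampled per shard
sample.maxDocsPerValue(3);    // assumed setter, mirrors the old builder
sample.executionHint("map");  // assumed setter, mirrors the old builder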
@@ -161,8 +161,8 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFactory {
         return new TermsAggregator.BucketCountThresholds(bucketCountThresholds);
     }

-    public SignificantTermsAggregatorFactory(String name, ValuesSourceType valuesSourceType, ValueType valueType) {
-        super(name, SignificantStringTerms.TYPE, valuesSourceType, valueType);
+    public SignificantTermsAggregatorFactory(String name, ValueType valueType) {
+        super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType);
     }

     public TermsAggregator.BucketCountThresholds bucketCountThresholds() {
@@ -174,6 +174,44 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFactory {
         return this;
     }

+    /**
+     * Sets the size - indicating how many term buckets should be returned
+     * (defaults to 10)
+     */
+    public SignificantTermsAggregatorFactory size(int size) {
+        bucketCountThresholds.setRequiredSize(size);
+        return this;
+    }
+
+    /**
+     * Sets the shard_size - indicating the number of term buckets each shard
+     * will return to the coordinating node (the node that coordinates the
+     * search execution). The higher the shard size is, the more accurate the
+     * results are.
+     */
+    public SignificantTermsAggregatorFactory shardSize(int shardSize) {
+        bucketCountThresholds.setShardSize(shardSize);
+        return this;
+    }
+
+    /**
+     * Set the minimum document count terms should have in order to appear in
+     * the response.
+     */
+    public SignificantTermsAggregatorFactory minDocCount(long minDocCount) {
+        bucketCountThresholds.setMinDocCount(minDocCount);
+        return this;
+    }
+
+    /**
+     * Set the minimum document count terms should have on the shard in order to
+     * appear in the response.
+     */
+    public SignificantTermsAggregatorFactory shardMinDocCount(long shardMinDocCount) {
+        bucketCountThresholds.setShardMinDocCount(shardMinDocCount);
+        return this;
+    }
+
     /**
      * Expert: sets an execution hint to the aggregation.
      */
@@ -399,7 +437,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFactory {
     @Override
     protected SignificantTermsAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType,
             ValueType targetValueType, StreamInput in) throws IOException {
-        SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(name, valuesSourceType, targetValueType);
+        SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(name, targetValueType);
        factory.bucketCountThresholds = BucketCountThresholds.readFromStream(in);
         factory.executionHint = in.readOptionalString();
         if (in.readBoolean()) {
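The threshold tuning that used to live on SignificantTermsBuilder (deleted in the next hunk) now chains on the factory, since each setter returns the factory. A usage sketch; passing ValueType.STRING for a string field is an assumption here.

// Usage sketch only, not part of the commit.
SignificantTermsAggregatorFactory keywords =
        new SignificantTermsAggregatorFactory("keywords", ValueType.STRING);
keywords.size(20)          // top 20 significant terms
        .shardSize(100)    // wider per-shard candidate pool for accuracy
        .minDocCount(5);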
@ -1,277 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.aggregations.bucket.significant;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Creates an aggregation that finds interesting or unusual occurrences of terms in a result set.
|
||||
* <p>
|
||||
* This feature is marked as experimental, and may be subject to change in the future. If you
|
||||
* use this feature, please let us know your experience with it!
|
||||
*/
|
||||
public class SignificantTermsBuilder extends AggregationBuilder<SignificantTermsBuilder> {
|
||||
|
||||
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(-1, -1, -1, -1);
|
||||
|
||||
private String field;
|
||||
private String executionHint;
|
||||
private String includePattern;
|
||||
private int includeFlags;
|
||||
private String excludePattern;
|
||||
private int excludeFlags;
|
||||
private String[] includeTerms = null;
|
||||
private String[] excludeTerms = null;
|
||||
private QueryBuilder filterBuilder;
|
||||
private SignificanceHeuristicBuilder significanceHeuristicBuilder;
|
||||
|
||||
/**
|
||||
* Sole constructor.
|
||||
*/
|
||||
public SignificantTermsBuilder(String name) {
|
||||
super(name, SignificantStringTerms.TYPE.name());
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the field to fetch significant terms from.
|
||||
*/
|
||||
public SignificantTermsBuilder field(String field) {
|
||||
this.field = field;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of significant terms to retrieve.
|
||||
*/
|
||||
public SignificantTermsBuilder size(int requiredSize) {
|
||||
bucketCountThresholds.setRequiredSize(requiredSize);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expert: Set the number of significant terms to retrieve on each shard.
|
||||
*/
|
||||
public SignificantTermsBuilder shardSize(int shardSize) {
|
||||
bucketCountThresholds.setShardSize(shardSize);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Only return significant terms that belong to at least <code>minDocCount</code> documents.
|
||||
*/
|
||||
public SignificantTermsBuilder minDocCount(int minDocCount) {
|
||||
bucketCountThresholds.setMinDocCount(minDocCount);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the background filter to compare to. Defaults to the whole index.
|
||||
*/
|
||||
public SignificantTermsBuilder backgroundFilter(QueryBuilder filter) {
|
||||
this.filterBuilder = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expert: set the minimum number of documents that a term should match to
|
||||
* be retrieved from a shard.
|
||||
*/
|
||||
public SignificantTermsBuilder shardMinDocCount(int shardMinDocCount) {
|
||||
bucketCountThresholds.setShardMinDocCount(shardMinDocCount);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expert: give an execution hint to this aggregation.
|
||||
*/
|
||||
public SignificantTermsBuilder executionHint(String executionHint) {
|
||||
this.executionHint = executionHint;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a regular expression that will determine what terms should be aggregated. The regular expression is based
|
||||
* on the {@link java.util.regex.Pattern} class.
|
||||
*
|
||||
* @see #include(String, int)
|
||||
*/
|
||||
public SignificantTermsBuilder include(String regex) {
|
||||
return include(regex, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a regular expression that will determine what terms should be aggregated. The regular expression is based
|
||||
* on the {@link java.util.regex.Pattern} class.
|
||||
*
|
||||
* @see java.util.regex.Pattern#compile(String, int)
|
||||
*/
|
||||
public SignificantTermsBuilder include(String regex, int flags) {
|
||||
if (includeTerms != null) {
|
||||
throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
|
||||
}
|
||||
this.includePattern = regex;
|
||||
this.includeFlags = flags;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a set of terms that should be aggregated.
|
||||
*/
|
||||
public SignificantTermsBuilder include(String [] terms) {
|
||||
if (includePattern != null) {
|
||||
throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both");
|
||||
}
|
||||
this.includeTerms = terms;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Define a set of terms that should be aggregated.
|
||||
*/
|
||||
public SignificantTermsBuilder include(long [] terms) {
|
||||
if (includePattern != null) {
|
||||
throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both");
|
||||
}
|
||||
this.includeTerms = longsArrToStringArr(terms);
|
||||
return this;
|
||||
}
|
||||
|
||||
private String[] longsArrToStringArr(long[] terms) {
|
||||
String[] termsAsString = new String[terms.length];
|
||||
for (int i = 0; i < terms.length; i++) {
|
||||
termsAsString[i] = Long.toString(terms[i]);
|
||||
}
|
||||
return termsAsString;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
     * Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular
     * expression is based on the {@link java.util.regex.Pattern} class.
     *
     * @see #exclude(String, int)
     */
    public SignificantTermsBuilder exclude(String regex) {
        return exclude(regex, 0);
    }

    /**
     * Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular
     * expression is based on the {@link java.util.regex.Pattern} class.
     *
     * @see java.util.regex.Pattern#compile(String, int)
     */
    public SignificantTermsBuilder exclude(String regex, int flags) {
        if (excludeTerms != null) {
            throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
        }
        this.excludePattern = regex;
        this.excludeFlags = flags;
        return this;
    }

    /**
     * Define a set of terms that should not be aggregated.
     */
    public SignificantTermsBuilder exclude(String[] terms) {
        if (excludePattern != null) {
            throw new IllegalArgumentException("exclude clause must be an array of strings or a regex, not both");
        }
        this.excludeTerms = terms;
        return this;
    }

    /**
     * Define a set of terms that should not be aggregated.
     */
    public SignificantTermsBuilder exclude(long[] terms) {
        if (excludePattern != null) {
            throw new IllegalArgumentException("exclude clause must be an array of longs or a regex, not both");
        }
        this.excludeTerms = longsArrToStringArr(terms);
        return this;
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (field != null) {
            builder.field("field", field);
        }
        bucketCountThresholds.toXContent(builder, params);
        if (executionHint != null) {
            builder.field(TermsAggregatorFactory.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
        }
        if (includePattern != null) {
            if (includeFlags == 0) {
                builder.field("include", includePattern);
            } else {
                builder.startObject("include")
                        .field("pattern", includePattern)
                        .field("flags", includeFlags)
                        .endObject();
            }
        }
        if (includeTerms != null) {
            builder.array("include", includeTerms);
        }

        if (excludePattern != null) {
            if (excludeFlags == 0) {
                builder.field("exclude", excludePattern);
            } else {
                builder.startObject("exclude")
                        .field("pattern", excludePattern)
                        .field("flags", excludeFlags)
                        .endObject();
            }
        }
        if (excludeTerms != null) {
            builder.array("exclude", excludeTerms);
        }

        if (filterBuilder != null) {
            builder.field(SignificantTermsAggregatorFactory.BACKGROUND_FILTER.getPreferredName());
            filterBuilder.toXContent(builder, params);
        }
        if (significanceHeuristicBuilder != null) {
            significanceHeuristicBuilder.toXContent(builder, params);
        }

        return builder.endObject();
    }

    /**
     * Expert: set the {@link SignificanceHeuristic} to use.
     */
    public SignificantTermsBuilder significanceHeuristic(SignificanceHeuristicBuilder significanceHeuristicBuilder) {
        this.significanceHeuristicBuilder = significanceHeuristicBuilder;
        return this;
    }
}
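As an aside for orientation (not part of the commit): a minimal usage sketch of the exclude variants above. The single-argument constructor and the field() setter are assumed from the builder's surrounding API, and the names are illustrative.

    SignificantTermsBuilder sig = new SignificantTermsBuilder("suspicious_terms");
    sig.field("description");                                         // assumed setter
    // either a Pattern-based regex (optionally with Pattern flags)...
    sig.exclude("debug.*", java.util.regex.Pattern.CASE_INSENSITIVE);
    // ...or an explicit term list, but never both:
    // sig.exclude(new String[] { "debug", "trace" });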
@@ -64,8 +64,7 @@ public class SignificantTermsParser extends AbstractTermsParser {
    protected SignificantTermsAggregatorFactory doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
            ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
            IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
        SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(aggregationName, valuesSourceType,
                targetValueType);
        SignificantTermsAggregatorFactory factory = new SignificantTermsAggregatorFactory(aggregationName, targetValueType);
        if (bucketCountThresholds != null) {
            factory.bucketCountThresholds(bucketCountThresholds);
        }

@@ -108,8 +107,8 @@ public class SignificantTermsParser extends AbstractTermsParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new SignificantTermsAggregatorFactory(null, null, null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new SignificantTermsAggregatorFactory(null, null);
    }

    @Override
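Worth noting: the prototype accessor narrows from an array to a single generified factory while keeping its plural name. A hypothetical caller that previously looped over the array now handles one prototype (sketch; the registry is illustrative, not an API from this commit):

    AggregatorFactory<?> prototype = parser.getFactoryPrototypes();
    registry.register(parser.type(), prototype);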
@@ -38,7 +38,8 @@ public class JLHScore extends SignificanceHeuristic {

    protected static final ParseField NAMES_FIELD = new ParseField("jlh");

    private JLHScore() {}
    public JLHScore() {
    }

    @Override
    public String getWriteableName() {

@@ -112,7 +113,7 @@ public class JLHScore extends SignificanceHeuristic {
            throws IOException, QueryShardException {
        // move to the closing bracket
        if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
            throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());
            throw new ElasticsearchParseException("failed to parse [jlh] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());
        }
        return PROTOTYPE;
    }
@@ -38,7 +38,8 @@ public class PercentageScore extends SignificanceHeuristic {

    protected static final ParseField NAMES_FIELD = new ParseField("percentage");

    private PercentageScore() {}
    public PercentageScore() {
    }

    @Override
    public String getWriteableName() {
@@ -347,7 +347,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr
            Map<String, Object> metaData) throws IOException {
        super(name, factories, valuesSource, order, bucketCountThresholds, null, aggregationContext, parent, collectionMode,
                showTermDocCountError, pipelineAggregators, metaData);
        assert factories == null || factories.count() == 0;
        assert factories == null || factories.countAggregators() == 0;
        this.segmentDocCounts = context.bigArrays().newIntArray(1, true);
    }

@@ -96,7 +96,7 @@ class InternalOrder extends Terms.Order {
    public static boolean isCountDesc(Terms.Order order) {
        if (order == COUNT_DESC) {
            return true;
        }else if (order instanceof CompoundOrder) {
        } else if (order instanceof CompoundOrder) {
            // check if it's a compound order with count desc and the tie breaker (term asc)
            CompoundOrder compoundOrder = (CompoundOrder) order;
            if (compoundOrder.orderElements.size() == 2 && compoundOrder.orderElements.get(0) == COUNT_DESC && compoundOrder.orderElements.get(1) == TERM_ASC) {

@@ -106,6 +106,23 @@ class InternalOrder extends Terms.Order {
        return false;
    }

    public static boolean isTermOrder(Terms.Order order) {
        if (order == TERM_ASC) {
            return true;
        } else if (order == TERM_DESC) {
            return true;
        } else if (order instanceof CompoundOrder) {
            // check if it's a compound order with only a single element ordering
            // by term
            CompoundOrder compoundOrder = (CompoundOrder) order;
            if (compoundOrder.orderElements.size() == 1 && (compoundOrder.orderElements.get(0) == TERM_ASC
                    || compoundOrder.orderElements.get(0) == TERM_DESC)) {
                return true;
            }
        }
        return false;
    }

    final byte id;
    final String key;
    final boolean asc;
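To make the new helper's contract concrete, some illustrative expectations (a sketch, not a test shipped with this commit); Terms.Order.term/count/compound are the factory methods used elsewhere in these hunks:

    assert InternalOrder.isTermOrder(Terms.Order.term(true));                        // TERM_ASC
    assert InternalOrder.isTermOrder(Terms.Order.term(false));                       // TERM_DESC
    assert InternalOrder.isTermOrder(Terms.Order.compound(Terms.Order.term(true)));  // single-element compound
    assert InternalOrder.isTermOrder(Terms.Order.count(false)) == false;             // a count order is not a term order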
@@ -188,7 +188,7 @@ public abstract class InternalTerms<A extends InternalTerms, B extends InternalT
        }
        otherDocCount += terms.getSumOfOtherDocCounts();
        final long thisAggDocCountError;
        if (terms.buckets.size() < this.shardSize || this.order == InternalOrder.TERM_ASC || this.order == InternalOrder.TERM_DESC) {
        if (terms.buckets.size() < this.shardSize || InternalOrder.isTermOrder(order)) {
            thisAggDocCountError = 0;
        } else if (InternalOrder.isCountDesc(this.order)) {
            thisAggDocCountError = terms.buckets.get(terms.buckets.size() - 1).docCount;
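A quick worked example of the bound computed in the hunk above: suppose a shard returns a full page of shardSize == 3 buckets ordered by descending count, say ("a", 100), ("b", 40), ("c", 7). Any term that shard omitted can have a doc count of at most 7, so 7 is taken as this aggregation's doc-count error; with a pure term order (the new isTermOrder check) nothing is truncated by count, so the error is 0. As a sketch:

    // the bound is the last (smallest) bucket of a full, count-desc page
    long thisAggDocCountError = terms.buckets.get(terms.buckets.size() - 1).docCount; // == 7 above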
@@ -42,8 +42,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFacto
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -135,7 +133,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
            AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
            boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
            throws IOException {
        if (includeExclude != null || factories.count() > 0
        if (includeExclude != null || factories.countAggregators() > 0
                // we need the FieldData impl to be able to extract the
                // segment to global ord mapping
                || valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) {

@@ -182,7 +180,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
        }
    }

    private List<Terms.Order> orders = Collections.singletonList(Terms.Order.count(false));
    private Terms.Order order = Terms.Order.compound(Terms.Order.count(false), Terms.Order.term(true));
    private IncludeExclude includeExclude = null;
    private String executionHint = null;
    private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST;

@@ -190,8 +188,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
            DEFAULT_BUCKET_COUNT_THRESHOLDS);
    private boolean showTermDocCountError = false;

    public TermsAggregatorFactory(String name, ValuesSourceType valuesSourceType, ValueType valueType) {
        super(name, StringTerms.TYPE, valuesSourceType, valueType);
    public TermsAggregatorFactory(String name, ValueType valueType) {
        super(name, StringTerms.TYPE, ValuesSourceType.ANY, valueType);
    }

    public TermsAggregator.BucketCountThresholds bucketCountThresholds() {
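The constructor change is the visible API shift: callers no longer pass a ValuesSourceType, which is now pinned to ANY inside the constructor and resolved from the mapping at execution time. A before/after sketch, taken from the parser hunk further down:

    // before: new TermsAggregatorFactory(aggregationName, valuesSourceType, targetValueType);
    // after:
    TermsAggregatorFactory factory = new TermsAggregatorFactory(aggregationName, targetValueType);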
@@ -203,19 +201,65 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
        return this;
    }

    /**
     * Sets the size - indicating how many term buckets should be returned
     * (defaults to 10)
     */
    public TermsAggregatorFactory size(int size) {
        bucketCountThresholds.setRequiredSize(size);
        return this;
    }

    /**
     * Sets the shard_size - indicating the number of term buckets each shard
     * will return to the coordinating node (the node that coordinates the
     * search execution). The higher the shard size is, the more accurate the
     * results are.
     */
    public TermsAggregatorFactory shardSize(int shardSize) {
        bucketCountThresholds.setShardSize(shardSize);
        return this;
    }

    /**
     * Set the minimum document count terms should have in order to appear in
     * the response.
     */
    public TermsAggregatorFactory minDocCount(long minDocCount) {
        bucketCountThresholds.setMinDocCount(minDocCount);
        return this;
    }

    /**
     * Set the minimum document count terms should have on the shard in order
     * to appear in the response.
     */
    public TermsAggregatorFactory shardMinDocCount(long shardMinDocCount) {
        bucketCountThresholds.setShardMinDocCount(shardMinDocCount);
        return this;
    }

    /**
     * Sets the order in which the buckets will be returned.
     */
    public TermsAggregatorFactory order(List<Terms.Order> order) {
        this.orders = order;
    public TermsAggregatorFactory order(Terms.Order order) {
        this.order = order;
        return this;
    }

    /**
     * Sets the order in which the buckets will be returned.
     */
    public TermsAggregatorFactory order(List<Terms.Order> orders) {
        order(Terms.Order.compound(orders));
        return this;
    }

    /**
     * Gets the order in which the buckets will be returned.
     */
    public List<Terms.Order> order() {
        return orders;
    public Terms.Order order() {
        return order;
    }

    /**
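A small sketch of the resulting fluent API (aggregation name and values illustrative; java.util.Arrays assumed imported). A single criterion is set directly, while a list is folded into one compound order:

    TermsAggregatorFactory genres = new TermsAggregatorFactory("genres", ValueType.STRING)
            .size(10)
            .shardSize(50)
            .minDocCount(2);
    genres.order(Terms.Order.count(false));                                        // one criterion
    genres.order(Arrays.asList(Terms.Order.count(false), Terms.Order.term(true))); // compound with tie breaker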
@@ -281,7 +325,6 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
    @Override
    protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
        Terms.Order order = resolveOrder(orders);
        final InternalAggregation aggregation = new UnmappedTerms(name, order, bucketCountThresholds.getRequiredSize(),
                bucketCountThresholds.getShardSize(), bucketCountThresholds.getMinDocCount(), pipelineAggregators, metaData);
        return new NonCollectingAggregator(name, aggregationContext, parent, factories, pipelineAggregators, metaData) {

@@ -315,7 +358,6 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
    protected Aggregator doCreateInternal(ValuesSource valuesSource, AggregationContext aggregationContext, Aggregator parent,
            boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
            throws IOException {
        Terms.Order order = resolveOrder(orders);
        if (collectsFromSingleBucket == false) {
            return asMultiBucketAggregator(this, aggregationContext, parent);
        }
@@ -415,11 +457,8 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
        if (executionHint != null) {
            builder.field(TermsAggregatorFactory.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
        }
        builder.startArray(ORDER_FIELD.getPreferredName());
        for (Terms.Order order : orders) {
            order.toXContent(builder, params);
        }
        builder.endArray();
        builder.field(ORDER_FIELD.getPreferredName());
        order.toXContent(builder, params);
        builder.field(SubAggCollectionMode.KEY.getPreferredName(), collectMode.parseField().getPreferredName());
        if (includeExclude != null) {
            includeExclude.toXContent(builder, params);

@@ -430,19 +469,14 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
    @Override
    protected TermsAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType,
            ValueType targetValueType, StreamInput in) throws IOException {
        TermsAggregatorFactory factory = new TermsAggregatorFactory(name, valuesSourceType, targetValueType);
        TermsAggregatorFactory factory = new TermsAggregatorFactory(name, targetValueType);
        factory.bucketCountThresholds = BucketCountThresholds.readFromStream(in);
        factory.collectMode = SubAggCollectionMode.BREADTH_FIRST.readFrom(in);
        factory.executionHint = in.readOptionalString();
        if (in.readBoolean()) {
            factory.includeExclude = IncludeExclude.readFromStream(in);
        }
        int numOrders = in.readVInt();
        List<Terms.Order> orders = new ArrayList<>(numOrders);
        for (int i = 0; i < numOrders; i++) {
            orders.add(InternalOrder.Streams.readOrder(in));
        }
        factory.orders = orders;
        factory.order = InternalOrder.Streams.readOrder(in);
        factory.showTermDocCountError = in.readBoolean();
        return factory;
    }

@@ -457,16 +491,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
        if (hasIncExc) {
            includeExclude.writeTo(out);
        }
        out.writeVInt(orders.size());
        for (Terms.Order order : orders) {
            InternalOrder.Streams.writeOrder(order, out);
        }
        InternalOrder.Streams.writeOrder(order, out);
        out.writeBoolean(showTermDocCountError);
    }

    @Override
    protected int innerHashCode() {
        return Objects.hash(bucketCountThresholds, collectMode, executionHint, includeExclude, orders, showTermDocCountError);
        return Objects.hash(bucketCountThresholds, collectMode, executionHint, includeExclude, order, showTermDocCountError);
    }

    @Override

@@ -476,7 +507,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
            && Objects.equals(collectMode, other.collectMode)
            && Objects.equals(executionHint, other.executionHint)
            && Objects.equals(includeExclude, other.includeExclude)
            && Objects.equals(orders, other.orders)
            && Objects.equals(order, other.order)
            && Objects.equals(showTermDocCountError, other.showTermDocCountError);
    }

@@ -1,276 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.bucket.terms;

import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.ValuesSourceAggregationBuilder;

import java.io.IOException;
import java.util.Locale;

/**
 * Builder for the {@link Terms} aggregation.
 */
public class TermsBuilder extends ValuesSourceAggregationBuilder<TermsBuilder> {

    private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(-1, -1, -1, -1);

    private Terms.ValueType valueType;
    private Terms.Order order;
    private String includePattern;
    private String excludePattern;
    private String executionHint;
    private SubAggCollectionMode collectionMode;
    private Boolean showTermDocCountError;
    private String[] includeTerms = null;
    private String[] excludeTerms = null;

    /**
     * Sole constructor.
     */
    public TermsBuilder(String name) {
        super(name, "terms");
    }

    /**
     * Sets the size - indicating how many term buckets should be returned (defaults to 10)
     */
    public TermsBuilder size(int size) {
        bucketCountThresholds.setRequiredSize(size);
        return this;
    }

    /**
     * Sets the shard_size - indicating the number of term buckets each shard will return to the coordinating node (the
     * node that coordinates the search execution). The higher the shard size is, the more accurate the results are.
     */
    public TermsBuilder shardSize(int shardSize) {
        bucketCountThresholds.setShardSize(shardSize);
        return this;
    }

    /**
     * Set the minimum document count terms should have in order to appear in the response.
     */
    public TermsBuilder minDocCount(long minDocCount) {
        bucketCountThresholds.setMinDocCount(minDocCount);
        return this;
    }

    /**
     * Set the minimum document count terms should have on the shard in order to appear in the response.
     */
    public TermsBuilder shardMinDocCount(long shardMinDocCount) {
        bucketCountThresholds.setShardMinDocCount(shardMinDocCount);
        return this;
    }

    /**
     * Define a regular expression that will determine what terms should be aggregated. The regular expression is based
     * on the {@link RegExp} class.
     *
     * @see RegExp#RegExp(String)
     */
    public TermsBuilder include(String regex) {
        if (includeTerms != null) {
            throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both");
        }
        this.includePattern = regex;
        return this;
    }

    /**
     * Define a set of terms that should be aggregated.
     */
    public TermsBuilder include(String[] terms) {
        if (includePattern != null) {
            throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both");
        }
        this.includeTerms = terms;
        return this;
    }

    /**
     * Define a set of terms that should be aggregated.
     */
    public TermsBuilder include(long[] terms) {
        if (includePattern != null) {
            throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both");
        }
        this.includeTerms = longsArrToStringArr(terms);
        return this;
    }

    private String[] longsArrToStringArr(long[] terms) {
        String[] termsAsString = new String[terms.length];
        for (int i = 0; i < terms.length; i++) {
            termsAsString[i] = Long.toString(terms[i]);
        }
        return termsAsString;
    }

    /**
     * Define a set of terms that should be aggregated.
     */
    public TermsBuilder include(double[] terms) {
        if (includePattern != null) {
            throw new IllegalArgumentException("include clause must be an array of exact values or a regex, not both");
        }
        this.includeTerms = doubleArrToStringArr(terms);
        return this;
    }

    private String[] doubleArrToStringArr(double[] terms) {
        String[] termsAsString = new String[terms.length];
        for (int i = 0; i < terms.length; i++) {
            termsAsString[i] = Double.toString(terms[i]);
        }
        return termsAsString;
    }

    /**
     * Define a regular expression that will filter out terms that should be excluded from the aggregation. The regular
     * expression is based on the {@link RegExp} class.
     *
     * @see RegExp#RegExp(String)
     */
    public TermsBuilder exclude(String regex) {
        if (excludeTerms != null) {
            throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
        }
        this.excludePattern = regex;
        return this;
    }

    /**
     * Define a set of terms that should not be aggregated.
     */
    public TermsBuilder exclude(String[] terms) {
        if (excludePattern != null) {
            throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
        }
        this.excludeTerms = terms;
        return this;
    }

    /**
     * Define a set of terms that should not be aggregated.
     */
    public TermsBuilder exclude(long[] terms) {
        if (excludePattern != null) {
            throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
        }
        this.excludeTerms = longsArrToStringArr(terms);
        return this;
    }

    /**
     * Define a set of terms that should not be aggregated.
     */
    public TermsBuilder exclude(double[] terms) {
        if (excludePattern != null) {
            throw new IllegalArgumentException("exclude clause must be an array of exact values or a regex, not both");
        }
        this.excludeTerms = doubleArrToStringArr(terms);
        return this;
    }

    /**
     * When using scripts, the value type indicates the types of the values the script is generating.
     */
    public TermsBuilder valueType(Terms.ValueType valueType) {
        this.valueType = valueType;
        return this;
    }

    /**
     * Defines the order in which the buckets will be returned.
     */
    public TermsBuilder order(Terms.Order order) {
        this.order = order;
        return this;
    }

    /**
     * Expert: provide an execution hint to the aggregation.
     */
    public TermsBuilder executionHint(String executionHint) {
        this.executionHint = executionHint;
        return this;
    }

    /**
     * Expert: set the collection mode.
     */
    public TermsBuilder collectMode(SubAggCollectionMode mode) {
        this.collectionMode = mode;
        return this;
    }

    /**
     * Expert: return document count errors per term in the response.
     */
    public TermsBuilder showTermDocCountError(boolean showTermDocCountError) {
        this.showTermDocCountError = showTermDocCountError;
        return this;
    }

    @Override
    protected XContentBuilder doInternalXContent(XContentBuilder builder, Params params) throws IOException {

        bucketCountThresholds.toXContent(builder, params);

        if (showTermDocCountError != null) {
            builder.field(TermsAggregatorFactory.SHOW_TERM_DOC_COUNT_ERROR.getPreferredName(), showTermDocCountError);
        }
        if (executionHint != null) {
            builder.field(TermsAggregatorFactory.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
        }
        if (valueType != null) {
            builder.field("value_type", valueType.name().toLowerCase(Locale.ROOT));
        }
        if (order != null) {
            builder.field("order");
            order.toXContent(builder, params);
        }
        if (collectionMode != null) {
            builder.field(SubAggCollectionMode.KEY.getPreferredName(), collectionMode.parseField().getPreferredName());
        }
        if (includeTerms != null) {
            builder.array("include", includeTerms);
        }
        if (includePattern != null) {
            builder.field("include", includePattern);
        }
        if (excludeTerms != null) {
            builder.array("exclude", excludeTerms);
        }
        if (excludePattern != null) {
            builder.field("exclude", excludePattern);
        }
        return builder;
    }
}
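Taken together with the TermsAggregatorFactory hunks above, removing TermsBuilder means client code migrates roughly like this (sketch; names illustrative, and the chained setters are those shown in the hunks above):

    // before: new TermsBuilder("tags").size(10).order(Terms.Order.term(true));
    // after:
    TermsAggregatorFactory tags = new TermsAggregatorFactory("tags", ValueType.STRING)
            .size(10)
            .order(Terms.Order.term(true));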
@@ -52,7 +52,7 @@ public class TermsParser extends AbstractTermsParser {
    protected TermsAggregatorFactory doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
            ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
            IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
        TermsAggregatorFactory factory = new TermsAggregatorFactory(aggregationName, valuesSourceType, targetValueType);
        TermsAggregatorFactory factory = new TermsAggregatorFactory(aggregationName, targetValueType);
        List<OrderElement> orderElements = (List<OrderElement>) otherOptions.get(TermsAggregatorFactory.ORDER_FIELD);
        if (orderElements != null) {
            List<Terms.Order> orders = new ArrayList<>(orderElements.size());

@@ -97,7 +97,7 @@ public class TermsParser extends AbstractTermsParser {
                orderElements.add(orderParam);
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "Order elements must be of type object in [" + aggregationName + "].");
                        "Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "].");
            }
        }
        otherOptions.put(TermsAggregatorFactory.ORDER_FIELD, orderElements);

@@ -179,8 +179,8 @@ public class TermsParser extends AbstractTermsParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new TermsAggregatorFactory(null, null, null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new TermsAggregatorFactory(null, null);
    }

}
@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.terms.support;

import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;

import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.Terms;

@@ -226,6 +227,10 @@ public class IncludeExclude implements Writeable<IncludeExclude>, ToXContent {
        this.excludeValues = null;
    }

    public IncludeExclude(String include, String exclude) {
        this(include == null ? null : new RegExp(include), exclude == null ? null : new RegExp(exclude));
    }

    /**
     * @param includeValues The terms to be included
     * @param excludeValues The terms to be excluded

@@ -240,6 +245,51 @@ public class IncludeExclude implements Writeable<IncludeExclude>, ToXContent {
        this.excludeValues = excludeValues;
    }

    public IncludeExclude(String[] includeValues, String[] excludeValues) {
        this(convertToBytesRefSet(includeValues), convertToBytesRefSet(excludeValues));
    }

    public IncludeExclude(double[] includeValues, double[] excludeValues) {
        this(convertToBytesRefSet(includeValues), convertToBytesRefSet(excludeValues));
    }

    public IncludeExclude(long[] includeValues, long[] excludeValues) {
        this(convertToBytesRefSet(includeValues), convertToBytesRefSet(excludeValues));
    }

    private static SortedSet<BytesRef> convertToBytesRefSet(String[] values) {
        SortedSet<BytesRef> returnSet = null;
        if (values != null) {
            returnSet = new TreeSet<>();
            for (String value : values) {
                returnSet.add(new BytesRef(value));
            }
        }
        return returnSet;
    }

    private static SortedSet<BytesRef> convertToBytesRefSet(double[] values) {
        SortedSet<BytesRef> returnSet = null;
        if (values != null) {
            returnSet = new TreeSet<>();
            for (double value : values) {
                returnSet.add(new BytesRef(String.valueOf(value)));
            }
        }
        return returnSet;
    }

    private static SortedSet<BytesRef> convertToBytesRefSet(long[] values) {
        SortedSet<BytesRef> returnSet = null;
        if (values != null) {
            returnSet = new TreeSet<>();
            for (long value : values) {
                returnSet.add(new BytesRef(String.valueOf(value)));
            }
        }
        return returnSet;
    }

    /**
     * Terms adapter around doc values.
     */
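The new convenience constructors let parsers build an IncludeExclude directly from parsed values; a quick sketch (values illustrative):

    IncludeExclude byRegex = new IncludeExclude("foo.*", "foobar");                // Lucene RegExp patterns
    IncludeExclude byTerms = new IncludeExclude(new String[] { "foo", "bar" }, null);
    IncludeExclude byLongs = new IncludeExclude(new long[] { 1L, 42L }, null);     // stored as sorted BytesRefs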
@@ -1,44 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;

import java.io.IOException;

/**
 * Base builder for metrics aggregations.
 */
public abstract class MetricsAggregationBuilder<B extends MetricsAggregationBuilder<B>> extends AbstractAggregationBuilder {

    public MetricsAggregationBuilder(String name, String type) {
        super(name, type);
    }

    @Override
    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(getName()).startObject(type);
        internalXContent(builder, params);
        return builder.endObject().endObject();
    }

    protected abstract void internalXContent(XContentBuilder builder, Params params) throws IOException;
}
@@ -1,88 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;

import java.io.IOException;

/**
 *
 */
public abstract class ValuesSourceMetricsAggregationBuilder<B extends ValuesSourceMetricsAggregationBuilder<B>> extends MetricsAggregationBuilder<B> {

    private String field;
    private Script script;
    private String format;
    private Object missing;

    protected ValuesSourceMetricsAggregationBuilder(String name, String type) {
        super(name, type);
    }

    @SuppressWarnings("unchecked")
    public B field(String field) {
        this.field = field;
        return (B) this;
    }

    /**
     * The script to use for this aggregation
     */
    @SuppressWarnings("unchecked")
    public B script(Script script) {
        this.script = script;
        return (B) this;
    }

    @SuppressWarnings("unchecked")
    public B format(String format) {
        this.format = format;
        return (B) this;
    }

    /**
     * Configure the value to use when documents miss a value.
     */
    public B missing(Object missingValue) {
        this.missing = missingValue;
        return (B) this;
    }

    @Override
    protected void internalXContent(XContentBuilder builder, Params params) throws IOException {
        if (field != null) {
            builder.field("field", field);
        }

        if (script != null) {
            builder.field("script", script);
        }

        if (format != null) {
            builder.field("format", format);
        }

        if (missing != null) {
            builder.field("missing", missing);
        }
    }
}
@@ -56,8 +56,8 @@ public class AvgParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new AvgAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new AvgAggregator.Factory(null);
    }

}

@@ -72,7 +72,7 @@ public class CardinalityParser extends AnyValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new CardinalityAggregatorFactory(null, null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new CardinalityAggregatorFactory(null, null);
    }
}

@@ -66,8 +66,8 @@ public class GeoBoundsParser extends GeoPointValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new GeoBoundsAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new GeoBoundsAggregator.Factory(null);
    }

}

@@ -58,7 +58,7 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new GeoCentroidAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new GeoCentroidAggregator.Factory(null);
    }
}

@@ -56,8 +56,8 @@ public class MaxParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new MaxAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new MaxAggregator.Factory(null);
    }

}

@@ -57,7 +57,7 @@ public class MinParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new MinAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new MinAggregator.Factory(null);
    }
}
@@ -1,87 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.metrics.ValuesSourceMetricsAggregationBuilder;

import java.io.IOException;

abstract class AbstractPercentilesBuilder<PB extends AbstractPercentilesBuilder<PB>> extends
        ValuesSourceMetricsAggregationBuilder<PB> {

    private Double compression;
    private PercentilesMethod method;
    private Integer numberOfSignificantValueDigits;

    public AbstractPercentilesBuilder(String name, String type) {
        super(name, type);
    }

    /**
     * Expert: Set the method to use to compute the percentiles.
     */
    public PB method(PercentilesMethod method) {
        this.method = method;
        return (PB) this;
    }

    /**
     * Expert: set the compression. Higher values improve accuracy but also
     * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
     */
    public PB compression(double compression) {
        this.compression = compression;
        return (PB) this;
    }

    /**
     * Expert: set the number of significant digits in the values. Only relevant
     * when using {@link PercentilesMethod#HDR}.
     */
    public PB numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
        this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
        return (PB) this;
    }

    @Override
    protected void internalXContent(XContentBuilder builder, Params params) throws IOException {
        super.internalXContent(builder, params);

        doInternalXContent(builder, params);

        if (method != null) {
            builder.startObject(method.getName());

            if (compression != null) {
                builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
            }

            if (numberOfSignificantValueDigits != null) {
                builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
            }

            builder.endObject();
        }
    }

    protected abstract void doInternalXContent(XContentBuilder builder, Params params) throws IOException;

}
@@ -0,0 +1,230 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentileRanksAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory.LeafOnly;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class PercentileRanksAggregatorFactory extends LeafOnly<ValuesSource.Numeric, PercentileRanksAggregatorFactory> {

    private double[] values;
    private PercentilesMethod method = PercentilesMethod.TDIGEST;
    private int numberOfSignificantValueDigits = 3;
    private double compression = 100.0;
    private boolean keyed = false;

    public PercentileRanksAggregatorFactory(String name) {
        super(name, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
    }

    /**
     * Set the values to compute percentiles from.
     */
    public PercentileRanksAggregatorFactory values(double... values) {
        double[] sortedValues = Arrays.copyOf(values, values.length);
        Arrays.sort(sortedValues);
        this.values = sortedValues;
        return this;
    }

    /**
     * Get the values to compute percentiles from.
     */
    public double[] values() {
        return values;
    }

    /**
     * Set whether the XContent response should be keyed
     */
    public PercentileRanksAggregatorFactory keyed(boolean keyed) {
        this.keyed = keyed;
        return this;
    }

    /**
     * Get whether the XContent response should be keyed
     */
    public boolean keyed() {
        return keyed;
    }

    /**
     * Expert: set the number of significant digits in the values. Only relevant
     * when using {@link PercentilesMethod#HDR}.
     */
    public PercentileRanksAggregatorFactory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
        this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
        return this;
    }

    /**
     * Expert: get the number of significant digits in the values. Only relevant
     * when using {@link PercentilesMethod#HDR}.
     */
    public int numberOfSignificantValueDigits() {
        return numberOfSignificantValueDigits;
    }

    /**
     * Expert: set the compression. Higher values improve accuracy but also
     * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
     */
    public PercentileRanksAggregatorFactory compression(double compression) {
        this.compression = compression;
        return this;
    }

    /**
     * Expert: get the compression. Higher values improve accuracy but also
     * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
     */
    public double compression() {
        return compression;
    }

    public PercentileRanksAggregatorFactory method(PercentilesMethod method) {
        this.method = method;
        return this;
    }

    public PercentilesMethod method() {
        return method;
    }

    @Override
    protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
        switch (method) {
        case TDIGEST:
            return new TDigestPercentileRanksAggregator(name, null, aggregationContext, parent, values, compression, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        case HDR:
            return new HDRPercentileRanksAggregator(name, null, aggregationContext, parent, values, numberOfSignificantValueDigits,
                    keyed, config.formatter(), pipelineAggregators, metaData);
        default:
            throw new IllegalStateException("Illegal method [" + method.getName() + "]");
        }
    }

    @Override
    protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
            boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
            throws IOException {
        switch (method) {
        case TDIGEST:
            return new TDigestPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, compression, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        case HDR:
            return new HDRPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, numberOfSignificantValueDigits,
                    keyed, config.formatter(), pipelineAggregators, metaData);
        default:
            throw new IllegalStateException("Illegal method [" + method.getName() + "]");
        }
    }

    @Override
    protected PercentileRanksAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType,
            ValueType targetValueType, StreamInput in) throws IOException {
        PercentileRanksAggregatorFactory factory = new PercentileRanksAggregatorFactory(name);
        factory.values = in.readDoubleArray();
        factory.keyed = in.readBoolean();
        factory.numberOfSignificantValueDigits = in.readVInt();
        factory.compression = in.readDouble();
        factory.method = PercentilesMethod.TDIGEST.readFrom(in);
        return factory;
    }

    @Override
    protected void innerWriteTo(StreamOutput out) throws IOException {
        out.writeDoubleArray(values);
        out.writeBoolean(keyed);
        out.writeVInt(numberOfSignificantValueDigits);
        out.writeDouble(compression);
        method.writeTo(out);
    }

    @Override
    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
        builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
        builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
        builder.startObject(method.getName());
        if (method == PercentilesMethod.TDIGEST) {
            builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
        } else {
            builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
        }
        builder.endObject();
        return builder;
    }

    @Override
    protected boolean innerEquals(Object obj) {
        PercentileRanksAggregatorFactory other = (PercentileRanksAggregatorFactory) obj;
        if (!Objects.equals(method, other.method)) {
            return false;
        }
        boolean equalSettings = false;
        switch (method) {
        case HDR:
            equalSettings = Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits);
            break;
        case TDIGEST:
            equalSettings = Objects.equals(compression, other.compression);
            break;
        default:
            throw new IllegalStateException("Illegal method [" + method.getName() + "]");
        }
        return equalSettings
                && Objects.deepEquals(values, other.values)
                && Objects.equals(keyed, other.keyed)
                && Objects.equals(method, other.method);
    }

    @Override
    protected int innerHashCode() {
        switch (method) {
        case HDR:
            return Objects.hash(Arrays.hashCode(values), keyed, numberOfSignificantValueDigits, method);
        case TDIGEST:
            return Objects.hash(Arrays.hashCode(values), keyed, compression, method);
        default:
            throw new IllegalStateException("Illegal method [" + method.getName() + "]");
        }
    }
}
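One detail worth calling out in the factory above: values(double...) stores a defensively sorted copy, so callers keep ownership of their array. A sketch (names illustrative):

    PercentileRanksAggregatorFactory ranks = new PercentileRanksAggregatorFactory("load_time_ranks");
    double[] input = { 500, 100, 250 };
    ranks.values(input);                 // stored internally as [100.0, 250.0, 500.0]
    // 'input' itself is left unmodified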
@@ -1,54 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;

/**
 * Builder for the {@link PercentileRanks} aggregation.
 */
public class PercentileRanksBuilder extends AbstractPercentilesBuilder<PercentileRanksBuilder> {

    private double[] values;

    /**
     * Sole constructor.
     */
    public PercentileRanksBuilder(String name) {
        super(name, PercentileRanks.TYPE_NAME);
    }

    /**
     * Set the values to compute percentiles from.
     */
    public PercentileRanksBuilder percentiles(double... values) {
        this.values = values;
        return this;
    }

    @Override
    protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException {

        if (values != null) {
            builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
        }
    }
}
@@ -20,9 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentileRanksAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;


@@ -50,38 +48,28 @@ public class PercentileRanksParser extends AbstractPercentilesParser {
    @Override
    protected ValuesSourceAggregatorFactory<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
            Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) {
        if (method == PercentilesMethod.TDIGEST) {
            TDigestPercentileRanksAggregator.Factory factory = new TDigestPercentileRanksAggregator.Factory(aggregationName);
            if (keys != null) {
                factory.values(keys);
            }
            if (compression != null) {
                factory.compression(compression);
            }
            if (keyed != null) {
                factory.keyed(keyed);
            }
            return factory;
        } else if (method == PercentilesMethod.HDR) {
            HDRPercentileRanksAggregator.Factory factory = new HDRPercentileRanksAggregator.Factory(aggregationName);
            if (keys != null) {
                factory.values(keys);
            }
            if (numberOfSignificantValueDigits != null) {
                factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits);
            }
            if (keyed != null) {
                factory.keyed(keyed);
            }
            return factory;
        } else {
            throw new AssertionError();
        PercentileRanksAggregatorFactory factory = new PercentileRanksAggregatorFactory(aggregationName);
        if (keys != null) {
            factory.values(keys);
        }
        if (method != null) {
            factory.method(method);
        }
        if (compression != null) {
            factory.compression(compression);
        }
        if (numberOfSignificantValueDigits != null) {
            factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits);
        }
        if (keyed != null) {
            factory.keyed(keyed);
        }
        return factory;
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new TDigestPercentileRanksAggregator.Factory(null), new HDRPercentileRanksAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new PercentileRanksAggregatorFactory(null);
    }

}
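With the per-method Factory classes gone, buildFactory configures a single PercentileRanksAggregatorFactory and only the method-specific knob differs. An equivalent direct-configuration sketch (aggregation name illustrative):

    PercentileRanksAggregatorFactory f = new PercentileRanksAggregatorFactory("load_time_ranks")
            .values(100, 200, 500)
            .method(PercentilesMethod.HDR)
            .numberOfSignificantValueDigits(3)
            .keyed(true);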
@@ -0,0 +1,230 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory.LeafOnly;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

public class PercentilesAggregatorFactory extends LeafOnly<ValuesSource.Numeric, PercentilesAggregatorFactory> {

    private double[] percents = PercentilesParser.DEFAULT_PERCENTS;
    private PercentilesMethod method = PercentilesMethod.TDIGEST;
    private int numberOfSignificantValueDigits = 3;
    private double compression = 100.0;
    private boolean keyed = false;

    public PercentilesAggregatorFactory(String name) {
        super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
    }

    /**
     * Set the values to compute percentiles from.
     */
    public PercentilesAggregatorFactory percentiles(double... percents) {
        double[] sortedPercents = Arrays.copyOf(percents, percents.length);
        Arrays.sort(sortedPercents);
        this.percents = sortedPercents;
        return this;
    }

    /**
     * Get the values to compute percentiles from.
     */
    public double[] percentiles() {
        return percents;
    }

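    // Illustrative aside, not part of this commit: typical configuration of the new
    // factory. percentiles(...) keeps a sorted copy of its arguments (see above).
    //
    //   PercentilesAggregatorFactory pcts = new PercentilesAggregatorFactory("latency_pcts");
    //   pcts.percentiles(99, 50, 95);                              // stored as [50.0, 95.0, 99.0]
    //   pcts.method(PercentilesMethod.TDIGEST).compression(200.0);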
    /**
     * Set whether the XContent response should be keyed
     */
    public PercentilesAggregatorFactory keyed(boolean keyed) {
        this.keyed = keyed;
        return this;
    }

    /**
     * Get whether the XContent response should be keyed
     */
    public boolean keyed() {
        return keyed;
    }

    /**
     * Expert: set the number of significant digits in the values. Only relevant
     * when using {@link PercentilesMethod#HDR}.
     */
    public PercentilesAggregatorFactory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
        this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
        return this;
    }

    /**
     * Expert: get the number of significant digits in the values. Only relevant
     * when using {@link PercentilesMethod#HDR}.
     */
    public int numberOfSignificantValueDigits() {
        return numberOfSignificantValueDigits;
    }

    /**
     * Expert: set the compression. Higher values improve accuracy but also
     * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
     */
    public PercentilesAggregatorFactory compression(double compression) {
        this.compression = compression;
        return this;
    }

    /**
     * Expert: get the compression. Higher values improve accuracy but also
     * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
     */
    public double compression() {
        return compression;
    }

    public PercentilesAggregatorFactory method(PercentilesMethod method) {
        this.method = method;
        return this;
    }

    public PercentilesMethod method() {
        return method;
    }

    @Override
    protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
        switch (method) {
        case TDIGEST:
            return new TDigestPercentilesAggregator(name, null, aggregationContext, parent, percents, compression, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        case HDR:
            return new HDRPercentilesAggregator(name, null, aggregationContext, parent, percents, numberOfSignificantValueDigits,
                    keyed, config.formatter(), pipelineAggregators, metaData);
        default:
            throw new IllegalStateException("Illegal method [" + method.getName() + "]");
        }
    }

    @Override
    protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
            boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
            throws IOException {
        switch (method) {
        case TDIGEST:
            return new TDigestPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, compression, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        case HDR:
            return new HDRPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, numberOfSignificantValueDigits,
                    keyed, config.formatter(), pipelineAggregators, metaData);
        default:
            throw new IllegalStateException("Illegal method [" + method.getName() + "]");
        }
    }

    @Override
    protected PercentilesAggregatorFactory innerReadFrom(String name, ValuesSourceType valuesSourceType,
            ValueType targetValueType, StreamInput in) throws IOException {
        PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory(name);
        factory.percents = in.readDoubleArray();
        factory.keyed = in.readBoolean();
        factory.numberOfSignificantValueDigits = in.readVInt();
        factory.compression = in.readDouble();
        factory.method = PercentilesMethod.TDIGEST.readFrom(in);
|
||||
return factory;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void innerWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeDoubleArray(percents);
|
||||
out.writeBoolean(keyed);
|
||||
out.writeVInt(numberOfSignificantValueDigits);
|
||||
out.writeDouble(compression);
|
||||
method.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
|
||||
builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
|
||||
builder.startObject(method.getName());
|
||||
if (method == PercentilesMethod.TDIGEST) {
|
||||
builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
|
||||
} else {
|
||||
builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean innerEquals(Object obj) {
|
||||
PercentilesAggregatorFactory other = (PercentilesAggregatorFactory) obj;
|
||||
if (!Objects.equals(method, other.method)) {
|
||||
return false;
|
||||
}
|
||||
boolean equalSettings = false;
|
||||
switch (method) {
|
||||
case HDR:
|
||||
equalSettings = Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits);
|
||||
break;
|
||||
case TDIGEST:
|
||||
equalSettings = Objects.equals(compression, other.compression);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
}
|
||||
return equalSettings
|
||||
&& Objects.deepEquals(percents, other.percents)
|
||||
&& Objects.equals(keyed, other.keyed)
|
||||
&& Objects.equals(method, other.method);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int innerHashCode() {
|
||||
switch (method) {
|
||||
case HDR:
|
||||
return Objects.hash(Arrays.hashCode(percents), keyed, numberOfSignificantValueDigits, method);
|
||||
case TDIGEST:
|
||||
return Objects.hash(Arrays.hashCode(percents), keyed, compression, method);
|
||||
default:
|
||||
throw new IllegalStateException("Illegal method [" + method.getName() + "]");
|
||||
}
|
||||
}
|
||||
}
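The separate TDigest and HDR factories collapse into this single class, so callers pick the algorithm with method(...) instead of choosing a builder type. A minimal usage sketch using only the setters shown above; the aggregation name and percent values are illustrative:

    PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory("load_time_pcts")
            .method(PercentilesMethod.HDR)         // default is PercentilesMethod.TDIGEST
            .numberOfSignificantValueDigits(3)     // consulted only for HDR
            .percentiles(50, 95, 99.9)
            .keyed(true);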
@@ -1,60 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;


/**
 * Builder for the {@link Percentiles} aggregation.
 */
public class PercentilesBuilder extends AbstractPercentilesBuilder<PercentilesBuilder> {

    double[] percentiles;

    /**
     * Sole constructor.
     */
    public PercentilesBuilder(String name) {
        super(name, Percentiles.TYPE_NAME);
    }

    /**
     * Set the percentiles to compute.
     */
    public PercentilesBuilder percentiles(double... percentiles) {
        for (int i = 0; i < percentiles.length; i++) {
            if (percentiles[i] < 0 || percentiles[i] > 100) {
                throw new IllegalArgumentException("the percents in the percentiles aggregation [" +
                        getName() + "] must be in the [0, 100] range");
            }
        }
        this.percentiles = percentiles;
        return this;
    }

    @Override
    protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException {
        if (percentiles != null) {
            builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percentiles);
        }
    }

}
@@ -19,11 +19,16 @@

package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;

/**
 * An enum representing the methods for calculating percentiles
 */
public enum PercentilesMethod {
public enum PercentilesMethod implements Writeable<PercentilesMethod> {
    /**
     * The TDigest method for calculating percentiles
     */

@@ -46,6 +51,20 @@ public enum PercentilesMethod {
        return name;
    }

    @Override
    public PercentilesMethod readFrom(StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown PercentilesMethod ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    /**
     * Returns the {@link PercentilesMethod} for the given method name, or
     * <code>null</code> if no {@link PercentilesMethod} exists for the name.
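The enum now carries its own ordinal-based wire format: writeTo emits a single variable-length int, and readFrom bounds-checks it before indexing values(). A standalone sketch of the same invariant using plain java.io streams (Method is a stand-in enum; ES's StreamInput/StreamOutput are not needed to see the idea):

    import java.io.*;

    enum Method { TDIGEST, HDR }

    class OrdinalRoundTrip {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            new DataOutputStream(bytes).writeInt(Method.HDR.ordinal()); // stands in for out.writeVInt(ordinal())

            DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            int ordinal = in.readInt();
            if (ordinal < 0 || ordinal >= Method.values().length) {     // same bounds check as readFrom
                throw new IOException("Unknown Method ordinal [" + ordinal + "]");
            }
            System.out.println(Method.values()[ordinal]);               // prints HDR
        }
    }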
@@ -20,9 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregator;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;


@@ -52,38 +50,28 @@ public class PercentilesParser extends AbstractPercentilesParser {
    @Override
    protected ValuesSourceAggregatorFactory<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
            Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) {
        if (method == PercentilesMethod.TDIGEST) {
            TDigestPercentilesAggregator.Factory factory = new TDigestPercentilesAggregator.Factory(aggregationName);
            if (keys != null) {
                factory.percents(keys);
            }
            if (compression != null) {
                factory.compression(compression);
            }
            if (keyed != null) {
                factory.keyed(keyed);
            }
            return factory;
        } else if (method == PercentilesMethod.HDR) {
            HDRPercentilesAggregator.Factory factory = new HDRPercentilesAggregator.Factory(aggregationName);
            if (keys != null) {
                factory.percents(keys);
            }
            if (numberOfSignificantValueDigits != null) {
                factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits);
            }
            if (keyed != null) {
                factory.keyed(keyed);
            }
            return factory;
        } else {
            throw new AssertionError();
        PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory(aggregationName);
        if (keys != null) {
            factory.percentiles(keys);
        }
        if (method != null) {
            factory.method(method);
        }
        if (compression != null) {
            factory.compression(compression);
        }
        if (numberOfSignificantValueDigits != null) {
            factory.numberOfSignificantValueDigits(numberOfSignificantValueDigits);
        }
        if (keyed != null) {
            factory.keyed(keyed);
        }
        return factory;
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new TDigestPercentilesAggregator.Factory(null), new HDRPercentilesAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new PercentilesAggregatorFactory(null);
    }

}
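With one factory for both algorithms, the parser no longer branches on method to pick a class; method, compression, and significant digits are all plain state on the same object. The t-digest counterpart of the earlier sketch (name and values illustrative):

    PercentilesAggregatorFactory factory = new PercentilesAggregatorFactory("latency_pcts")
            .method(PercentilesMethod.TDIGEST)
            .compression(200.0)                    // consulted only for TDIGEST
            .percentiles(99);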
@@ -19,28 +19,16 @@
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;

import org.HdrHistogram.DoubleHistogram;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 *

@@ -82,116 +70,4 @@ public class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregat
            return InternalHDRPercentileRanks.percentileRank(state, Double.valueOf(name));
        }
    }

    public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric, Factory> {

        private double[] values;
        private int numberOfSignificantValueDigits = 3;
        private boolean keyed = false;

        public Factory(String name) {
            super(name, InternalHDRPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
        }

        /**
         * Set the values to compute percentiles from.
         */
        public Factory values(double[] values) {
            double[] sortedValues = Arrays.copyOf(values, values.length);
            Arrays.sort(sortedValues);
            this.values = sortedValues;
            return this;
        }

        /**
         * Get the values to compute percentiles from.
         */
        public double[] values() {
            return values;
        }

        /**
         * Set whether the XContent response should be keyed
         */
        public Factory keyed(boolean keyed) {
            this.keyed = keyed;
            return this;
        }

        /**
         * Get whether the XContent response should be keyed
         */
        public boolean keyed() {
            return keyed;
        }

        /**
         * Expert: set the number of significant digits in the values.
         */
        public Factory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
            this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
            return this;
        }

        /**
         * Expert: get the number of significant digits in the values.
         */
        public int numberOfSignificantValueDigits() {
            return numberOfSignificantValueDigits;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
                List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
            return new HDRPercentileRanksAggregator(name, null, aggregationContext, parent, values, numberOfSignificantValueDigits, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        }

        @Override
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
                        throws IOException {
            return new HDRPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, numberOfSignificantValueDigits,
                    keyed, config.formatter(), pipelineAggregators, metaData);
        }

        @Override
        protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType,
                ValueType targetValueType, StreamInput in) throws IOException {
            Factory factory = new Factory(name);
            factory.values = in.readDoubleArray();
            factory.keyed = in.readBoolean();
            factory.numberOfSignificantValueDigits = in.readVInt();
            return factory;
        }

        @Override
        protected void innerWriteTo(StreamOutput out) throws IOException {
            out.writeDoubleArray(values);
            out.writeBoolean(keyed);
            out.writeVInt(numberOfSignificantValueDigits);
        }

        @Override
        protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
            builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
            builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
            builder.startObject(PercentilesMethod.HDR.getName());
            builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
            builder.endObject();
            return builder;
        }

        @Override
        protected boolean innerEquals(Object obj) {
            Factory other = (Factory) obj;
            return Objects.deepEquals(values, other.values) && Objects.equals(keyed, other.keyed)
                    && Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits);
        }

        @Override
        protected int innerHashCode() {
            return Objects.hash(Arrays.hashCode(values), keyed, numberOfSignificantValueDigits);
        }
    }
}
@@ -19,28 +19,16 @@
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;

import org.HdrHistogram.DoubleHistogram;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 *

@@ -83,116 +71,4 @@ public class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator {
                keyed,
                formatter, pipelineAggregators(), metaData());
    }

    public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric, Factory> {

        private double[] percents = PercentilesParser.DEFAULT_PERCENTS;
        private int numberOfSignificantValueDigits = 3;
        private boolean keyed = false;

        public Factory(String name) {
            super(name, InternalHDRPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
        }

        /**
         * Set the percentiles to compute.
         */
        public Factory percents(double[] percents) {
            double[] sortedPercents = Arrays.copyOf(percents, percents.length);
            Arrays.sort(sortedPercents);
            this.percents = sortedPercents;
            return this;
        }

        /**
         * Get the percentiles to compute.
         */
        public double[] percents() {
            return percents;
        }

        /**
         * Set whether the XContent response should be keyed
         */
        public Factory keyed(boolean keyed) {
            this.keyed = keyed;
            return this;
        }

        /**
         * Get whether the XContent response should be keyed
         */
        public boolean keyed() {
            return keyed;
        }

        /**
         * Expert: set the number of significant digits in the values.
         */
        public Factory numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
            this.numberOfSignificantValueDigits = numberOfSignificantValueDigits;
            return this;
        }

        /**
         * Expert: get the number of significant digits in the values.
         */
        public int numberOfSignificantValueDigits() {
            return numberOfSignificantValueDigits;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
                List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
            return new HDRPercentilesAggregator(name, null, aggregationContext, parent, percents, numberOfSignificantValueDigits, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        }

        @Override
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
                        throws IOException {
            return new HDRPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, numberOfSignificantValueDigits,
                    keyed, config.formatter(), pipelineAggregators, metaData);
        }

        @Override
        protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType,
                ValueType targetValueType, StreamInput in) throws IOException {
            Factory factory = new Factory(name);
            factory.percents = in.readDoubleArray();
            factory.keyed = in.readBoolean();
            factory.numberOfSignificantValueDigits = in.readVInt();
            return factory;
        }

        @Override
        protected void innerWriteTo(StreamOutput out) throws IOException {
            out.writeDoubleArray(percents);
            out.writeBoolean(keyed);
            out.writeVInt(numberOfSignificantValueDigits);
        }

        @Override
        protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
            builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
            builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
            builder.startObject(PercentilesMethod.HDR.getName());
            builder.field(AbstractPercentilesParser.NUMBER_SIGNIFICANT_DIGITS_FIELD.getPreferredName(), numberOfSignificantValueDigits);
            builder.endObject();
            return builder;
        }

        @Override
        protected boolean innerEquals(Object obj) {
            Factory other = (Factory) obj;
            return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed)
                    && Objects.equals(numberOfSignificantValueDigits, other.numberOfSignificantValueDigits);
        }

        @Override
        protected int innerHashCode() {
            return Objects.hash(Arrays.hashCode(percents), keyed, numberOfSignificantValueDigits);
        }
    }
}
@@ -18,28 +18,16 @@
 */
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 *

@@ -77,118 +65,4 @@ public class TDigestPercentileRanksAggregator extends AbstractTDigestPercentiles
            return InternalTDigestPercentileRanks.percentileRank(state, Double.valueOf(name));
        }
    }

    public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric, Factory> {

        private double[] values;
        private double compression = 100.0;
        private boolean keyed = false;

        public Factory(String name) {
            super(name, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
        }

        /**
         * Set the values to compute percentiles from.
         */
        public Factory values(double[] values) {
            double[] sortedValues = Arrays.copyOf(values, values.length);
            Arrays.sort(sortedValues);
            this.values = sortedValues;
            return this;
        }

        /**
         * Get the values to compute percentiles from.
         */
        public double[] values() {
            return values;
        }

        /**
         * Set whether the XContent response should be keyed
         */
        public Factory keyed(boolean keyed) {
            this.keyed = keyed;
            return this;
        }

        /**
         * Get whether the XContent response should be keyed
         */
        public boolean keyed() {
            return keyed;
        }

        /**
         * Expert: set the compression. Higher values improve accuracy but also
         * memory usage.
         */
        public Factory compression(double compression) {
            this.compression = compression;
            return this;
        }

        /**
         * Expert: get the compression. Higher values improve accuracy but also
         * memory usage.
         */
        public double compression() {
            return compression;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
                List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
            return new TDigestPercentileRanksAggregator(name, null, aggregationContext, parent, values, compression, keyed, config.formatter(),
                    pipelineAggregators, metaData);
        }

        @Override
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
                        throws IOException {
            return new TDigestPercentileRanksAggregator(name, valuesSource, aggregationContext, parent, values, compression, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        }

        @Override
        protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType,
                ValueType targetValueType, StreamInput in) throws IOException {
            Factory factory = new Factory(name);
            factory.values = in.readDoubleArray();
            factory.keyed = in.readBoolean();
            factory.compression = in.readDouble();
            return factory;
        }

        @Override
        protected void innerWriteTo(StreamOutput out) throws IOException {
            out.writeDoubleArray(values);
            out.writeBoolean(keyed);
            out.writeDouble(compression);
        }

        @Override
        protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
            builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
            builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
            builder.startObject(PercentilesMethod.TDIGEST.getName());
            builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
            builder.endObject();
            return builder;
        }

        @Override
        protected boolean innerEquals(Object obj) {
            Factory other = (Factory) obj;
            return Objects.deepEquals(values, other.values) && Objects.equals(keyed, other.keyed)
                    && Objects.equals(compression, other.compression);
        }

        @Override
        protected int innerHashCode() {
            return Objects.hash(Arrays.hashCode(values), keyed, compression);
        }
    }
}
@@ -18,28 +18,16 @@
 */
package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.percentiles.AbstractPercentilesParser;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 *

@@ -77,118 +65,4 @@ public class TDigestPercentilesAggregator extends AbstractTDigestPercentilesAggr
    public InternalAggregation buildEmptyAggregation() {
        return new InternalTDigestPercentiles(name, keys, new TDigestState(compression), keyed, formatter, pipelineAggregators(), metaData());
    }

    public static class Factory extends ValuesSourceAggregatorFactory.LeafOnly<ValuesSource.Numeric, Factory> {

        private double[] percents = PercentilesParser.DEFAULT_PERCENTS;
        private double compression = 100.0;
        private boolean keyed = false;

        public Factory(String name) {
            super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
        }

        /**
         * Set the percentiles to compute.
         */
        public Factory percents(double[] percents) {
            double[] sortedPercents = Arrays.copyOf(percents, percents.length);
            Arrays.sort(sortedPercents);
            this.percents = sortedPercents;
            return this;
        }

        /**
         * Get the percentiles to compute.
         */
        public double[] percents() {
            return percents;
        }

        /**
         * Set whether the XContent response should be keyed
         */
        public Factory keyed(boolean keyed) {
            this.keyed = keyed;
            return this;
        }

        /**
         * Get whether the XContent response should be keyed
         */
        public boolean keyed() {
            return keyed;
        }

        /**
         * Expert: set the compression. Higher values improve accuracy but also
         * memory usage.
         */
        public Factory compression(double compression) {
            this.compression = compression;
            return this;
        }

        /**
         * Expert: get the compression. Higher values improve accuracy but also
         * memory usage.
         */
        public double compression() {
            return compression;
        }

        @Override
        protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent,
                List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
            return new TDigestPercentilesAggregator(name, null, aggregationContext, parent, percents, compression, keyed, config.formatter(),
                    pipelineAggregators, metaData);
        }

        @Override
        protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, AggregationContext aggregationContext, Aggregator parent,
                boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
                        throws IOException {
            return new TDigestPercentilesAggregator(name, valuesSource, aggregationContext, parent, percents, compression, keyed,
                    config.formatter(), pipelineAggregators, metaData);
        }

        @Override
        protected Factory innerReadFrom(String name, ValuesSourceType valuesSourceType,
                ValueType targetValueType, StreamInput in) throws IOException {
            Factory factory = new Factory(name);
            factory.percents = in.readDoubleArray();
            factory.keyed = in.readBoolean();
            factory.compression = in.readDouble();
            return factory;
        }

        @Override
        protected void innerWriteTo(StreamOutput out) throws IOException {
            out.writeDoubleArray(percents);
            out.writeBoolean(keyed);
            out.writeDouble(compression);
        }

        @Override
        protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
            builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
            builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
            builder.startObject(PercentilesMethod.TDIGEST.getName());
            builder.field(AbstractPercentilesParser.COMPRESSION_FIELD.getPreferredName(), compression);
            builder.endObject();
            return builder;
        }

        @Override
        protected boolean innerEquals(Object obj) {
            Factory other = (Factory) obj;
            return Objects.deepEquals(percents, other.percents) && Objects.equals(keyed, other.keyed)
                    && Objects.equals(compression, other.compression);
        }

        @Override
        protected int innerHashCode() {
            return Objects.hash(Arrays.hashCode(percents), keyed, compression);
        }
    }
}
@@ -151,8 +151,8 @@ public class ScriptedMetricParser implements Aggregator.Parser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new ScriptedMetricAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new ScriptedMetricAggregator.Factory(null);
    }

}

@@ -56,7 +56,7 @@ public class StatsParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new StatsAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new StatsAggregator.Factory(null);
    }
}

@@ -67,7 +67,7 @@ public class ExtendedStatsParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory<?>[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new ExtendedStatsAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new ExtendedStatsAggregator.Factory(null);
    }
}

@@ -56,7 +56,7 @@ public class SumParser extends NumericValuesSourceParser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new SumAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new SumAggregator.Factory(null);
    }
}

@@ -28,7 +28,6 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregator;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.FieldsParseElement;

@@ -206,8 +205,8 @@ public class TopHitsParser implements Aggregator.Parser {
    }

    @Override
    public AggregatorFactory[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new TopHitsAggregator.Factory(null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new TopHitsAggregator.Factory(null);
    }

}

@@ -58,7 +58,7 @@ public class ValueCountParser extends AnyValuesSourceParser {
    }

    @Override
    public AggregatorFactory<?>[] getFactoryPrototypes() {
        return new AggregatorFactory[] { new ValueCountAggregator.Factory(null, null) };
    public AggregatorFactory<?> getFactoryPrototypes() {
        return new ValueCountAggregator.Factory(null, null);
    }
}
@@ -1,84 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;

import java.io.IOException;
import java.util.Map;

/**
 * A base class for all pipeline aggregator builders.
 */
public abstract class PipelineAggregatorBuilder<B extends PipelineAggregatorBuilder<B>> extends AbstractAggregationBuilder {

    private String[] bucketsPaths;
    private Map<String, Object> metaData;

    /**
     * Sole constructor, typically used by sub-classes.
     */
    protected PipelineAggregatorBuilder(String name, String type) {
        super(name, type);
    }

    /**
     * Sets the paths to the buckets to use for this pipeline aggregator
     */
    public B setBucketsPaths(String... bucketsPaths) {
        this.bucketsPaths = bucketsPaths;
        return (B) this;
    }

    /**
     * Sets the meta data to be included in the pipeline aggregator's response
     */
    public B setMetaData(Map<String, Object> metaData) {
        this.metaData = metaData;
        return (B) this;
    }

    @Override
    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(getName());

        if (this.metaData != null) {
            builder.field("meta", this.metaData);
        }
        builder.startObject(type);

        if (bucketsPaths != null) {
            builder.startArray(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName());
            for (String path : bucketsPaths) {
                builder.value(path);
            }
            builder.endArray();
        }

        internalXContent(builder, params);

        builder.endObject();

        return builder.endObject();
    }

    protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException;
}
@@ -19,74 +19,87 @@

package org.elasticsearch.search.aggregations.pipeline;

import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder;
import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeBuilder;
import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorBuilder;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgBuilder;
import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.having.BucketSelectorPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator;

import java.util.Map;

public final class PipelineAggregatorBuilders {

    private PipelineAggregatorBuilders() {
    }

    public static final DerivativeBuilder derivative(String name) {
        return new DerivativeBuilder(name);
    public static final DerivativePipelineAggregator.Factory derivative(String name, String bucketsPath) {
        return new DerivativePipelineAggregator.Factory(name, bucketsPath);
    }

    public static final MaxBucketBuilder maxBucket(String name) {
        return new MaxBucketBuilder(name);
    public static final MaxBucketPipelineAggregator.Factory maxBucket(String name, String bucketsPath) {
        return new MaxBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final MinBucketBuilder minBucket(String name) {
        return new MinBucketBuilder(name);
    public static final MinBucketPipelineAggregator.Factory minBucket(String name, String bucketsPath) {
        return new MinBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final AvgBucketBuilder avgBucket(String name) {
        return new AvgBucketBuilder(name);
    public static final AvgBucketPipelineAggregator.Factory avgBucket(String name, String bucketsPath) {
        return new AvgBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final SumBucketBuilder sumBucket(String name) {
        return new SumBucketBuilder(name);
    public static final SumBucketPipelineAggregator.Factory sumBucket(String name, String bucketsPath) {
        return new SumBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final StatsBucketBuilder statsBucket(String name) {
        return new StatsBucketBuilder(name);
    public static final StatsBucketPipelineAggregator.Factory statsBucket(String name, String bucketsPath) {
        return new StatsBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final ExtendedStatsBucketBuilder extendedStatsBucket(String name) {
        return new ExtendedStatsBucketBuilder(name);
    public static final ExtendedStatsBucketPipelineAggregator.Factory extendedStatsBucket(String name, String bucketsPath) {
        return new ExtendedStatsBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final PercentilesBucketBuilder percentilesBucket(String name) {
        return new PercentilesBucketBuilder(name);
    public static final PercentilesBucketPipelineAggregator.Factory percentilesBucket(String name, String bucketsPath) {
        return new PercentilesBucketPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final MovAvgBuilder movingAvg(String name) {
        return new MovAvgBuilder(name);
    public static final MovAvgPipelineAggregator.Factory movingAvg(String name, String bucketsPath) {
        return new MovAvgPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final BucketScriptBuilder bucketScript(String name) {
        return new BucketScriptBuilder(name);
    public static final BucketScriptPipelineAggregator.Factory bucketScript(String name, Map<String, String> bucketsPathsMap,
            Script script) {
        return new BucketScriptPipelineAggregator.Factory(name, bucketsPathsMap, script);
    }

    public static final BucketSelectorBuilder having(String name) {
        return new BucketSelectorBuilder(name);
    public static final BucketScriptPipelineAggregator.Factory bucketScript(String name, Script script, String... bucketsPaths) {
        return new BucketScriptPipelineAggregator.Factory(name, script, bucketsPaths);
    }

    public static final CumulativeSumBuilder cumulativeSum(String name) {
        return new CumulativeSumBuilder(name);
    public static final BucketSelectorPipelineAggregator.Factory bucketSelector(String name, Map<String, String> bucketsPathsMap,
            Script script) {
        return new BucketSelectorPipelineAggregator.Factory(name, bucketsPathsMap, script);
    }

    public static final SerialDiffBuilder diff(String name) {
        return new SerialDiffBuilder(name);
    public static final BucketSelectorPipelineAggregator.Factory bucketSelector(String name, Script script, String... bucketsPaths) {
        return new BucketSelectorPipelineAggregator.Factory(name, script, bucketsPaths);
    }

    public static final CumulativeSumPipelineAggregator.Factory cumulativeSum(String name, String bucketsPath) {
        return new CumulativeSumPipelineAggregator.Factory(name, bucketsPath);
    }

    public static final SerialDiffPipelineAggregator.Factory diff(String name, String bucketsPath) {
        return new SerialDiffPipelineAggregator.Factory(name, bucketsPath);
    }
}
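Each helper now takes its buckets path (or script) up front instead of leaving it to a later setter on a builder, and returns the concrete Factory. A usage sketch; the aggregation names and path expressions are illustrative:

    import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative;
    import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket;

    DerivativePipelineAggregator.Factory salesDeriv = derivative("sales_deriv", "sales");
    MaxBucketPipelineAggregator.Factory maxSales = maxBucket("max_monthly_sales", "sales_per_month>sales");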
@@ -110,11 +110,8 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem
        out.writeMap(metaData);
    }

    // NORELEASE make this abstract when agg refactor complete
    protected void doWriteTo(StreamOutput out) throws IOException {
    }
    protected abstract void doWriteTo(StreamOutput out) throws IOException;

    // NORELEASE remove this method when agg refactor complete
    @Override
    public String getWriteableName() {
        return type;

@@ -129,10 +126,7 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem
        return factory;
    }

    // NORELEASE make this abstract when agg refactor complete
    protected PipelineAggregatorFactory doReadFrom(String name, String[] bucketsPaths, StreamInput in) throws IOException {
        return null;
    }
    protected abstract PipelineAggregatorFactory doReadFrom(String name, String[] bucketsPaths, StreamInput in) throws IOException;

    @Override
    public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

@@ -166,21 +160,14 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem
        return false;
    }

    // NORELEASE make this method abstract when agg refactor complete
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        return builder;
    }
    protected abstract XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException;

    @Override
    public int hashCode() {
        return Objects.hash(Arrays.hashCode(bucketsPaths), metaData, name, type, doHashCode());
    }

    // NORELEASE make this method abstract here when agg refactor complete (so
    // that subclasses are forced to implement it)
    protected int doHashCode() {
        return 0;
    }
    protected abstract int doHashCode();

    @Override
    public boolean equals(Object obj) {

@@ -200,10 +187,6 @@ public abstract class PipelineAggregatorFactory extends ToXContentToBytes implem
        return doEquals(obj);
    }

    // NORELEASE make this method abstract here when agg refactor complete (so
    // that subclasses are forced to implement it)
    protected boolean doEquals(Object obj) {
        return true;
    }
    protected abstract boolean doEquals(Object obj);

}
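With the NORELEASE placeholders gone, every concrete factory must supply its own serialization, rendering, hashing, and equality hooks. A skeletal subclass sketch; the (name, type, bucketsPaths) superclass constructor shape is an assumption inferred from the BucketMetrics factories below, and the aggregator-creation methods not shown in this hunk are omitted:

    public class NoopPipelineAggregatorFactory extends PipelineAggregatorFactory {

        public NoopPipelineAggregatorFactory(String name, String[] bucketsPaths) {
            super(name, "noop", bucketsPaths);    // constructor shape is an assumption
        }

        @Override
        protected void doWriteTo(StreamOutput out) throws IOException {
            // no state beyond name/type/bucketsPaths, so nothing to write
        }

        @Override
        protected PipelineAggregatorFactory doReadFrom(String name, String[] bucketsPaths, StreamInput in) throws IOException {
            return new NoopPipelineAggregatorFactory(name, bucketsPaths);
        }

        @Override
        protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
            return builder;                       // nothing extra to render
        }

        @Override
        protected int doHashCode() {
            return 0;                             // no extra state
        }

        @Override
        protected boolean doEquals(Object obj) {
            return true;                          // no extra state
        }
    }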
@@ -1,67 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline.bucketmetrics;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketParser;
import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativeParser;

import java.io.IOException;

/**
 * A builder for requests for a {@link BucketMetricsPipelineAggregator}
 */
public abstract class BucketMetricsBuilder<B extends BucketMetricsBuilder<B>> extends PipelineAggregatorBuilder<B> {

    private String format;
    private GapPolicy gapPolicy;

    public BucketMetricsBuilder(String name, String type) {
        super(name, type);
    }

    public B format(String format) {
        this.format = format;
        return (B) this;
    }

    public B gapPolicy(GapPolicy gapPolicy) {
        this.gapPolicy = gapPolicy;
        return (B) this;
    }

    @Override
    protected final XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        if (format != null) {
            builder.field(MinBucketParser.FORMAT.getPreferredName(), format);
        }
        if (gapPolicy != null) {
            builder.field(DerivativeParser.GAP_POLICY.getPreferredName(), gapPolicy.getName());
        }
        doInternalXContent(builder, params);
        return builder;
    }

    protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException {
    }

}
@@ -91,7 +91,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {

        BucketMetricsFactory factory = null;
        try {
            factory = buildFactory(pipelineAggregatorName, bucketsPaths, leftover);
            factory = buildFactory(pipelineAggregatorName, bucketsPaths[0], leftover);
            if (format != null) {
                factory.format(format);
            }

@@ -112,7 +112,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
        return factory;
    }

    protected abstract BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths,
    protected abstract BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPaths,
            Map<String, Object> unparsedParams) throws ParseException;

}
@@ -1,30 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg;

import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder;

public class AvgBucketBuilder extends BucketMetricsBuilder<AvgBucketBuilder> {

    public AvgBucketBuilder(String name) {
        super(name, AvgBucketPipelineAggregator.TYPE.name());
    }

}
@@ -32,8 +32,8 @@ public class AvgBucketParser extends BucketMetricsParser {
    }

    @Override
    protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, Map<String, Object> unparsedParams) {
        return new AvgBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths);
    protected BucketMetricsFactory buildFactory(String pipelineAggregatorName, String bucketsPath, Map<String, Object> unparsedParams) {
        return new AvgBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPath);
    }

    @Override
    public PipelineAggregatorFactory getFactoryPrototype() {
@@ -91,7 +91,11 @@ public class AvgBucketPipelineAggregator extends BucketMetricsPipelineAggregator

    public static class Factory extends BucketMetricsFactory<Factory> {

        public Factory(String name, String[] bucketsPaths) {
        public Factory(String name, String bucketsPath) {
            this(name, new String[] { bucketsPath });
        }

        private Factory(String name, String[] bucketsPaths) {
            super(name, TYPE.name(), bucketsPaths);
        }

@@ -1,30 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max;

import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder;

public class MaxBucketBuilder extends BucketMetricsBuilder<MaxBucketBuilder> {

    public MaxBucketBuilder(String name) {
        super(name, MaxBucketPipelineAggregator.TYPE.name());
    }

}