Reducers are now parsed in AggregatorParsers
commit 1e947c8d17
parent ae76239b0a
@@ -27,7 +27,7 @@ import java.io.IOException;
 /**
  * CommonTermsQuery query is a query that executes high-frequency terms in a
  * optional sub-query to prevent slow queries due to "common" terms like
- * stopwords. This query basically builds 2 queries off the {@link #add(Term)
+ * stopwords. This query basically builds 2 queries off the {@link #addAggregator(Term)
  * added} terms where low-frequency terms are added to a required boolean clause
  * and high-frequency terms are added to an optional boolean clause. The
  * optional clause is only executed if the required "low-frequency' clause
@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
+
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.inject.SpawnModules;
@@ -54,6 +55,7 @@ import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStat
 import org.elasticsearch.search.aggregations.metrics.sum.SumParser;
 import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsParser;
 import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser;
+import org.elasticsearch.search.aggregations.reducers.Reducer;
 
 import java.util.List;
 
@@ -62,39 +64,40 @@ import java.util.List;
  */
 public class AggregationModule extends AbstractModule implements SpawnModules{
 
-    private List<Class<? extends Aggregator.Parser>> parsers = Lists.newArrayList();
+    private List<Class<? extends Aggregator.Parser>> aggParsers = Lists.newArrayList();
+    private List<Class<? extends Reducer.Parser>> reducerParsers = Lists.newArrayList();
 
     public AggregationModule() {
-        parsers.add(AvgParser.class);
-        parsers.add(SumParser.class);
-        parsers.add(MinParser.class);
-        parsers.add(MaxParser.class);
-        parsers.add(StatsParser.class);
-        parsers.add(ExtendedStatsParser.class);
-        parsers.add(ValueCountParser.class);
-        parsers.add(PercentilesParser.class);
-        parsers.add(PercentileRanksParser.class);
-        parsers.add(CardinalityParser.class);
+        aggParsers.add(AvgParser.class);
+        aggParsers.add(SumParser.class);
+        aggParsers.add(MinParser.class);
+        aggParsers.add(MaxParser.class);
+        aggParsers.add(StatsParser.class);
+        aggParsers.add(ExtendedStatsParser.class);
+        aggParsers.add(ValueCountParser.class);
+        aggParsers.add(PercentilesParser.class);
+        aggParsers.add(PercentileRanksParser.class);
+        aggParsers.add(CardinalityParser.class);
 
-        parsers.add(GlobalParser.class);
-        parsers.add(MissingParser.class);
-        parsers.add(FilterParser.class);
-        parsers.add(FiltersParser.class);
-        parsers.add(TermsParser.class);
-        parsers.add(SignificantTermsParser.class);
-        parsers.add(RangeParser.class);
-        parsers.add(DateRangeParser.class);
-        parsers.add(IpRangeParser.class);
-        parsers.add(HistogramParser.class);
-        parsers.add(DateHistogramParser.class);
-        parsers.add(GeoDistanceParser.class);
-        parsers.add(GeoHashGridParser.class);
-        parsers.add(NestedParser.class);
-        parsers.add(ReverseNestedParser.class);
-        parsers.add(TopHitsParser.class);
-        parsers.add(GeoBoundsParser.class);
-        parsers.add(ScriptedMetricParser.class);
-        parsers.add(ChildrenParser.class);
+        aggParsers.add(GlobalParser.class);
+        aggParsers.add(MissingParser.class);
+        aggParsers.add(FilterParser.class);
+        aggParsers.add(FiltersParser.class);
+        aggParsers.add(TermsParser.class);
+        aggParsers.add(SignificantTermsParser.class);
+        aggParsers.add(RangeParser.class);
+        aggParsers.add(DateRangeParser.class);
+        aggParsers.add(IpRangeParser.class);
+        aggParsers.add(HistogramParser.class);
+        aggParsers.add(DateHistogramParser.class);
+        aggParsers.add(GeoDistanceParser.class);
+        aggParsers.add(GeoHashGridParser.class);
+        aggParsers.add(NestedParser.class);
+        aggParsers.add(ReverseNestedParser.class);
+        aggParsers.add(TopHitsParser.class);
+        aggParsers.add(GeoBoundsParser.class);
+        aggParsers.add(ScriptedMetricParser.class);
+        aggParsers.add(ChildrenParser.class);
     }
 
     /**
@@ -103,14 +106,18 @@ public class AggregationModule extends AbstractModule implements SpawnModules{
      * @param parser The parser for the custom aggregator.
      */
    public void addAggregatorParser(Class<? extends Aggregator.Parser> parser) {
-        parsers.add(parser);
+        aggParsers.add(parser);
    }
 
    @Override
    protected void configure() {
-        Multibinder<Aggregator.Parser> multibinder = Multibinder.newSetBinder(binder(), Aggregator.Parser.class);
-        for (Class<? extends Aggregator.Parser> parser : parsers) {
-            multibinder.addBinding().to(parser);
+        Multibinder<Aggregator.Parser> multibinderAggParser = Multibinder.newSetBinder(binder(), Aggregator.Parser.class);
+        for (Class<? extends Aggregator.Parser> parser : aggParsers) {
+            multibinderAggParser.addBinding().to(parser);
+        }
+        Multibinder<Reducer.Parser> multibinderReducerParser = Multibinder.newSetBinder(binder(), Reducer.Parser.class);
+        for (Class<? extends Reducer.Parser> parser : reducerParsers) {
+            multibinderReducerParser.addBinding().to(parser);
        }
        bind(AggregatorParsers.class).asEagerSingleton();
        bind(AggregationParseElement.class).asEagerSingleton();
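Note on the AggregationModule hunks above: configure() now wires a second Multibinder so that Reducer.Parser implementations can be collected into an injectable Set alongside the aggregator parsers. The sketch below is illustrative only and is not part of the commit; it reproduces the same set-binding pattern with plain Guice (com.google.inject) and stand-in parser interfaces, since the real Aggregator.Parser and Reducer.Parser types need the rest of the Elasticsearch tree. Also note that this commit registers no concrete reducer parsers yet (reducerParsers starts out empty); the stand-in reducer parser here only shows how a future registration would flow through the binder.

    // Illustrative only -- not part of this commit. A minimal, self-contained sketch of
    // the Guice set-binding pattern that configure() uses, written against plain Guice
    // rather than Elasticsearch's repackaged org.elasticsearch.common.inject.
    // AggParser / ReducerParser are stand-ins for Aggregator.Parser and Reducer.Parser.
    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Inject;
    import com.google.inject.Injector;
    import com.google.inject.multibindings.Multibinder;

    import java.util.Set;

    interface AggParser { String type(); }

    interface ReducerParser { String type(); }

    class AvgStandInParser implements AggParser {
        public String type() { return "avg"; }
    }

    // The commit leaves reducerParsers empty; this stand-in only demonstrates how a
    // future reducer parser registration would reach the injected set.
    class StandInReducerParser implements ReducerParser {
        public String type() { return "some_reducer"; }
    }

    class StandInAggregationModule extends AbstractModule {
        @Override
        protected void configure() {
            // One Multibinder per parser kind, mirroring multibinderAggParser / multibinderReducerParser.
            Multibinder<AggParser> aggBinder = Multibinder.newSetBinder(binder(), AggParser.class);
            aggBinder.addBinding().to(AvgStandInParser.class);

            Multibinder<ReducerParser> reducerBinder = Multibinder.newSetBinder(binder(), ReducerParser.class);
            reducerBinder.addBinding().to(StandInReducerParser.class);
        }
    }

    class StandInAggregatorParsers {
        final Set<AggParser> aggParsers;
        final Set<ReducerParser> reducerParsers;

        // Guice injects both sets, just as the new two-argument AggregatorParsers constructor expects.
        @Inject
        StandInAggregatorParsers(Set<AggParser> aggParsers, Set<ReducerParser> reducerParsers) {
            this.aggParsers = aggParsers;
            this.reducerParsers = reducerParsers;
        }
    }

    public class MultibinderSketch {
        public static void main(String[] args) {
            Injector injector = Guice.createInjector(new StandInAggregationModule());
            StandInAggregatorParsers parsers = injector.getInstance(StandInAggregatorParsers.class);
            System.out.println(parsers.aggParsers.size() + " aggregator parser(s), "
                    + parsers.reducerParsers.size() + " reducer parser(s)");
        }
    }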
@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations;
 
 import org.elasticsearch.ElasticsearchIllegalArgumentException;
 import org.elasticsearch.search.aggregations.reducers.Reducer;
+import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 
 import java.io.IOException;
@@ -36,18 +37,22 @@ public class AggregatorFactories {
     public static final AggregatorFactories EMPTY = new Empty();
 
     private AggregatorFactory[] factories;
-    private List<Reducer> reducers;
+    private List<ReducerFactory> reducerFactories;
 
     public static Builder builder() {
         return new Builder();
     }
 
-    private AggregatorFactories(AggregatorFactory[] factories, List<Reducer> reducers) {
+    private AggregatorFactories(AggregatorFactory[] factories, List<ReducerFactory> reducers) {
         this.factories = factories;
-        this.reducers = reducers;
+        this.reducerFactories = reducers;
     }
 
-    public List<Reducer> reducers() {
+    public List<Reducer> createReducers() throws IOException {
+        List<Reducer> reducers = new ArrayList<>();
+        for (ReducerFactory factory : this.reducerFactories) {
+            reducers.add(factory.create(null, null, false)); // NOCOMIT add context, parent etc.
+        }
         return reducers;
     }
 
@@ -107,7 +112,7 @@ public class AggregatorFactories {
 
         private static final AggregatorFactory[] EMPTY_FACTORIES = new AggregatorFactory[0];
         private static final Aggregator[] EMPTY_AGGREGATORS = new Aggregator[0];
-        private static final List<Reducer> EMPTY_REDUCERS = new ArrayList<>();
+        private static final List<ReducerFactory> EMPTY_REDUCERS = new ArrayList<>();
 
         private Empty() {
             super(EMPTY_FACTORIES, EMPTY_REDUCERS);
@@ -129,9 +134,9 @@
 
         private final Set<String> names = new HashSet<>();
         private final List<AggregatorFactory> factories = new ArrayList<>();
-        private List<Reducer> reducers = new ArrayList<>();
+        private final List<ReducerFactory> reducerFactories = new ArrayList<>();
 
-        public Builder add(AggregatorFactory factory) {
+        public Builder addAggregator(AggregatorFactory factory) {
            if (!names.add(factory.name)) {
                throw new ElasticsearchIllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
            }
@@ -139,8 +144,8 @@
            return this;
        }
 
-        public Builder setReducers(List<Reducer> reducers) {
-            this.reducers = reducers;
+        public Builder addReducer(ReducerFactory reducerFactory) {
+            this.reducerFactories.add(reducerFactory);
            return this;
        }
 
@@ -148,7 +153,8 @@
            if (factories.isEmpty()) {
                return EMPTY;
            }
-            return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), this.reducers);
+            // NOCOMMIT work out dependency order of reducer factories
+            return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), this.reducerFactories);
        }
    }
 }
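Note on the AggregatorFactories hunks above: the stored List<Reducer> becomes a List<ReducerFactory>; the Builder now collects factories through addAggregator(...) and addReducer(...), and the reducers themselves are only materialized when createReducers() runs. The stand-alone sketch below is illustrative only and not part of the commit; it mirrors that factory-versus-instance split with simplified stand-in types, and the real ReducerFactory.create(context, parent, collectsFromSingleBucket) parameters are collapsed to a no-argument create(). The next hunk then shows the caller side: AggregatorFactory.create(...) now invokes factories.createReducers() and hands the result to createInternal(...).

    // Illustrative only -- simplified stand-ins, not the real Elasticsearch classes.
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    interface Reducer {
        String name();
    }

    // Mirrors ReducerFactory: it knows how to build a Reducer on demand. The real
    // create(context, parent, collectsFromSingleBucket) arguments are omitted here.
    interface ReducerFactory {
        Reducer create() throws IOException;
    }

    class Factories {
        private final List<ReducerFactory> reducerFactories;

        private Factories(List<ReducerFactory> reducerFactories) {
            this.reducerFactories = reducerFactories;
        }

        // Like the new createReducers(): reducers are built lazily from their factories.
        List<Reducer> createReducers() throws IOException {
            List<Reducer> reducers = new ArrayList<>();
            for (ReducerFactory factory : reducerFactories) {
                reducers.add(factory.create());
            }
            return reducers;
        }

        static class Builder {
            private final List<ReducerFactory> reducerFactories = new ArrayList<>();

            // Like Builder.addReducer(ReducerFactory): the factory is stored, not the reducer.
            Builder addReducer(ReducerFactory reducerFactory) {
                this.reducerFactories.add(reducerFactory);
                return this;
            }

            Factories build() {
                return new Factories(new ArrayList<>(reducerFactories));
            }
        }
    }

    public class ReducerFactorySketch {
        public static void main(String[] args) throws IOException {
            ReducerFactory someFactory = new ReducerFactory() {
                @Override
                public Reducer create() {
                    return new Reducer() {
                        @Override
                        public String name() {
                            return "some_reducer"; // purely illustrative name
                        }
                    };
                }
            };

            Factories factories = new Factories.Builder().addReducer(someFactory).build();
            List<Reducer> reducers = factories.createReducers();
            System.out.println(reducers.size() + " reducer(s): " + reducers.get(0).name());
        }
    }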
@@ -95,7 +95,7 @@ public abstract class AggregatorFactory {
      * @return The created aggregator
      */
     public final Aggregator create(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket) throws IOException {
-        return createInternal(context, parent, collectsFromSingleBucket, this.factories.reducers(), this.metaData);
+        return createInternal(context, parent, collectsFromSingleBucket, this.factories.createReducers(), this.metaData);
     }
 
     public void doValidate() {
@@ -19,10 +19,13 @@
 package org.elasticsearch.search.aggregations;
 
 import com.google.common.collect.ImmutableMap;
+
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.SearchParseException;
+import org.elasticsearch.search.aggregations.reducers.Reducer;
+import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
@@ -37,21 +40,30 @@ import java.util.regex.Pattern;
 public class AggregatorParsers {
 
     public static final Pattern VALID_AGG_NAME = Pattern.compile("[^\\[\\]>]+");
-    private final ImmutableMap<String, Aggregator.Parser> parsers;
+    private final ImmutableMap<String, Aggregator.Parser> aggParsers;
+    private final ImmutableMap<String, Reducer.Parser> reducerParsers;
 
 
     /**
      * Constructs the AggregatorParsers out of all the given parsers
      *
-     * @param parsers The available aggregator parsers (dynamically injected by the {@link org.elasticsearch.search.aggregations.AggregationModule}).
+     * @param aggParsers
+     *            The available aggregator parsers (dynamically injected by the
+     *            {@link org.elasticsearch.search.aggregations.AggregationModule}
+     *            ).
      */
     @Inject
-    public AggregatorParsers(Set<Aggregator.Parser> parsers) {
-        MapBuilder<String, Aggregator.Parser> builder = MapBuilder.newMapBuilder();
-        for (Aggregator.Parser parser : parsers) {
-            builder.put(parser.type(), parser);
+    public AggregatorParsers(Set<Aggregator.Parser> aggParsers, Set<Reducer.Parser> reducerParsers) {
+        MapBuilder<String, Aggregator.Parser> aggParsersBuilder = MapBuilder.newMapBuilder();
+        for (Aggregator.Parser parser : aggParsers) {
+            aggParsersBuilder.put(parser.type(), parser);
        }
-        this.parsers = builder.immutableMap();
+        this.aggParsers = aggParsersBuilder.immutableMap();
+        MapBuilder<String, Reducer.Parser> reducerParsersBuilder = MapBuilder.newMapBuilder();
+        for (Reducer.Parser parser : reducerParsers) {
+            reducerParsersBuilder.put(parser.type(), parser);
+        }
+        this.reducerParsers = reducerParsersBuilder.immutableMap();
    }
 
    /**
@@ -61,7 +73,18 @@ public class AggregatorParsers {
      * @return The parser associated with the given aggregation type.
      */
     public Aggregator.Parser parser(String type) {
-        return parsers.get(type);
+        return aggParsers.get(type);
+    }
+
+    /**
+     * Returns the parser that is registered under the given reducer type.
+     *
+     * @param type
+     *            The reducer type
+     * @return The parser associated with the given reducer type.
+     */
+    public Reducer.Parser reducer(String type) {
+        return reducerParsers.get(type);
     }
 
     /**
@@ -98,7 +121,8 @@ public class AggregatorParsers {
                 throw new SearchParseException(context, "Aggregation definition for [" + aggregationName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
             }
 
-            AggregatorFactory factory = null;
+            AggregatorFactory aggFactory = null;
+            ReducerFactory reducerFactory = null;
             AggregatorFactories subFactories = null;
 
             Map<String, Object> metaData = null;
@@ -126,34 +150,49 @@
                         subFactories = parseAggregators(parser, context, level+1);
                         break;
                     default:
-                        if (factory != null) {
-                            throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" + factory.type + "] and [" + fieldName + "]");
+                        if (aggFactory != null) {
+                            throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: ["
+                                    + aggFactory.type + "] and [" + fieldName + "]");
                         }
                         Aggregator.Parser aggregatorParser = parser(fieldName);
                         if (aggregatorParser == null) {
-                            throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]");
+                            Reducer.Parser reducerParser = reducer(fieldName);
+                            if (reducerParser == null) {
+                                throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in ["
+                                        + aggregationName + "]");
+                            } else {
+                                reducerFactory = reducerParser.parse(aggregationName, parser, context);
+                            }
+                        } else {
+                            aggFactory = aggregatorParser.parse(aggregationName, parser, context);
                         }
-                        factory = aggregatorParser.parse(aggregationName, parser, context);
                 }
             }
 
-            if (factory == null) {
+            if (aggFactory == null && reducerFactory == null) {
                 throw new SearchParseException(context, "Missing definition for aggregation [" + aggregationName + "]");
-            }
+            } else if (aggFactory != null) {
 
             if (metaData != null) {
-                factory.setMetaData(metaData);
+                aggFactory.setMetaData(metaData);
            }
 
            if (subFactories != null) {
-                factory.subFactories(subFactories);
+                aggFactory.subFactories(subFactories);
            }
 
            if (level == 0) {
-                factory.validate();
+                aggFactory.validate();
            }
 
-            factories.add(factory);
+            factories.addAggregator(aggFactory);
+            } else if (reducerFactory != null) {
+                if (subFactories != null) {
+                    throw new SearchParseException(context, "Aggregation [" + aggregationName + "] cannot define sub-aggregations");
+                }
+                factories.addReducer(reducerFactory);
+            } else {
+                throw new SearchParseException(context, "Found two sub aggregation definitions under [" + aggregationName + "]");
+            }
        }
 
        return factories.build();
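Note on the AggregatorParsers hunks above, which are the heart of the commit: when parseAggregators() hits an unknown field name, it now looks the name up among the aggregator parsers first and, only if that fails, among the newly injected reducer parsers; parsing fails only when both lookups miss, and a reducer definition additionally may not carry sub-aggregations. The sketch below is illustrative only and not part of the commit: plain maps and a plain exception stand in for the ImmutableMap fields and SearchParseException, and the reducer type name is made up, since no reducer parsers ship in this commit.

    // Illustrative only -- plain maps stand in for AggregatorParsers' ImmutableMap fields.
    import java.util.HashMap;
    import java.util.Map;

    public class ParserLookupSketch {

        static final Map<String, String> AGG_PARSERS = new HashMap<>();
        static final Map<String, String> REDUCER_PARSERS = new HashMap<>();

        static {
            AGG_PARSERS.put("avg", "AvgParser");
            REDUCER_PARSERS.put("some_reducer", "SomeReducerParser"); // made-up name; none exist yet
        }

        // Mirrors the new control flow: parser(fieldName) first, then reducer(fieldName),
        // and an error only when both lookups fail.
        static String resolve(String fieldName, String aggregationName) {
            String aggregatorParser = AGG_PARSERS.get(fieldName);
            if (aggregatorParser == null) {
                String reducerParser = REDUCER_PARSERS.get(fieldName);
                if (reducerParser == null) {
                    throw new IllegalArgumentException("Could not find aggregator type ["
                            + fieldName + "] in [" + aggregationName + "]");
                }
                return "reducer via " + reducerParser;
            }
            return "aggregation via " + aggregatorParser;
        }

        public static void main(String[] args) {
            System.out.println(resolve("avg", "my_avg"));              // aggregation via AvgParser
            System.out.println(resolve("some_reducer", "my_reducer")); // reducer via SomeReducerParser
        }
    }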
@@ -120,7 +120,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeLuceneTestCase
         AggregationContext context = new AggregationContext(searchContext);
 
         AggregatorFactories.Builder builder = AggregatorFactories.builder();
-        builder.add(new NestedAggregator.Factory("test", "nested_field", FilterCachingPolicy.ALWAYS_CACHE));
+        builder.addAggregator(new NestedAggregator.Factory("test", "nested_field", FilterCachingPolicy.ALWAYS_CACHE));
         AggregatorFactories factories = builder.build();
         searchContext.aggregations(new SearchContextAggregations(factories));
         Aggregator[] aggs = factories.createTopLevelAggregators(context);