Reducers are now parsed in AggregatorParsers

This commit is contained in:
Colin Goodheart-Smithe 2015-02-12 10:51:32 +00:00
parent ae76239b0a
commit 1e947c8d17
6 changed files with 125 additions and 73 deletions

View File

@@ -27,7 +27,7 @@ import java.io.IOException;
/**
* CommonTermsQuery query is a query that executes high-frequency terms in a
* optional sub-query to prevent slow queries due to "common" terms like
* stopwords. This query basically builds 2 queries off the {@link #add(Term)
* stopwords. This query basically builds 2 queries off the {@link #addAggregator(Term)
* added} terms where low-frequency terms are added to a required boolean clause
* and high-frequency terms are added to an optional boolean clause. The
* optional clause is only executed if the required "low-frequency' clause

View File

@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.inject.SpawnModules;
@@ -54,6 +55,7 @@ import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStat
import org.elasticsearch.search.aggregations.metrics.sum.SumParser;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsParser;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import java.util.List;
@@ -62,39 +64,40 @@ import java.util.List;
*/
public class AggregationModule extends AbstractModule implements SpawnModules{
private List<Class<? extends Aggregator.Parser>> parsers = Lists.newArrayList();
private List<Class<? extends Aggregator.Parser>> aggParsers = Lists.newArrayList();
private List<Class<? extends Reducer.Parser>> reducerParsers = Lists.newArrayList();
public AggregationModule() {
parsers.add(AvgParser.class);
parsers.add(SumParser.class);
parsers.add(MinParser.class);
parsers.add(MaxParser.class);
parsers.add(StatsParser.class);
parsers.add(ExtendedStatsParser.class);
parsers.add(ValueCountParser.class);
parsers.add(PercentilesParser.class);
parsers.add(PercentileRanksParser.class);
parsers.add(CardinalityParser.class);
aggParsers.add(AvgParser.class);
aggParsers.add(SumParser.class);
aggParsers.add(MinParser.class);
aggParsers.add(MaxParser.class);
aggParsers.add(StatsParser.class);
aggParsers.add(ExtendedStatsParser.class);
aggParsers.add(ValueCountParser.class);
aggParsers.add(PercentilesParser.class);
aggParsers.add(PercentileRanksParser.class);
aggParsers.add(CardinalityParser.class);
parsers.add(GlobalParser.class);
parsers.add(MissingParser.class);
parsers.add(FilterParser.class);
parsers.add(FiltersParser.class);
parsers.add(TermsParser.class);
parsers.add(SignificantTermsParser.class);
parsers.add(RangeParser.class);
parsers.add(DateRangeParser.class);
parsers.add(IpRangeParser.class);
parsers.add(HistogramParser.class);
parsers.add(DateHistogramParser.class);
parsers.add(GeoDistanceParser.class);
parsers.add(GeoHashGridParser.class);
parsers.add(NestedParser.class);
parsers.add(ReverseNestedParser.class);
parsers.add(TopHitsParser.class);
parsers.add(GeoBoundsParser.class);
parsers.add(ScriptedMetricParser.class);
parsers.add(ChildrenParser.class);
aggParsers.add(GlobalParser.class);
aggParsers.add(MissingParser.class);
aggParsers.add(FilterParser.class);
aggParsers.add(FiltersParser.class);
aggParsers.add(TermsParser.class);
aggParsers.add(SignificantTermsParser.class);
aggParsers.add(RangeParser.class);
aggParsers.add(DateRangeParser.class);
aggParsers.add(IpRangeParser.class);
aggParsers.add(HistogramParser.class);
aggParsers.add(DateHistogramParser.class);
aggParsers.add(GeoDistanceParser.class);
aggParsers.add(GeoHashGridParser.class);
aggParsers.add(NestedParser.class);
aggParsers.add(ReverseNestedParser.class);
aggParsers.add(TopHitsParser.class);
aggParsers.add(GeoBoundsParser.class);
aggParsers.add(ScriptedMetricParser.class);
aggParsers.add(ChildrenParser.class);
}
/**
@@ -103,14 +106,18 @@ public class AggregationModule extends AbstractModule implements SpawnModules{
* @param parser The parser for the custom aggregator.
*/
public void addAggregatorParser(Class<? extends Aggregator.Parser> parser) {
parsers.add(parser);
aggParsers.add(parser);
}
@Override
protected void configure() {
Multibinder<Aggregator.Parser> multibinder = Multibinder.newSetBinder(binder(), Aggregator.Parser.class);
for (Class<? extends Aggregator.Parser> parser : parsers) {
multibinder.addBinding().to(parser);
Multibinder<Aggregator.Parser> multibinderAggParser = Multibinder.newSetBinder(binder(), Aggregator.Parser.class);
for (Class<? extends Aggregator.Parser> parser : aggParsers) {
multibinderAggParser.addBinding().to(parser);
}
Multibinder<Reducer.Parser> multibinderReducerParser = Multibinder.newSetBinder(binder(), Reducer.Parser.class);
for (Class<? extends Reducer.Parser> parser : reducerParsers) {
multibinderReducerParser.addBinding().to(parser);
}
bind(AggregatorParsers.class).asEagerSingleton();
bind(AggregationParseElement.class).asEagerSingleton();

View File

@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
@@ -36,18 +37,22 @@ public class AggregatorFactories {
public static final AggregatorFactories EMPTY = new Empty();
private AggregatorFactory[] factories;
private List<Reducer> reducers;
private List<ReducerFactory> reducerFactories;
public static Builder builder() {
return new Builder();
}
private AggregatorFactories(AggregatorFactory[] factories, List<Reducer> reducers) {
private AggregatorFactories(AggregatorFactory[] factories, List<ReducerFactory> reducers) {
this.factories = factories;
this.reducers = reducers;
this.reducerFactories = reducers;
}
public List<Reducer> reducers() {
public List<Reducer> createReducers() throws IOException {
List<Reducer> reducers = new ArrayList<>();
for (ReducerFactory factory : this.reducerFactories) {
reducers.add(factory.create(null, null, false)); // NOCOMMIT add context, parent etc.
}
return reducers;
}
@@ -107,7 +112,7 @@ public class AggregatorFactories {
private static final AggregatorFactory[] EMPTY_FACTORIES = new AggregatorFactory[0];
private static final Aggregator[] EMPTY_AGGREGATORS = new Aggregator[0];
private static final List<Reducer> EMPTY_REDUCERS = new ArrayList<>();
private static final List<ReducerFactory> EMPTY_REDUCERS = new ArrayList<>();
private Empty() {
super(EMPTY_FACTORIES, EMPTY_REDUCERS);
@@ -129,9 +134,9 @@ public class AggregatorFactories {
private final Set<String> names = new HashSet<>();
private final List<AggregatorFactory> factories = new ArrayList<>();
private List<Reducer> reducers = new ArrayList<>();
private final List<ReducerFactory> reducerFactories = new ArrayList<>();
public Builder add(AggregatorFactory factory) {
public Builder addAggregator(AggregatorFactory factory) {
if (!names.add(factory.name)) {
throw new ElasticsearchIllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
}
@@ -139,8 +144,8 @@
return this;
}
public Builder setReducers(List<Reducer> reducers) {
this.reducers = reducers;
public Builder addReducer(ReducerFactory reducerFactory) {
this.reducerFactories.add(reducerFactory);
return this;
}
@@ -148,7 +153,8 @@
if (factories.isEmpty()) {
return EMPTY;
}
return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), this.reducers);
// NOCOMMIT work out dependency order of reducer factories
return new AggregatorFactories(factories.toArray(new AggregatorFactory[factories.size()]), this.reducerFactories);
}
}
}

View File

@@ -95,7 +95,7 @@ public abstract class AggregatorFactory {
* @return The created aggregator
*/
public final Aggregator create(AggregationContext context, Aggregator parent, boolean collectsFromSingleBucket) throws IOException {
return createInternal(context, parent, collectsFromSingleBucket, this.factories.reducers(), this.metaData);
return createInternal(context, parent, collectsFromSingleBucket, this.factories.createReducers(), this.metaData);
}
public void doValidate() {

View File

@@ -19,10 +19,13 @@
package org.elasticsearch.search.aggregations;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.reducers.Reducer;
import org.elasticsearch.search.aggregations.reducers.ReducerFactory;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -37,21 +40,30 @@ import java.util.regex.Pattern;
public class AggregatorParsers {
public static final Pattern VALID_AGG_NAME = Pattern.compile("[^\\[\\]>]+");
private final ImmutableMap<String, Aggregator.Parser> parsers;
private final ImmutableMap<String, Aggregator.Parser> aggParsers;
private final ImmutableMap<String, Reducer.Parser> reducerParsers;
/**
* Constructs the AggregatorParsers out of all the given parsers
*
* @param parsers The available aggregator parsers (dynamically injected by the {@link org.elasticsearch.search.aggregations.AggregationModule}).
*
* @param aggParsers
* The available aggregator parsers (dynamically injected by the
* {@link org.elasticsearch.search.aggregations.AggregationModule}
* ).
*/
@Inject
public AggregatorParsers(Set<Aggregator.Parser> parsers) {
MapBuilder<String, Aggregator.Parser> builder = MapBuilder.newMapBuilder();
for (Aggregator.Parser parser : parsers) {
builder.put(parser.type(), parser);
public AggregatorParsers(Set<Aggregator.Parser> aggParsers, Set<Reducer.Parser> reducerParsers) {
MapBuilder<String, Aggregator.Parser> aggParsersBuilder = MapBuilder.newMapBuilder();
for (Aggregator.Parser parser : aggParsers) {
aggParsersBuilder.put(parser.type(), parser);
}
this.parsers = builder.immutableMap();
this.aggParsers = aggParsersBuilder.immutableMap();
MapBuilder<String, Reducer.Parser> reducerParsersBuilder = MapBuilder.newMapBuilder();
for (Reducer.Parser parser : reducerParsers) {
reducerParsersBuilder.put(parser.type(), parser);
}
this.reducerParsers = reducerParsersBuilder.immutableMap();
}
/**
@@ -61,7 +73,18 @@ public class AggregatorParsers {
* @return The parser associated with the given aggregation type.
*/
public Aggregator.Parser parser(String type) {
return parsers.get(type);
return aggParsers.get(type);
}
/**
* Returns the parser that is registered under the given reducer type.
*
* @param type
* The reducer type
* @return The parser associated with the given reducer type.
*/
public Reducer.Parser reducer(String type) {
return reducerParsers.get(type);
}
/**
@@ -98,7 +121,8 @@ public class AggregatorParsers {
throw new SearchParseException(context, "Aggregation definition for [" + aggregationName + " starts with a [" + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
}
AggregatorFactory factory = null;
AggregatorFactory aggFactory = null;
ReducerFactory reducerFactory = null;
AggregatorFactories subFactories = null;
Map<String, Object> metaData = null;
@@ -126,34 +150,49 @@
subFactories = parseAggregators(parser, context, level+1);
break;
default:
if (factory != null) {
throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" + factory.type + "] and [" + fieldName + "]");
if (aggFactory != null) {
throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: ["
+ aggFactory.type + "] and [" + fieldName + "]");
}
Aggregator.Parser aggregatorParser = parser(fieldName);
if (aggregatorParser == null) {
throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in [" + aggregationName + "]");
Reducer.Parser reducerParser = reducer(fieldName);
if (reducerParser == null) {
throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in ["
+ aggregationName + "]");
} else {
reducerFactory = reducerParser.parse(aggregationName, parser, context);
}
factory = aggregatorParser.parse(aggregationName, parser, context);
} else {
aggFactory = aggregatorParser.parse(aggregationName, parser, context);
}
}
}
if (factory == null) {
if (aggFactory == null && reducerFactory == null) {
throw new SearchParseException(context, "Missing definition for aggregation [" + aggregationName + "]");
}
} else if (aggFactory != null) {
if (metaData != null) {
aggFactory.setMetaData(metaData);
}
if (metaData != null) {
factory.setMetaData(metaData);
}
if (subFactories != null) {
aggFactory.subFactories(subFactories);
}
if (subFactories != null) {
factory.subFactories(subFactories);
}
if (level == 0) {
aggFactory.validate();
}
if (level == 0) {
factory.validate();
factories.addAggregator(aggFactory);
} else if (reducerFactory != null) {
if (subFactories != null) {
throw new SearchParseException(context, "Aggregation [" + aggregationName + "] cannot define sub-aggregations");
}
factories.addReducer(reducerFactory);
} else {
throw new SearchParseException(context, "Found two sub aggregation definitions under [" + aggregationName + "]");
}
factories.add(factory);
}
return factories.build();

View File

@@ -120,7 +120,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeLuceneTestCase
AggregationContext context = new AggregationContext(searchContext);
AggregatorFactories.Builder builder = AggregatorFactories.builder();
builder.add(new NestedAggregator.Factory("test", "nested_field", FilterCachingPolicy.ALWAYS_CACHE));
builder.addAggregator(new NestedAggregator.Factory("test", "nested_field", FilterCachingPolicy.ALWAYS_CACHE));
AggregatorFactories factories = builder.build();
searchContext.aggregations(new SearchContextAggregations(factories));
Aggregator[] aggs = factories.createTopLevelAggregators(context);