Replace bespoke parser for significance heuristics (#50623) (#50659)

This replaces the hand-written xcontent parsers for significance
heuristics with `ObjectParser` and named xcontent parsing.

As a happy accident, this was the last user of `ParseFieldRegistry` so
this PR entirely removes that class.

Closes #25519
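
To illustrate the new extension point, here is a minimal sketch (not code from this commit) of how a plugin now supplies a `SignificanceHeuristicSpec` instead of a `SignificanceHeuristicParser`. It mirrors the test plugin in this diff; `MyHeuristic` is hypothetical and is assumed to expose a `Writeable.Reader` (`MyHeuristic::new`) and an `ObjectParser`-style `PARSER`.

```java
import java.util.List;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;

import static java.util.Collections.singletonList;

// Hypothetical plugin registering one custom significance heuristic through the
// new SignificanceHeuristicSpec extension point added by this change.
public class MyHeuristicPlugin extends Plugin implements SearchPlugin {
    @Override
    public List<SignificanceHeuristicSpec<?>> getSignificanceHeuristics() {
        // name, stream reader, and xcontent parser; no hand-written parsing code needed
        return singletonList(
            new SignificanceHeuristicSpec<>(MyHeuristic.NAME, MyHeuristic::new, MyHeuristic.PARSER));
    }
}
```

The spec's parser argument is a `BiFunction<XContentParser, Void, T>`; as the registrations in `SearchModule` show, the built-in heuristics pass their `ObjectParser`/`ConstructingObjectParser` `PARSER` constants for it directly.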
Nik Everett 2020-01-06 12:57:43 -05:00 committed by GitHub
parent fa57813c6d
commit f576aefd0f
16 changed files with 162 additions and 297 deletions

View File

@@ -40,7 +40,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
 import org.elasticsearch.search.aggregations.pipeline.MovAvgModel;
 import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -56,6 +55,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.function.BiFunction;
 
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
@@ -74,7 +74,7 @@ public interface SearchPlugin {
      * The new {@link SignificanceHeuristic}s defined by this plugin. {@linkplain SignificanceHeuristic}s are used by the
      * {@link SignificantTerms} aggregation to pick which terms are significant for a given query.
      */
-    default List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
+    default List<SignificanceHeuristicSpec<?>> getSignificanceHeuristics() {
        return emptyList();
    }
    /**
@@ -146,6 +146,19 @@
        }
    }
+    /**
+     * Specification of custom {@link SignificanceHeuristic}.
+     */
+    class SignificanceHeuristicSpec<T extends SignificanceHeuristic> extends SearchExtensionSpec<T, BiFunction<XContentParser, Void, T>> {
+        public SignificanceHeuristicSpec(ParseField name, Writeable.Reader<T> reader, BiFunction<XContentParser, Void, T> parser) {
+            super(name, reader, parser);
+        }
+
+        public SignificanceHeuristicSpec(String name, Writeable.Reader<T> reader, BiFunction<XContentParser, Void, T> parser) {
+            super(name, reader, parser);
+        }
+    }
+
    /**
     * Specification for a {@link Suggester}.
     */

View File

@@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.AbstractObjectParser;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ObjectParser;
@@ -47,6 +48,9 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
+import java.util.function.BiConsumer;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
 /**
  * {@link Script} represents used-defined input that can be used to
@@ -268,6 +272,16 @@
        PARSER.declareField(Builder::setParams, XContentParser::map, PARAMS_PARSE_FIELD, ValueType.OBJECT);
    }
+    /**
+     * Declare a script field on an {@link ObjectParser}.
+     * @param <T> Whatever type the {@linkplain ObjectParser} is parsing.
+     * @param parser the parser itself
+     * @param consumer the consumer for the script
+     */
+    public static <T> void declareScript(AbstractObjectParser<T, ?> parser, BiConsumer<T, Script> consumer) {
+        parser.declareField(constructorArg(), (p, c) -> Script.parse(p), Script.SCRIPT_PARSE_FIELD, ValueType.OBJECT_OR_STRING);
+    }
+
    /**
     * Convenience method to call {@link Script#parse(XContentParser, String)}
     * using the default scripting language.

View File

@@ -68,6 +68,7 @@ import org.elasticsearch.index.query.SpanContainingQueryBuilder;
 import org.elasticsearch.index.query.SpanFirstQueryBuilder;
 import org.elasticsearch.index.query.SpanMultiTermQueryBuilder;
 import org.elasticsearch.index.query.SpanNearQueryBuilder;
+import org.elasticsearch.index.query.SpanNearQueryBuilder.SpanGapQueryBuilder;
 import org.elasticsearch.index.query.SpanNotQueryBuilder;
 import org.elasticsearch.index.query.SpanOrQueryBuilder;
 import org.elasticsearch.index.query.SpanTermQueryBuilder;
@@ -97,6 +98,7 @@ import org.elasticsearch.plugins.SearchPlugin.RescorerSpec;
 import org.elasticsearch.plugins.SearchPlugin.ScoreFunctionSpec;
 import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec;
 import org.elasticsearch.plugins.SearchPlugin.SearchExtensionSpec;
+import org.elasticsearch.plugins.SearchPlugin.SignificanceHeuristicSpec;
 import org.elasticsearch.plugins.SearchPlugin.SuggesterSpec;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -112,8 +114,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregationBui
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilters;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
 import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoTileGrid;
 import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
@@ -153,7 +155,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Mutua
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
 import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.LongRareTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
@@ -283,11 +284,10 @@ import java.util.Map;
 import java.util.function.Consumer;
 import java.util.function.Function;
 
+import static java.util.Collections.unmodifiableList;
 import static java.util.Collections.unmodifiableMap;
 import static java.util.Objects.requireNonNull;
 import static org.elasticsearch.index.query.CommonTermsQueryBuilder.COMMON_TERMS_QUERY_DEPRECATION_MSG;
-import static org.elasticsearch.index.query.SpanNearQueryBuilder.SpanGapQueryBuilder;
-import static java.util.Collections.unmodifiableList;
 
 /**
  * Sets up things that can be done at search time like queries, aggregations, and suggesters.
@@ -298,8 +298,6 @@ public class SearchModule {
    private final boolean transportClient;
    private final Map<String, Highlighter> highlighters;
-    private final ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry = new ParseFieldRegistry<>(
-        "significance_heuristic");
 
    private final ParseFieldRegistry<MovAvgModel.AbstractModelParser> movingAverageModelParserRegistry = new ParseFieldRegistry<>(
        "moving_avg_model");
@@ -354,13 +352,6 @@
        return highlighters;
    }
-    /**
-     * The registry of {@link SignificanceHeuristic}s.
-     */
-    public ParseFieldRegistry<SignificanceHeuristicParser> getSignificanceHeuristicParserRegistry() {
-        return significanceHeuristicParserRegistry;
-    }
-
    /**
     * The registry of {@link MovAvgModel}s.
     */
@@ -427,12 +418,12 @@
                .addResultReader(UnmappedRareTerms.NAME, UnmappedRareTerms::new)
                .addResultReader(LongRareTerms.NAME, LongRareTerms::new));
        registerAggregation(new AggregationSpec(SignificantTermsAggregationBuilder.NAME, SignificantTermsAggregationBuilder::new,
-                SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry))
+                SignificantTermsAggregationBuilder::parse)
                    .addResultReader(SignificantStringTerms.NAME, SignificantStringTerms::new)
                    .addResultReader(SignificantLongTerms.NAME, SignificantLongTerms::new)
                    .addResultReader(UnmappedSignificantTerms.NAME, UnmappedSignificantTerms::new));
        registerAggregation(new AggregationSpec(SignificantTextAggregationBuilder.NAME, SignificantTextAggregationBuilder::new,
-                SignificantTextAggregationBuilder.getParser(significanceHeuristicParserRegistry)));
+                SignificantTextAggregationBuilder::parse));
        registerAggregation(new AggregationSpec(RangeAggregationBuilder.NAME, RangeAggregationBuilder::new,
                RangeAggregationBuilder::parse).addResultReader(InternalRange::new));
        registerAggregation(new AggregationSpec(DateRangeAggregationBuilder.NAME, DateRangeAggregationBuilder::new,
@@ -720,20 +711,22 @@
    }
 
    private void registerSignificanceHeuristics(List<SearchPlugin> plugins) {
-        registerSignificanceHeuristic(new SearchExtensionSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
-        registerSignificanceHeuristic(new SearchExtensionSpec<>(GND.NAME, GND::new, GND.PARSER));
-        registerSignificanceHeuristic(new SearchExtensionSpec<>(JLHScore.NAME, JLHScore::new, JLHScore::parse));
-        registerSignificanceHeuristic(new SearchExtensionSpec<>(MutualInformation.NAME, MutualInformation::new, MutualInformation.PARSER));
-        registerSignificanceHeuristic(new SearchExtensionSpec<>(PercentageScore.NAME, PercentageScore::new, PercentageScore::parse));
-        registerSignificanceHeuristic(new SearchExtensionSpec<>(ScriptHeuristic.NAME, ScriptHeuristic::new, ScriptHeuristic::parse));
+        registerSignificanceHeuristic(new SignificanceHeuristicSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
+        registerSignificanceHeuristic(new SignificanceHeuristicSpec<>(GND.NAME, GND::new, GND.PARSER));
+        registerSignificanceHeuristic(new SignificanceHeuristicSpec<>(JLHScore.NAME, JLHScore::new, JLHScore.PARSER));
+        registerSignificanceHeuristic(new SignificanceHeuristicSpec<>(
+            MutualInformation.NAME, MutualInformation::new, MutualInformation.PARSER));
+        registerSignificanceHeuristic(new SignificanceHeuristicSpec<>(PercentageScore.NAME, PercentageScore::new, PercentageScore.PARSER));
+        registerSignificanceHeuristic(new SignificanceHeuristicSpec<>(ScriptHeuristic.NAME, ScriptHeuristic::new, ScriptHeuristic.PARSER));
 
        registerFromPlugin(plugins, SearchPlugin::getSignificanceHeuristics, this::registerSignificanceHeuristic);
    }
 
-    private void registerSignificanceHeuristic(SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser> heuristic) {
-        significanceHeuristicParserRegistry.register(heuristic.getParser(), heuristic.getName());
-        namedWriteables.add(new NamedWriteableRegistry.Entry(SignificanceHeuristic.class, heuristic.getName().getPreferredName(),
-            heuristic.getReader()));
+    private <T extends SignificanceHeuristic> void registerSignificanceHeuristic(SignificanceHeuristicSpec<?> spec) {
+        namedXContents.add(new NamedXContentRegistry.Entry(
+            SignificanceHeuristic.class, spec.getName(), p -> spec.getParser().apply(p, null)));
+        namedWriteables.add(new NamedWriteableRegistry.Entry(
+            SignificanceHeuristic.class, spec.getName().getPreferredName(), spec.getReader()));
    }
 
    private void registerMovingAverageModels(List<SearchPlugin> plugins) {

View File

@@ -23,30 +23,27 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.ParseFieldRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
-import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
 import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper;
-import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 
 import java.io.IOException;
 import java.util.Map;
@@ -65,48 +62,36 @@ public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationB
            3, 0, 10, -1);
    static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore();
 
-    public static Aggregator.Parser getParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) {
-        ObjectParser<SignificantTermsAggregationBuilder, Void> aggregationParser =
-            new ObjectParser<>(SignificantTermsAggregationBuilder.NAME);
-        ValuesSourceParserHelper.declareAnyFields(aggregationParser, true, true);
-
-        aggregationParser.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);
-        aggregationParser.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME);
-        aggregationParser.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount,
-            TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME);
-        aggregationParser.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME);
-        aggregationParser.declareString(SignificantTermsAggregationBuilder::executionHint,
-            TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME);
-        aggregationParser.declareObject(SignificantTermsAggregationBuilder::backgroundFilter,
-            (p, context) -> parseInnerQueryBuilder(p),
-            SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
-        aggregationParser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
-            IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
-        aggregationParser.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
-            IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);
-
-        for (String name : significanceHeuristicParserRegistry.getNames()) {
-            aggregationParser.declareObject(SignificantTermsAggregationBuilder::significanceHeuristic,
-                (p, context) -> {
-                    SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
-                        .lookupReturningNullIfNotFound(name, p.getDeprecationHandler());
-                    return significanceHeuristicParser.parse(p);
-                },
-                new ParseField(name));
-        }
-        return new Aggregator.Parser() {
-            @Override
-            public AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
-                return aggregationParser.parse(parser, new SignificantTermsAggregationBuilder(aggregationName, null), null);
-            }
-        };
+    private static final ObjectParser<SignificantTermsAggregationBuilder, Void> PARSER = new ObjectParser<>(
+        SignificantTermsAggregationBuilder.NAME,
+        SignificanceHeuristic.class, SignificantTermsAggregationBuilder::significanceHeuristic, null);
+    static {
+        ValuesSourceParserHelper.declareAnyFields(PARSER, true, true);
+
+        PARSER.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);
+        PARSER.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME);
+        PARSER.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount,
+            TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME);
+        PARSER.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME);
+        PARSER.declareString(SignificantTermsAggregationBuilder::executionHint,
+            TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME);
+        PARSER.declareObject(SignificantTermsAggregationBuilder::backgroundFilter,
+            (p, context) -> parseInnerQueryBuilder(p),
+            SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
+        PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())),
+            IncludeExclude::parseInclude, IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
+        PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
+            IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY);
+    }
+    public static SignificantTermsAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+        return PARSER.parse(parser, new SignificantTermsAggregationBuilder(aggregationName, null), null);
    }
 
    private IncludeExclude includeExclude = null;

View File

@@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.ParseFieldRegistry;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
@@ -32,11 +31,9 @@ import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregationInitializationException;
-import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
 import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
@@ -69,11 +66,10 @@ public class SignificantTextAggregationBuilder extends AbstractAggregationBuilde
            DEFAULT_BUCKET_COUNT_THRESHOLDS);
    private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;
 
-    public static Aggregator.Parser getParser(
-            ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry) {
-        ObjectParser<SignificantTextAggregationBuilder, Void> PARSER = new ObjectParser<>(
-                SignificantTextAggregationBuilder.NAME);
+    private static final ObjectParser<SignificantTextAggregationBuilder, Void> PARSER = new ObjectParser<>(
+            SignificantTextAggregationBuilder.NAME,
+            SignificanceHeuristic.class, SignificantTextAggregationBuilder::significanceHeuristic, null);
+    static {
 
        PARSER.declareInt(SignificantTextAggregationBuilder::shardSize,
                TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME);
@@ -105,23 +101,9 @@
        PARSER.declareField((b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)),
                IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD,
                ObjectParser.ValueType.STRING_ARRAY);
-
-        for (String name : significanceHeuristicParserRegistry.getNames()) {
-            PARSER.declareObject(SignificantTextAggregationBuilder::significanceHeuristic,
-                    (p, context) -> {
-                        SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
-                                .lookupReturningNullIfNotFound(name, p.getDeprecationHandler());
-                        return significanceHeuristicParser.parse(p);
-                    }, new ParseField(name));
-        }
-        return new Aggregator.Parser() {
-            @Override
-            public AggregationBuilder parse(String aggregationName, XContentParser parser)
-                    throws IOException {
-                return PARSER.parse(parser,
-                        new SignificantTextAggregationBuilder(aggregationName, null), null);
-            }
-        };
+    }
+    public static SignificantTextAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+        return PARSER.parse(parser, new SignificantTextAggregationBuilder(aggregationName, null), null);
    }
 
    protected SignificantTextAggregationBuilder(SignificantTextAggregationBuilder clone,

View File

@@ -22,12 +22,18 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 
 public class ChiSquare extends NXYSignificanceHeuristic {
    public static final String NAME = "chi_square";
+    public static final ConstructingObjectParser<ChiSquare, Void> PARSER = new ConstructingObjectParser<>(
+        NAME, buildFromParsedArgs(ChiSquare::new));
+    static {
+        NXYSignificanceHeuristic.declareParseFields(PARSER);
+    }
 
    public ChiSquare(boolean includeNegatives, boolean backgroundIsSuperset) {
        super(includeNegatives, backgroundIsSuperset);
@@ -84,15 +90,7 @@
        return builder;
    }
 
-    public static final SignificanceHeuristicParser PARSER = new NXYParser() {
-        @Override
-        protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
-            return new ChiSquare(includeNegatives, backgroundIsSuperset);
-        }
-    };
-
    public static class ChiSquareBuilder extends NXYSignificanceHeuristic.NXYBuilder {
        public ChiSquareBuilder(boolean includeNegatives, boolean backgroundIsSuperset) {
            super(includeNegatives, backgroundIsSuperset);
        }

View File

@@ -21,17 +21,24 @@
 package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.QueryShardException;
 
 import java.io.IOException;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
 public class GND extends NXYSignificanceHeuristic {
    public static final String NAME = "gnd";
+    public static final ConstructingObjectParser<GND, Void> PARSER = new ConstructingObjectParser<>(NAME, args -> {
+        boolean backgroundIsSuperset = args[0] == null ? true : (boolean) args[0];
+        return new GND(backgroundIsSuperset);
+    });
+    static {
+        PARSER.declareBoolean(optionalConstructorArg(), BACKGROUND_IS_SUPERSET);
+    }
 
    public GND(boolean backgroundIsSuperset) {
        super(true, backgroundIsSuperset);
@@ -105,33 +112,7 @@
        return builder;
    }
 
-    public static final SignificanceHeuristicParser PARSER = new NXYParser() {
-        @Override
-        protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
-            return new GND(backgroundIsSuperset);
-        }
-
-        @Override
-        public SignificanceHeuristic parse(XContentParser parser) throws IOException, QueryShardException {
-            String givenName = parser.currentName();
-            boolean backgroundIsSuperset = true;
-            XContentParser.Token token = parser.nextToken();
-            while (!token.equals(XContentParser.Token.END_OBJECT)) {
-                if (BACKGROUND_IS_SUPERSET.match(parser.currentName(), parser.getDeprecationHandler())) {
-                    parser.nextToken();
-                    backgroundIsSuperset = parser.booleanValue();
-                } else {
-                    throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]",
-                        givenName, parser.currentName());
-                }
-                token = parser.nextToken();
-            }
-            return newHeuristic(true, backgroundIsSuperset);
-        }
-    };
-
    public static class GNDBuilder extends NXYBuilder {
        public GNDBuilder(boolean backgroundIsSuperset) {
            super(true, backgroundIsSuperset);
        }

View File

@@ -21,17 +21,16 @@
 package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
-import org.elasticsearch.ElasticsearchParseException;
+import java.io.IOException;
+
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.QueryShardException;
-
-import java.io.IOException;
 
 public class JLHScore extends SignificanceHeuristic {
    public static final String NAME = "jlh";
+    public static final ObjectParser<JLHScore, Void> PARSER = new ObjectParser<>(NAME, JLHScore::new);
 
    public JLHScore() {
    }
@@ -103,17 +102,6 @@
        return builder;
    }
 
-    public static SignificanceHeuristic parse(XContentParser parser)
-            throws IOException, QueryShardException {
-        // move to the closing bracket
-        if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
-            throw new ElasticsearchParseException(
-                "failed to parse [jlh] significance heuristic. expected an empty object, but found [{}] instead",
-                parser.currentToken());
-        }
-        return new JLHScore();
-    }
-
    @Override
    public boolean equals(Object obj) {
        if (obj == null || obj.getClass() != getClass()) {

View File

@@ -22,12 +22,18 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
 import java.io.IOException;
 
 public class MutualInformation extends NXYSignificanceHeuristic {
    public static final String NAME = "mutual_information";
+    public static final ConstructingObjectParser<MutualInformation, Void> PARSER = new ConstructingObjectParser<>(
+        NAME, buildFromParsedArgs(MutualInformation::new));
+    static {
+        NXYSignificanceHeuristic.declareParseFields(PARSER);
+    }
 
    private static final double log2 = Math.log(2.0);
@@ -118,15 +124,7 @@
        return builder;
    }
 
-    public static final SignificanceHeuristicParser PARSER = new NXYParser() {
-        @Override
-        protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
-            return new MutualInformation(includeNegatives, backgroundIsSuperset);
-        }
-    };
-
    public static class MutualInformationBuilder extends NXYBuilder {
        public MutualInformationBuilder(boolean includeNegatives, boolean backgroundIsSuperset) {
            super(includeNegatives, backgroundIsSuperset);
        }

View File

@@ -21,15 +21,17 @@
 package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.QueryShardException;
 
 import java.io.IOException;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
@@ -160,34 +162,24 @@
            backgroundIsSuperset);
    }
 
-    public abstract static class NXYParser implements SignificanceHeuristicParser {
-
-        @Override
-        public SignificanceHeuristic parse(XContentParser parser)
-                throws IOException, QueryShardException {
-            String givenName = parser.currentName();
-            boolean includeNegatives = false;
-            boolean backgroundIsSuperset = true;
-            XContentParser.Token token = parser.nextToken();
-            while (!token.equals(XContentParser.Token.END_OBJECT)) {
-                if (INCLUDE_NEGATIVES_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) {
-                    parser.nextToken();
-                    includeNegatives = parser.booleanValue();
-                } else if (BACKGROUND_IS_SUPERSET.match(parser.currentName(), parser.getDeprecationHandler())) {
-                    parser.nextToken();
-                    backgroundIsSuperset = parser.booleanValue();
-                } else {
-                    throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]",
-                        givenName, parser.currentName());
-                }
-                token = parser.nextToken();
-            }
-            return newHeuristic(includeNegatives, backgroundIsSuperset);
-        }
-
-        protected abstract SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset);
+    /**
+     * Set up and {@linkplain ConstructingObjectParser} to accept the standard arguments for an {@linkplain NXYSignificanceHeuristic}.
+     */
+    protected static void declareParseFields(ConstructingObjectParser<? extends NXYSignificanceHeuristic, ?> parser) {
+        parser.declareBoolean(optionalConstructorArg(), INCLUDE_NEGATIVES_FIELD);
+        parser.declareBoolean(optionalConstructorArg(), BACKGROUND_IS_SUPERSET);
    }
 
+    /**
+     * Adapt a standard two argument ctor into one that consumes a {@linkplain ConstructingObjectParser}'s fields.
+     */
+    protected static <T> Function<Object[], T> buildFromParsedArgs(BiFunction<Boolean, Boolean, T> ctor) {
+        return args -> {
+            boolean includeNegatives = args[0] == null ? false : (boolean) args[0];
+            boolean backgroundIsSuperset = args[1] == null ? true : (boolean) args[1];
+            return ctor.apply(includeNegatives, backgroundIsSuperset);
+        };
+    }
+
    protected abstract static class NXYBuilder implements SignificanceHeuristicBuilder {
        protected boolean includeNegatives = true;

View File

@@ -24,6 +24,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryShardException;
@@ -32,6 +33,7 @@ import java.io.IOException;
 
 public class PercentageScore extends SignificanceHeuristic {
    public static final String NAME = "percentage";
+    public static final ObjectParser<PercentageScore, Void> PARSER = new ObjectParser<>(NAME, PercentageScore::new);
 
    public PercentageScore() {
    }

View File

@@ -21,13 +21,11 @@
 package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.SignificantTermsHeuristicScoreScript;
 import org.elasticsearch.search.aggregations.InternalAggregation;
@@ -37,8 +35,15 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
 public class ScriptHeuristic extends SignificanceHeuristic {
    public static final String NAME = "script_heuristic";
+    public static final ConstructingObjectParser<ScriptHeuristic, Void> PARSER = new ConstructingObjectParser<>(NAME, args ->
+        new ScriptHeuristic((Script) args[0]));
+    static {
+        Script.declareScript(PARSER, constructorArg());
+    }
 
    private final Script script;
@@ -153,32 +158,6 @@
        return Objects.equals(script, other.script);
    }
 
-    public static SignificanceHeuristic parse(XContentParser parser)
-            throws IOException, QueryShardException {
-        String heuristicName = parser.currentName();
-        Script script = null;
-        XContentParser.Token token;
-        String currentFieldName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            if (token.equals(XContentParser.Token.FIELD_NAME)) {
-                currentFieldName = parser.currentName();
-            } else {
-                if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
-                    script = Script.parse(parser);
-                } else {
-                    throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]",
-                        heuristicName, currentFieldName);
-                }
-            }
-        }
-        if (script == null) {
-            throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. no script found in script_heuristic",
-                heuristicName);
-        }
-        return new ScriptHeuristic(script);
-    }
-
    public final class LongAccessor extends Number {
        public long value;
        @Override

View File

@@ -1,34 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
-
-import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.xcontent.XContentParser;
-
-import java.io.IOException;
-
-/**
- * Parses {@link SignificanceHeuristic}s from an {@link XContentParser}.
- */
-@FunctionalInterface
-public interface SignificanceHeuristicParser {
-    SignificanceHeuristic parse(XContentParser parser) throws IOException, ParsingException;
-}

View File

@@ -40,8 +40,6 @@ import org.elasticsearch.search.aggregations.BaseAggregationBuilder;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder;
@@ -122,8 +120,8 @@ public class SearchModuleTests extends ESTestCase {
        SearchPlugin registersDupeSignificanceHeuristic = new SearchPlugin() {
            @Override
-            public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
-                return singletonList(new SearchExtensionSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
+            public List<SignificanceHeuristicSpec<?>> getSignificanceHeuristics() {
+                return singletonList(new SignificanceHeuristicSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
            }
        };
        expectThrows(IllegalArgumentException.class, registryForPlugin(registersDupeSignificanceHeuristic));

View File

@@ -25,14 +25,13 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.script.MockScriptPlugin;
@@ -49,7 +48,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
-import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
 import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -178,9 +176,8 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
    public static class CustomSignificanceHeuristicPlugin extends MockScriptPlugin implements SearchPlugin {
        @Override
-        public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
-            return singletonList(new SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>(SimpleHeuristic.NAME,
-                SimpleHeuristic::new, (parser) -> SimpleHeuristic.parse(parser)));
+        public List<SignificanceHeuristicSpec<?>> getSignificanceHeuristics() {
+            return singletonList(new SignificanceHeuristicSpec<>(SimpleHeuristic.NAME, SimpleHeuristic::new, SimpleHeuristic.PARSER));
        }
 
        @Override
@@ -214,6 +211,7 @@
    public static class SimpleHeuristic extends SignificanceHeuristic {
        public static final String NAME = "simple";
+        public static final ObjectParser<SimpleHeuristic, Void> PARSER = new ObjectParser<>(NAME, SimpleHeuristic::new);
 
        public SimpleHeuristic() {
        }
@@ -268,12 +266,6 @@
        public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
            return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0;
        }
-
-        public static SignificanceHeuristic parse(XContentParser parser)
-                throws IOException, QueryShardException {
-            parser.nextToken();
-            return new SimpleHeuristic();
-        }
    }
 
    public void testXContentResponse() throws Exception {

View File

@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParseException;
@ -46,7 +46,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHSc
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestSearchContext; import org.elasticsearch.test.TestSearchContext;
@@ -218,81 +217,62 @@ public class SignificanceHeuristicTests extends ESTestCase {
     // 1. The output of the builders can actually be parsed
     // 2. The parser does not swallow parameters after a significance heuristic was defined
     public void testBuilderAndParser() throws Exception {
-        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
-        ParseFieldRegistry<SignificanceHeuristicParser> heuristicParserMapper = searchModule.getSignificanceHeuristicParserRegistry();
         // test jlh with string
-        assertTrue(parseFromString(heuristicParserMapper, "\"jlh\":{}") instanceof JLHScore);
+        assertTrue(parseFromString("\"jlh\":{}") instanceof JLHScore);
         // test gnd with string
-        assertTrue(parseFromString(heuristicParserMapper, "\"gnd\":{}") instanceof GND);
+        assertTrue(parseFromString("\"gnd\":{}") instanceof GND);
         // test mutual information with string
         boolean includeNegatives = randomBoolean();
         boolean backgroundIsSuperset = randomBoolean();
         String mutual = "\"mutual_information\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":"
             + backgroundIsSuperset + "}";
         assertEquals(new MutualInformation(includeNegatives, backgroundIsSuperset),
-            parseFromString(heuristicParserMapper, mutual));
+            parseFromString(mutual));
         String chiSquare = "\"chi_square\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":"
             + backgroundIsSuperset + "}";
         assertEquals(new ChiSquare(includeNegatives, backgroundIsSuperset),
-            parseFromString(heuristicParserMapper, chiSquare));
+            parseFromString(chiSquare));
         // test with builders
-        assertThat(parseFromBuilder(heuristicParserMapper, new JLHScore()), instanceOf(JLHScore.class));
+        assertThat(parseFromBuilder(new JLHScore()), instanceOf(JLHScore.class));
-        assertThat(parseFromBuilder(heuristicParserMapper, new GND(backgroundIsSuperset)), instanceOf(GND.class));
+        assertThat(parseFromBuilder(new GND(backgroundIsSuperset)), instanceOf(GND.class));
         assertEquals(new MutualInformation(includeNegatives, backgroundIsSuperset),
-            parseFromBuilder(heuristicParserMapper, new MutualInformation(includeNegatives, backgroundIsSuperset)));
+            parseFromBuilder(new MutualInformation(includeNegatives, backgroundIsSuperset)));
         assertEquals(new ChiSquare(includeNegatives, backgroundIsSuperset),
-            parseFromBuilder(heuristicParserMapper, new ChiSquare(includeNegatives, backgroundIsSuperset)));
+            parseFromBuilder(new ChiSquare(includeNegatives, backgroundIsSuperset)));
         // test exceptions
-        String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}";
-        String expectedError = "unknown field [some_unknown_field]";
-        checkParseException(heuristicParserMapper, faultyHeuristicdefinition, expectedError);
-        faultyHeuristicdefinition = "\"chi_square\":{\"unknown_field\": true}";
-        expectedError = "unknown field [unknown_field]";
-        checkParseException(heuristicParserMapper, faultyHeuristicdefinition, expectedError);
-        faultyHeuristicdefinition = "\"jlh\":{\"unknown_field\": true}";
-        expectedError = "expected an empty object, but found ";
-        checkParseException(heuristicParserMapper, faultyHeuristicdefinition, expectedError);
-        faultyHeuristicdefinition = "\"gnd\":{\"unknown_field\": true}";
-        expectedError = "unknown field [unknown_field]";
-        checkParseException(heuristicParserMapper, faultyHeuristicdefinition, expectedError);
+        String expectedError = "unknown field [unknown_field]";
+        checkParseException("\"mutual_information\":{\"include_negatives\": false, \"unknown_field\": false}", expectedError);
+        checkParseException("\"chi_square\":{\"unknown_field\": true}", expectedError);
+        checkParseException("\"jlh\":{\"unknown_field\": true}", expectedError);
+        checkParseException("\"gnd\":{\"unknown_field\": true}", expectedError);
     }
-    protected void checkParseException(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
-                                       String faultyHeuristicDefinition, String expectedError) throws IOException {
+    protected void checkParseException(String faultyHeuristicDefinition, String expectedError) throws IOException {
         try (XContentParser stParser = createParser(JsonXContent.jsonXContent,
                 "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}")) {
             stParser.nextToken();
-            SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", stParser);
+            SignificantTermsAggregationBuilder.parse("testagg", stParser);
             fail();
         } catch (XContentParseException e) {
-            assertThat(e.getCause().getMessage(), containsString(expectedError));
+            assertThat(e.getMessage(), containsString(expectedError));
         }
     }
-    protected SignificanceHeuristic parseFromBuilder(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
-                                                     SignificanceHeuristic significanceHeuristic) throws IOException {
+    protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristic significanceHeuristic) throws IOException {
         SignificantTermsAggregationBuilder stBuilder = significantTerms("testagg");
         stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200);
         XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder();
         stBuilder.internalXContent(stXContentBuilder, null);
         XContentParser stParser = createParser(JsonXContent.jsonXContent, Strings.toString(stXContentBuilder));
-        return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser);
+        return parseSignificanceHeuristic(stParser);
     }
-    private static SignificanceHeuristic parseSignificanceHeuristic(
-            ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
-            XContentParser stParser) throws IOException {
+    private static SignificanceHeuristic parseSignificanceHeuristic(XContentParser stParser) throws IOException {
         stParser.nextToken();
-        SignificantTermsAggregationBuilder aggregatorFactory =
-            (SignificantTermsAggregationBuilder) SignificantTermsAggregationBuilder.getParser(
-                significanceHeuristicParserRegistry).parse("testagg", stParser);
+        SignificantTermsAggregationBuilder aggregatorFactory = SignificantTermsAggregationBuilder.parse("testagg", stParser);
         stParser.nextToken();
         assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
         assertThat(stParser.currentToken(), equalTo(null));
@@ -300,11 +280,10 @@ public class SignificanceHeuristicTests extends ESTestCase {
         return aggregatorFactory.significanceHeuristic();
     }
-    protected SignificanceHeuristic parseFromString(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
-                                                    String heuristicString) throws IOException {
+    protected SignificanceHeuristic parseFromString(String heuristicString) throws IOException {
         try (XContentParser stParser = createParser(JsonXContent.jsonXContent,
                 "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}")) {
-            return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser);
+            return parseSignificanceHeuristic(stParser);
         }
     }
@@ -494,4 +473,9 @@ public class SignificanceHeuristicTests extends ESTestCase {
         gnd = new GND(false);
         assertThat(gnd.getScore(0, 0, 0, 0), equalTo(0.0));
     }
+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents());
+    }
 }
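Two points stand out in the reworked test expectations above. First, every unknown-field failure now expects the same "unknown field [unknown_field]" message, where the old hand-written jlh parser reported "expected an empty object, but found ": with all heuristics parsed by ObjectParser, unrecognized keys are rejected uniformly. Second, the new xContentRegistry() override is what lets the test's createParser resolve the heuristics' named XContent entries. A rough sketch of the equivalent wiring outside the test harness (illustrative; json stands in for the request body):

    // Build a registry from SearchModule's named XContent entries and hand it to a parser,
    // mirroring what the xContentRegistry() override above does for the test's createParser helper.
    NamedXContentRegistry registry =
        new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents());
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(registry, LoggingDeprecationHandler.INSTANCE, json)) {
        // parser can now resolve "jlh", "gnd", "mutual_information", "chi_square", and plugin-registered heuristics by name.
    }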