Normalize registration for SignificanceHeuristics

When I pulled on the thread that is "Remove PROTOTYPEs from
SignificanceHeuristics" I ended up removing SignificanceHeuristicStreams
and replacing it with readNamedWriteable. That seems like a lot at once
but it made sense at the time. And it is what we want in the end, I think.

Anyway, this also converts registration of SignificanceHeuristics to
use ParseFieldRegistry to make them consistent with Queries, Aggregations
and lots of other stuff.

Adds a new and wondrous hack to support serialization checking of
NamedWriteables registered by plugins!

Related to #17085
This commit is contained in:
Nik Everett 2016-04-18 14:25:42 -04:00
parent d62376ce33
commit 65f6f6bc8d
20 changed files with 273 additions and 412 deletions

View File

@ -146,8 +146,14 @@ import org.elasticsearch.search.aggregations.bucket.significant.SignificantStrin
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser;
import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms; import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
@ -273,9 +279,10 @@ public class SearchModule extends AbstractModule {
private final ParseFieldRegistry<PipelineAggregator.Parser> pipelineAggregationParserRegistry = new ParseFieldRegistry<>( private final ParseFieldRegistry<PipelineAggregator.Parser> pipelineAggregationParserRegistry = new ParseFieldRegistry<>(
"pipline_aggregation"); "pipline_aggregation");
private final AggregatorParsers aggregatorParsers = new AggregatorParsers(aggregationParserRegistry, pipelineAggregationParserRegistry); private final AggregatorParsers aggregatorParsers = new AggregatorParsers(aggregationParserRegistry, pipelineAggregationParserRegistry);
private final ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry = new ParseFieldRegistry<>(
"significance_heuristic");
private final Set<Class<? extends FetchSubPhase>> fetchSubPhases = new HashSet<>(); private final Set<Class<? extends FetchSubPhase>> fetchSubPhases = new HashSet<>();
private final Set<SignificanceHeuristicParser> heuristicParsers = new HashSet<>();
private final Set<MovAvgModel.AbstractModelParser> modelParsers = new HashSet<>(); private final Set<MovAvgModel.AbstractModelParser> modelParsers = new HashSet<>();
private final Settings settings; private final Settings settings;
@ -294,6 +301,7 @@ public class SearchModule extends AbstractModule {
registerBuiltinRescorers(); registerBuiltinRescorers();
registerBuiltinSorts(); registerBuiltinSorts();
registerBuiltinValueFormats(); registerBuiltinValueFormats();
registerBuiltinSignificanceHeuristics();
} }
public void registerHighlighter(String key, Class<? extends Highlighter> clazz) { public void registerHighlighter(String key, Class<? extends Highlighter> clazz) {
@ -359,8 +367,25 @@ public class SearchModule extends AbstractModule {
fetchSubPhases.add(subPhase); fetchSubPhases.add(subPhase);
} }
public void registerHeuristicParser(SignificanceHeuristicParser parser) { /**
heuristicParsers.add(parser); * Register a {@link SignificanceHeuristic}.
*
* @param heuristicName the name(s) at which the heuristic is parsed and streamed. The {@link ParseField#getPreferredName()} is the name
* under which it is streamed. All names work for the parser.
* @param reader reads the heuristic from a stream
* @param parser reads the heuristic from a XContentParser
*/
public void registerSignificanceHeuristic(ParseField heuristicName, Writeable.Reader<SignificanceHeuristic> reader,
SignificanceHeuristicParser parser) {
significanceHeuristicParserRegistry.register(parser, heuristicName);
namedWriteableRegistry.register(SignificanceHeuristic.class, heuristicName.getPreferredName(), reader);
}
/**
* The registry of {@link SignificanceHeuristic}s.
*/
public ParseFieldRegistry<SignificanceHeuristicParser> getSignificanceHeuristicParserRegistry() {
return significanceHeuristicParserRegistry;
} }
public void registerModelParser(MovAvgModel.AbstractModelParser parser) { public void registerModelParser(MovAvgModel.AbstractModelParser parser) {
@ -432,11 +457,8 @@ public class SearchModule extends AbstractModule {
} }
protected void configureAggs() { protected void configureAggs() {
MovAvgModelParserMapper movAvgModelParserMapper = new MovAvgModelParserMapper(modelParsers); MovAvgModelParserMapper movAvgModelParserMapper = new MovAvgModelParserMapper(modelParsers);
SignificanceHeuristicParserMapper significanceHeuristicParserMapper = new SignificanceHeuristicParserMapper(heuristicParsers);
registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD); registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD); registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD); registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD);
@ -462,7 +484,7 @@ public class SearchModule extends AbstractModule {
DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD); DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD); registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SignificantTermsAggregatorBuilder::new, registerAggregation(SignificantTermsAggregatorBuilder::new,
new SignificantTermsParser(significanceHeuristicParserMapper, queryParserRegistry), new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry),
SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD); SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD); registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD); registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
@ -581,6 +603,15 @@ public class SearchModule extends AbstractModule {
registerValueFormat(DocValueFormat.RAW.getWriteableName(), in -> DocValueFormat.RAW); registerValueFormat(DocValueFormat.RAW.getWriteableName(), in -> DocValueFormat.RAW);
} }
private void registerBuiltinSignificanceHeuristics() {
registerSignificanceHeuristic(ChiSquare.NAMES_FIELD, ChiSquare::new, ChiSquare.PARSER);
registerSignificanceHeuristic(GND.NAMES_FIELD, GND::new, GND.PARSER);
registerSignificanceHeuristic(JLHScore.NAMES_FIELD, JLHScore::new, JLHScore::parse);
registerSignificanceHeuristic(MutualInformation.NAMES_FIELD, MutualInformation::new, MutualInformation.PARSER);
registerSignificanceHeuristic(PercentageScore.NAMES_FIELD, PercentageScore::new, PercentageScore::parse);
registerSignificanceHeuristic(ScriptHeuristic.NAMES_FIELD, ScriptHeuristic::new, ScriptHeuristic::parse);
}
private void registerBuiltinQueryParsers() { private void registerBuiltinQueryParsers() {
registerQuery(MatchQueryBuilder::new, MatchQueryBuilder::fromXContent, MatchQueryBuilder.QUERY_NAME_FIELD); registerQuery(MatchQueryBuilder::new, MatchQueryBuilder::fromXContent, MatchQueryBuilder.QUERY_NAME_FIELD);
registerQuery(MatchPhraseQueryBuilder::new, MatchPhraseQueryBuilder::fromXContent, MatchPhraseQueryBuilder.QUERY_NAME_FIELD); registerQuery(MatchPhraseQueryBuilder::new, MatchPhraseQueryBuilder::fromXContent, MatchPhraseQueryBuilder.QUERY_NAME_FIELD);

View File

@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext; import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams; import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
@ -207,7 +206,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
this.minDocCount = in.readVLong(); this.minDocCount = in.readVLong();
this.subsetSize = in.readVLong(); this.subsetSize = in.readVLong();
this.supersetSize = in.readVLong(); this.supersetSize = in.readVLong();
significanceHeuristic = SignificanceHeuristicStreams.read(in); significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class);
int size = in.readVInt(); int size = in.readVInt();
List<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(size); List<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(size);
@ -228,7 +227,7 @@ public class SignificantLongTerms extends InternalSignificantTerms<SignificantLo
out.writeVLong(minDocCount); out.writeVLong(minDocCount);
out.writeVLong(subsetSize); out.writeVLong(subsetSize);
out.writeVLong(supersetSize); out.writeVLong(supersetSize);
SignificanceHeuristicStreams.writeTo(significanceHeuristic, out); out.writeNamedWriteable(significanceHeuristic);
out.writeVInt(buckets.size()); out.writeVInt(buckets.size());
for (InternalSignificantTerms.Bucket bucket : buckets) { for (InternalSignificantTerms.Bucket bucket : buckets) {

View File

@ -28,12 +28,10 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.BucketStreamContext; import org.elasticsearch.search.aggregations.bucket.BucketStreamContext;
import org.elasticsearch.search.aggregations.bucket.BucketStreams; import org.elasticsearch.search.aggregations.bucket.BucketStreams;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -197,7 +195,7 @@ public class SignificantStringTerms extends InternalSignificantTerms<Significant
this.minDocCount = in.readVLong(); this.minDocCount = in.readVLong();
this.subsetSize = in.readVLong(); this.subsetSize = in.readVLong();
this.supersetSize = in.readVLong(); this.supersetSize = in.readVLong();
significanceHeuristic = SignificanceHeuristicStreams.read(in); significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class);
int size = in.readVInt(); int size = in.readVInt();
List<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(size); List<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(size);
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
@ -215,7 +213,7 @@ public class SignificantStringTerms extends InternalSignificantTerms<Significant
out.writeVLong(minDocCount); out.writeVLong(minDocCount);
out.writeVLong(subsetSize); out.writeVLong(subsetSize);
out.writeVLong(supersetSize); out.writeVLong(supersetSize);
SignificanceHeuristicStreams.writeTo(significanceHeuristic, out); out.writeNamedWriteable(significanceHeuristic);
out.writeVInt(buckets.size()); out.writeVInt(buckets.size());
for (InternalSignificantTerms.Bucket bucket : buckets) { for (InternalSignificantTerms.Bucket bucket : buckets) {
bucket.writeTo(out); bucket.writeTo(out);

View File

@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
@ -55,12 +54,13 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.BucketCountThresholds( static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.BucketCountThresholds(
3, 0, 10, -1); 3, 0, 10, -1);
static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore();
private IncludeExclude includeExclude = null; private IncludeExclude includeExclude = null;
private String executionHint = null; private String executionHint = null;
private QueryBuilder<?> filterBuilder = null; private QueryBuilder<?> filterBuilder = null;
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS); private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS);
private SignificanceHeuristic significanceHeuristic = JLHScore.PROTOTYPE; private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;
public SignificantTermsAggregatorBuilder(String name, ValueType valueType) { public SignificantTermsAggregatorBuilder(String name, ValueType valueType) {
super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType); super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType);
@ -79,7 +79,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
if (in.readBoolean()) { if (in.readBoolean()) {
includeExclude = IncludeExclude.readFromStream(in); includeExclude = IncludeExclude.readFromStream(in);
} }
significanceHeuristic = SignificanceHeuristicStreams.read(in); significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class);
} }
@Override @Override
@ -96,7 +96,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
if (hasIncExc) { if (hasIncExc) {
includeExclude.writeTo(out); includeExclude.writeTo(out);
} }
SignificanceHeuristicStreams.writeTo(significanceHeuristic, out); out.writeNamedWriteable(significanceHeuristic);
} }
@Override @Override

View File

@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.significant;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
@ -28,7 +29,6 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.bucket.terms.AbstractTermsParser; import org.elasticsearch.search.aggregations.bucket.terms.AbstractTermsParser;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
@ -43,12 +43,12 @@ import java.util.Map;
* *
*/ */
public class SignificantTermsParser extends AbstractTermsParser { public class SignificantTermsParser extends AbstractTermsParser {
private final SignificanceHeuristicParserMapper significanceHeuristicParserMapper; private final ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry;
private final IndicesQueriesRegistry queriesRegistry; private final IndicesQueriesRegistry queriesRegistry;
public SignificantTermsParser(SignificanceHeuristicParserMapper significanceHeuristicParserMapper, public SignificantTermsParser(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
IndicesQueriesRegistry queriesRegistry) { IndicesQueriesRegistry queriesRegistry) {
this.significanceHeuristicParserMapper = significanceHeuristicParserMapper; this.significanceHeuristicParserRegistry = significanceHeuristicParserRegistry;
this.queriesRegistry = queriesRegistry; this.queriesRegistry = queriesRegistry;
} }
@ -81,7 +81,8 @@ public class SignificantTermsParser extends AbstractTermsParser {
public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token, public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token,
String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException { String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {
SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserMapper.get(currentFieldName); SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserRegistry
.lookupReturningNullIfNotFound(currentFieldName, parseFieldMatcher);
if (significanceHeuristicParser != null) { if (significanceHeuristicParser != null) {
SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher); SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher);
otherOptions.put(SignificantTermsAggregatorBuilder.HEURISTIC, significanceHeuristic); otherOptions.put(SignificantTermsAggregatorBuilder.HEURISTIC, significanceHeuristic);

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams; import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import java.io.IOException; import java.io.IOException;
@ -60,7 +59,8 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms<UnmappedS
public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) { public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
//We pass zero for index/subset sizes because for the purpose of significant term analysis //We pass zero for index/subset sizes because for the purpose of significant term analysis
// we assume an unmapped index's size is irrelevant to the proceedings. // we assume an unmapped index's size is irrelevant to the proceedings.
super(0, 0, name, requiredSize, minDocCount, JLHScore.PROTOTYPE, BUCKETS, pipelineAggregators, metaData); super(0, 0, name, requiredSize, minDocCount, SignificantTermsAggregatorBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC, BUCKETS,
pipelineAggregators, metaData);
} }
@Override @Override

View File

@ -28,15 +28,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException; import java.io.IOException;
public class ChiSquare extends NXYSignificanceHeuristic { public class ChiSquare extends NXYSignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("chi_square");
static final ChiSquare PROTOTYPE = new ChiSquare(false, false);
protected static final ParseField NAMES_FIELD = new ParseField("chi_square");
public ChiSquare(boolean includeNegatives, boolean backgroundIsSuperset) { public ChiSquare(boolean includeNegatives, boolean backgroundIsSuperset) {
super(includeNegatives, backgroundIsSuperset); super(includeNegatives, backgroundIsSuperset);
} }
/**
* Read from a stream.
*/
public ChiSquare(StreamInput in) throws IOException {
super(in);
}
@Override @Override
public boolean equals(Object other) { public boolean equals(Object other) {
if (!(other instanceof ChiSquare)) { if (!(other instanceof ChiSquare)) {
@ -73,11 +77,6 @@ public class ChiSquare extends NXYSignificanceHeuristic {
return NAMES_FIELD.getPreferredName(); return NAMES_FIELD.getPreferredName();
} }
@Override
public SignificanceHeuristic readFrom(StreamInput in) throws IOException {
return new ChiSquare(in.readBoolean(), in.readBoolean());
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()); builder.startObject(NAMES_FIELD.getPreferredName());
@ -86,18 +85,12 @@ public class ChiSquare extends NXYSignificanceHeuristic {
return builder; return builder;
} }
public static class ChiSquareParser extends NXYParser { public static final SignificanceHeuristicParser PARSER = new NXYParser() {
@Override @Override
protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) { protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
return new ChiSquare(includeNegatives, backgroundIsSuperset); return new ChiSquare(includeNegatives, backgroundIsSuperset);
} }
};
@Override
public String[] getNames() {
return NAMES_FIELD.getAllNamesIncludedDeprecated();
}
}
public static class ChiSquareBuilder extends NXYSignificanceHeuristic.NXYBuilder { public static class ChiSquareBuilder extends NXYSignificanceHeuristic.NXYBuilder {

View File

@ -33,15 +33,23 @@ import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
public class GND extends NXYSignificanceHeuristic { public class GND extends NXYSignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("gnd");
static final GND PROTOTYPE = new GND(false);
protected static final ParseField NAMES_FIELD = new ParseField("gnd");
public GND(boolean backgroundIsSuperset) { public GND(boolean backgroundIsSuperset) {
super(true, backgroundIsSuperset); super(true, backgroundIsSuperset);
} }
/**
* Read from a stream.
*/
public GND(StreamInput in) throws IOException {
super(true, in.readBoolean());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(backgroundIsSuperset);
}
@Override @Override
public boolean equals(Object other) { public boolean equals(Object other) {
@ -91,16 +99,6 @@ public class GND extends NXYSignificanceHeuristic {
return NAMES_FIELD.getPreferredName(); return NAMES_FIELD.getPreferredName();
} }
@Override
public SignificanceHeuristic readFrom(StreamInput in) throws IOException {
return new GND(in.readBoolean());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(backgroundIsSuperset);
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()); builder.startObject(NAMES_FIELD.getPreferredName());
@ -109,13 +107,7 @@ public class GND extends NXYSignificanceHeuristic {
return builder; return builder;
} }
public static class GNDParser extends NXYParser { public static final SignificanceHeuristicParser PARSER = new NXYParser() {
@Override
public String[] getNames() {
return NAMES_FIELD.getAllNamesIncludedDeprecated();
}
@Override @Override
protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) { protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
return new GND(backgroundIsSuperset); return new GND(backgroundIsSuperset);
@ -138,8 +130,7 @@ public class GND extends NXYSignificanceHeuristic {
} }
return newHeuristic(true, backgroundIsSuperset); return newHeuristic(true, backgroundIsSuperset);
} }
};
}
public static class GNDBuilder extends NXYBuilder { public static class GNDBuilder extends NXYBuilder {

View File

@ -33,22 +33,16 @@ import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
public class JLHScore extends SignificanceHeuristic { public class JLHScore extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("jlh");
public static final JLHScore PROTOTYPE = new JLHScore();
protected static final ParseField NAMES_FIELD = new ParseField("jlh");
public JLHScore() { public JLHScore() {
} }
@Override /**
public String getWriteableName() { * Read from a stream.
return NAMES_FIELD.getPreferredName(); */
} public JLHScore(StreamInput in) {
// Nothing to read.
@Override
public SignificanceHeuristic readFrom(StreamInput in) throws IOException {
return PROTOTYPE;
} }
@Override @Override
@ -56,9 +50,8 @@ public class JLHScore extends SignificanceHeuristic {
} }
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public String getWriteableName() {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject(); return NAMES_FIELD.getPreferredName();
return builder;
} }
/** /**
@ -106,22 +99,34 @@ public class JLHScore extends SignificanceHeuristic {
return absoluteProbabilityChange * relativeProbabilityChange; return absoluteProbabilityChange * relativeProbabilityChange;
} }
public static class JLHScoreParser implements SignificanceHeuristicParser { @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
return builder;
}
@Override public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryShardException {
throws IOException, QueryShardException { // move to the closing bracket
// move to the closing bracket if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) { throw new ElasticsearchParseException(
throw new ElasticsearchParseException("failed to parse [jlh] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken()); "failed to parse [jlh] significance heuristic. expected an empty object, but found [{}] instead",
} parser.currentToken());
return PROTOTYPE;
} }
return new JLHScore();
}
@Override @Override
public String[] getNames() { public boolean equals(Object obj) {
return NAMES_FIELD.getAllNamesIncludedDeprecated(); if (obj == null || obj.getClass() != getClass()) {
return false;
} }
return true;
}
@Override
public int hashCode() {
return getClass().hashCode();
} }
public static class JLHScoreBuilder implements SignificanceHeuristicBuilder { public static class JLHScoreBuilder implements SignificanceHeuristicBuilder {

View File

@ -28,10 +28,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException; import java.io.IOException;
public class MutualInformation extends NXYSignificanceHeuristic { public class MutualInformation extends NXYSignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("mutual_information");
static final MutualInformation PROTOTYPE = new MutualInformation(false, false);
protected static final ParseField NAMES_FIELD = new ParseField("mutual_information");
private static final double log2 = Math.log(2.0); private static final double log2 = Math.log(2.0);
@ -39,6 +36,14 @@ public class MutualInformation extends NXYSignificanceHeuristic {
super(includeNegatives, backgroundIsSuperset); super(includeNegatives, backgroundIsSuperset);
} }
/**
* Read from a stream.
*/
public MutualInformation(StreamInput in) throws IOException {
super(in);
}
@Override @Override
public boolean equals(Object other) { public boolean equals(Object other) {
if (!(other instanceof MutualInformation)) { if (!(other instanceof MutualInformation)) {
@ -106,11 +111,6 @@ public class MutualInformation extends NXYSignificanceHeuristic {
return NAMES_FIELD.getPreferredName(); return NAMES_FIELD.getPreferredName();
} }
@Override
public SignificanceHeuristic readFrom(StreamInput in) throws IOException {
return new MutualInformation(in.readBoolean(), in.readBoolean());
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()); builder.startObject(NAMES_FIELD.getPreferredName());
@ -119,18 +119,12 @@ public class MutualInformation extends NXYSignificanceHeuristic {
return builder; return builder;
} }
public static class MutualInformationParser extends NXYParser { public static SignificanceHeuristicParser PARSER = new NXYParser() {
@Override @Override
protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) { protected SignificanceHeuristic newHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
return new MutualInformation(includeNegatives, backgroundIsSuperset); return new MutualInformation(includeNegatives, backgroundIsSuperset);
} }
};
@Override
public String[] getNames() {
return NAMES_FIELD.getAllNamesIncludedDeprecated();
}
}
public static class MutualInformationBuilder extends NXYBuilder { public static class MutualInformationBuilder extends NXYBuilder {

View File

@ -24,6 +24,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -48,11 +49,19 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
*/ */
protected final boolean includeNegatives; protected final boolean includeNegatives;
public NXYSignificanceHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) { protected NXYSignificanceHeuristic(boolean includeNegatives, boolean backgroundIsSuperset) {
this.includeNegatives = includeNegatives; this.includeNegatives = includeNegatives;
this.backgroundIsSuperset = backgroundIsSuperset; this.backgroundIsSuperset = backgroundIsSuperset;
} }
/**
* Read from a stream.
*/
protected NXYSignificanceHeuristic(StreamInput in) throws IOException {
includeNegatives = in.readBoolean();
backgroundIsSuperset = in.readBoolean();
}
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(includeNegatives); out.writeBoolean(includeNegatives);

View File

@ -33,34 +33,39 @@ import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException; import java.io.IOException;
public class PercentageScore extends SignificanceHeuristic { public class PercentageScore extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("percentage");
public static final PercentageScore PROTOTYPE = new PercentageScore();
protected static final ParseField NAMES_FIELD = new ParseField("percentage");
public PercentageScore() { public PercentageScore() {
} }
public PercentageScore(StreamInput in) {
// Nothing to read.
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override @Override
public String getWriteableName() { public String getWriteableName() {
return NAMES_FIELD.getPreferredName(); return NAMES_FIELD.getPreferredName();
} }
@Override
public SignificanceHeuristic readFrom(StreamInput in) throws IOException {
return PROTOTYPE;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()).endObject(); builder.startObject(NAMES_FIELD.getPreferredName()).endObject();
return builder; return builder;
} }
public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
throws IOException, QueryShardException {
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
}
return new PercentageScore();
}
/** /**
* Indicates the significance of a term in a sample by determining what percentage * Indicates the significance of a term in a sample by determining what percentage
* of all occurrences of a term are found in the sample. * of all occurrences of a term are found in the sample.
@ -73,24 +78,19 @@ public class PercentageScore extends SignificanceHeuristic {
return 0; return 0;
} }
return (double) subsetFreq / (double) supersetFreq; return (double) subsetFreq / (double) supersetFreq;
} }
public static class PercentageScoreParser implements SignificanceHeuristicParser { @Override
public boolean equals(Object obj) {
@Override if (obj == null || obj.getClass() != getClass()) {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) return false;
throws IOException, QueryShardException {
// move to the closing bracket
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
}
return PROTOTYPE;
} }
return true;
}
@Override @Override
public String[] getNames() { public int hashCode() {
return NAMES_FIELD.getAllNamesIncludedDeprecated(); return getClass().hashCode();
}
} }
public static class PercentageScoreBuilder implements SignificanceHeuristicBuilder { public static class PercentageScoreBuilder implements SignificanceHeuristicBuilder {

View File

@ -47,16 +47,14 @@ import java.util.Map;
import java.util.Objects; import java.util.Objects;
public class ScriptHeuristic extends SignificanceHeuristic { public class ScriptHeuristic extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("script_heuristic");
static final ScriptHeuristic PROTOTYPE = new ScriptHeuristic(null);
protected static final ParseField NAMES_FIELD = new ParseField("script_heuristic");
private final LongAccessor subsetSizeHolder; private final LongAccessor subsetSizeHolder;
private final LongAccessor supersetSizeHolder; private final LongAccessor supersetSizeHolder;
private final LongAccessor subsetDfHolder; private final LongAccessor subsetDfHolder;
private final LongAccessor supersetDfHolder; private final LongAccessor supersetDfHolder;
private final Script script;
ExecutableScript searchScript = null; ExecutableScript searchScript = null;
Script script;
public ScriptHeuristic(Script script) { public ScriptHeuristic(Script script) {
subsetSizeHolder = new LongAccessor(); subsetSizeHolder = new LongAccessor();
@ -64,8 +62,18 @@ public class ScriptHeuristic extends SignificanceHeuristic {
subsetDfHolder = new LongAccessor(); subsetDfHolder = new LongAccessor();
supersetDfHolder = new LongAccessor(); supersetDfHolder = new LongAccessor();
this.script = script; this.script = script;
}
/**
* Read from a stream.
*/
public ScriptHeuristic(StreamInput in) throws IOException {
this(Script.readScript(in));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
script.writeTo(out);
} }
@Override @Override
@ -117,17 +125,6 @@ public class ScriptHeuristic extends SignificanceHeuristic {
return NAMES_FIELD.getPreferredName(); return NAMES_FIELD.getPreferredName();
} }
@Override
public SignificanceHeuristic readFrom(StreamInput in) throws IOException {
Script script = Script.readScript(in);
return new ScriptHeuristic(script);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
script.writeTo(out);
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params builderParams) throws IOException {
builder.startObject(NAMES_FIELD.getPreferredName()); builder.startObject(NAMES_FIELD.getPreferredName());
@ -154,58 +151,46 @@ public class ScriptHeuristic extends SignificanceHeuristic {
return Objects.equals(script, other.script); return Objects.equals(script, other.script);
} }
public static class ScriptHeuristicParser implements SignificanceHeuristicParser { public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
throws IOException, QueryShardException {
public ScriptHeuristicParser() { String heuristicName = parser.currentName();
} Script script = null;
XContentParser.Token token;
@Override Map<String, Object> params = null;
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) String currentFieldName = null;
throws IOException, QueryShardException { ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
String heuristicName = parser.currentName(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
Script script = null; if (token.equals(XContentParser.Token.FIELD_NAME)) {
XContentParser.Token token; currentFieldName = parser.currentName();
Map<String, Object> params = null; } else if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null; if (parseFieldMatcher.match(currentFieldName, ScriptField.SCRIPT)) {
ScriptParameterParser scriptParameterParser = new ScriptParameterParser(); script = Script.parse(parser, parseFieldMatcher);
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { } else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
if (token.equals(XContentParser.Token.FIELD_NAME)) { params = parser.map();
currentFieldName = parser.currentName(); } else {
} else if (token == XContentParser.Token.START_OBJECT) { throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
if (parseFieldMatcher.match(currentFieldName, ScriptField.SCRIPT)) {
script = Script.parse(parser, parseFieldMatcher);
} else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
params = parser.map();
} else {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown object [{}]", heuristicName, currentFieldName);
}
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseFieldMatcher)) {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]", heuristicName, currentFieldName);
} }
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseFieldMatcher)) {
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. unknown field [{}]", heuristicName, currentFieldName);
} }
}
if (script == null) { // Didn't find anything using the new API so try using the old one instead if (script == null) { // Didn't find anything using the new API so try using the old one instead
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue(); ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
if (scriptValue != null) { if (scriptValue != null) {
if (params == null) { if (params == null) {
params = new HashMap<>(); params = new HashMap<>();
}
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
} }
} else if (params != null) { script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. script params must be specified inside script object", heuristicName);
} }
} else if (params != null) {
if (script == null) { throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. script params must be specified inside script object", heuristicName);
throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. no script found in script_heuristic", heuristicName);
}
return new ScriptHeuristic(script);
} }
@Override if (script == null) {
public String[] getNames() { throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. no script found in script_heuristic", heuristicName);
return NAMES_FIELD.getAllNamesIncludedDeprecated();
} }
return new ScriptHeuristic(script);
} }
public static class ScriptHeuristicBuilder implements SignificanceHeuristicBuilder { public static class ScriptHeuristicBuilder implements SignificanceHeuristicBuilder {

View File

@ -26,10 +26,11 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException; import java.io.IOException;
/**
* Parses {@link SignificanceHeuristic}s from an {@link XContentParser}.
*/
@FunctionalInterface
public interface SignificanceHeuristicParser { public interface SignificanceHeuristicParser {
SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException,
ParsingException; ParsingException;
String[] getNames();
} }

View File

@ -1,58 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.inject.Inject;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class SignificanceHeuristicParserMapper {
protected final Map<String, SignificanceHeuristicParser> significanceHeuristicParsers;
@Inject
public SignificanceHeuristicParserMapper(Set<SignificanceHeuristicParser> parsers) {
Map<String, SignificanceHeuristicParser> map = new HashMap<>();
add(map, new JLHScore.JLHScoreParser());
add(map, new PercentageScore.PercentageScoreParser());
add(map, new MutualInformation.MutualInformationParser());
add(map, new ChiSquare.ChiSquareParser());
add(map, new GND.GNDParser());
add(map, new ScriptHeuristic.ScriptHeuristicParser());
for (SignificanceHeuristicParser parser : parsers) {
add(map, parser);
}
significanceHeuristicParsers = Collections.unmodifiableMap(map);
}
public SignificanceHeuristicParser get(String parserName) {
return significanceHeuristicParsers.get(parserName);
}
private void add(Map<String, SignificanceHeuristicParser> map, SignificanceHeuristicParser parser) {
for (String type : parser.getNames()) {
map.put(type, parser);
}
}
}

View File

@ -1,93 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* A registry for all significance heuristics. This is needed for reading them from a stream without knowing which
* one it is.
*/
public class SignificanceHeuristicStreams {
private static Map<String, SignificanceHeuristic> STREAMS = Collections.emptyMap();
static {
HashMap<String, SignificanceHeuristic> map = new HashMap<>();
map.put(JLHScore.NAMES_FIELD.getPreferredName(), JLHScore.PROTOTYPE);
map.put(PercentageScore.NAMES_FIELD.getPreferredName(), PercentageScore.PROTOTYPE);
map.put(MutualInformation.NAMES_FIELD.getPreferredName(), MutualInformation.PROTOTYPE);
map.put(GND.NAMES_FIELD.getPreferredName(), GND.PROTOTYPE);
map.put(ChiSquare.NAMES_FIELD.getPreferredName(), ChiSquare.PROTOTYPE);
map.put(ScriptHeuristic.NAMES_FIELD.getPreferredName(), ScriptHeuristic.PROTOTYPE);
STREAMS = Collections.unmodifiableMap(map);
}
public static SignificanceHeuristic read(StreamInput in) throws IOException {
return stream(in.readString()).readFrom(in);
}
public static void writeTo(SignificanceHeuristic significanceHeuristic, StreamOutput out) throws IOException {
out.writeString(significanceHeuristic.getWriteableName());
significanceHeuristic.writeTo(out);
}
/**
* A stream that knows how to read an heuristic from the input.
*/
public static interface Stream {
SignificanceHeuristic readResult(StreamInput in) throws IOException;
String getName();
}
/**
* Registers the given prototype.
*
* @param prototype
* The prototype to register
*/
public static synchronized void registerPrototype(SignificanceHeuristic prototype) {
if (STREAMS.containsKey(prototype.getWriteableName())) {
throw new IllegalArgumentException("Can't register stream with name [" + prototype.getWriteableName() + "] more than once");
}
HashMap<String, SignificanceHeuristic> map = new HashMap<>();
map.putAll(STREAMS);
map.put(prototype.getWriteableName(), prototype);
STREAMS = Collections.unmodifiableMap(map);
}
/**
* Returns the stream that is registered for the given name
*
* @param name The given name
* @return The associated stream
*/
private static synchronized SignificanceHeuristic stream(String name) {
return STREAMS.get(name);
}
}

View File

@ -45,8 +45,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicStreams;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
@ -63,11 +61,11 @@ import java.util.concurrent.ExecutionException;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms;
import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThan;
@ -89,6 +87,11 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
return pluginList(CustomSignificanceHeuristicPlugin.class); return pluginList(CustomSignificanceHeuristicPlugin.class);
} }
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return pluginList(CustomSignificanceHeuristicPlugin.class);
}
public String randomExecutionHint() { public String randomExecutionHint() {
return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString(); return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString();
} }
@ -162,11 +165,6 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
} }
public static class CustomSignificanceHeuristicPlugin extends Plugin { public static class CustomSignificanceHeuristicPlugin extends Plugin {
static {
SignificanceHeuristicStreams.registerPrototype(SimpleHeuristic.PROTOTYPE);
}
@Override @Override
public String name() { public String name() {
return "test-plugin-significance-heuristic"; return "test-plugin-significance-heuristic";
@ -177,9 +175,10 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
return "Significance heuristic plugin"; return "Significance heuristic plugin";
} }
public void onModule(SearchModule significanceModule) { public void onModule(SearchModule searchModule) {
significanceModule.registerHeuristicParser(new SimpleHeuristic.SimpleHeuristicParser()); searchModule.registerSignificanceHeuristic(SimpleHeuristic.NAMES_FIELD, SimpleHeuristic::new, SimpleHeuristic::parse);
} }
public void onModule(ScriptModule module) { public void onModule(ScriptModule module) {
module.registerScript(NativeSignificanceScoreScriptNoParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_NO_PARAMS, NativeSignificanceScoreScriptNoParams.Factory.class); module.registerScript(NativeSignificanceScoreScriptNoParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_NO_PARAMS, NativeSignificanceScoreScriptNoParams.Factory.class);
module.registerScript(NativeSignificanceScoreScriptWithParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_WITH_PARAMS, NativeSignificanceScoreScriptWithParams.Factory.class); module.registerScript(NativeSignificanceScoreScriptWithParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_WITH_PARAMS, NativeSignificanceScoreScriptWithParams.Factory.class);
@ -187,23 +186,26 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
} }
public static class SimpleHeuristic extends SignificanceHeuristic { public static class SimpleHeuristic extends SignificanceHeuristic {
public static final ParseField NAMES_FIELD = new ParseField("simple");
static final SimpleHeuristic PROTOTYPE = new SimpleHeuristic(); public SimpleHeuristic() {
protected static final ParseField NAMES_FIELD = new ParseField("simple");
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
} }
@Override /**
public SignificanceHeuristic readFrom(StreamInput in) throws IOException { * Read from a stream.
return new SimpleHeuristic(); */
public SimpleHeuristic(StreamInput in) throws IOException {
// Nothing to read
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
// Nothing to write
}
@Override
public String getWriteableName() {
return NAMES_FIELD.getPreferredName();
} }
@Override @Override
@ -240,19 +242,10 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0; return subsetFreq / subsetSize > supersetFreq / supersetSize ? 2.0 : 1.0;
} }
public static class SimpleHeuristicParser implements SignificanceHeuristicParser { public static SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher)
throws IOException, QueryShardException {
@Override parser.nextToken();
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) return new SimpleHeuristic();
throws IOException, QueryShardException {
parser.nextToken();
return new SimpleHeuristic();
}
@Override
public String[] getNames() {
return NAMES_FIELD.getAllNamesIncludedDeprecated();
}
} }
} }

View File

@ -193,7 +193,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTe
SignificanceHeuristic significanceHeuristic = null; SignificanceHeuristic significanceHeuristic = null;
switch (randomInt(5)) { switch (randomInt(5)) {
case 0: case 0:
significanceHeuristic = PercentageScore.PROTOTYPE; significanceHeuristic = new PercentageScore();
break; break;
case 1: case 1:
significanceHeuristic = new ChiSquare(randomBoolean(), randomBoolean()); significanceHeuristic = new ChiSquare(randomBoolean(), randomBoolean());
@ -208,7 +208,7 @@ public class SignificantTermsTests extends BaseAggregationTestCase<SignificantTe
significanceHeuristic = new ScriptHeuristic(new Script("foo")); significanceHeuristic = new ScriptHeuristic(new Script("foo"));
break; break;
case 5: case 5:
significanceHeuristic = JLHScore.PROTOTYPE; significanceHeuristic = new JLHScore();
break; break;
default: default:
fail(); fail();

View File

@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -47,7 +48,6 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Mutua
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -60,9 +60,7 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set;
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms;
import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.elasticsearch.test.VersionUtils.randomVersion;
@ -143,7 +141,7 @@ public class SignificanceHeuristicTests extends ESTestCase {
SignificanceHeuristic getRandomSignificanceheuristic() { SignificanceHeuristic getRandomSignificanceheuristic() {
List<SignificanceHeuristic> heuristics = new ArrayList<>(); List<SignificanceHeuristic> heuristics = new ArrayList<>();
heuristics.add(JLHScore.PROTOTYPE); heuristics.add(new JLHScore());
heuristics.add(new MutualInformation(randomBoolean(), randomBoolean())); heuristics.add(new MutualInformation(randomBoolean(), randomBoolean()));
heuristics.add(new GND(randomBoolean())); heuristics.add(new GND(randomBoolean()));
heuristics.add(new ChiSquare(randomBoolean(), randomBoolean())); heuristics.add(new ChiSquare(randomBoolean(), randomBoolean()));
@ -204,9 +202,8 @@ public class SignificanceHeuristicTests extends ESTestCase {
// 1. The output of the builders can actually be parsed // 1. The output of the builders can actually be parsed
// 2. The parser does not swallow parameters after a significance heuristic was defined // 2. The parser does not swallow parameters after a significance heuristic was defined
public void testBuilderAndParser() throws Exception { public void testBuilderAndParser() throws Exception {
SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
Set<SignificanceHeuristicParser> parsers = new HashSet<>(); ParseFieldRegistry<SignificanceHeuristicParser> heuristicParserMapper = searchModule.getSignificanceHeuristicParserRegistry();
SignificanceHeuristicParserMapper heuristicParserMapper = new SignificanceHeuristicParserMapper(parsers);
SearchContext searchContext = new SignificantTermsTestSearchContext(); SearchContext searchContext = new SignificantTermsTestSearchContext();
// test jlh with string // test jlh with string
@ -243,37 +240,39 @@ public class SignificanceHeuristicTests extends ESTestCase {
checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError); checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError);
} }
protected void checkParseException(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, protected void checkParseException(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
String faultyHeuristicDefinition, String expectedError) throws IOException { SearchContext searchContext, String faultyHeuristicDefinition, String expectedError) throws IOException {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); IndicesQueriesRegistry registry = new IndicesQueriesRegistry();
try { try {
XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}");
QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT); QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT);
stParser.nextToken(); stParser.nextToken();
new SignificantTermsParser(heuristicParserMapper, registry).parse("testagg", parseContext); new SignificantTermsParser(significanceHeuristicParserRegistry, registry).parse("testagg", parseContext);
fail(); fail();
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertTrue(e.getMessage().contains(expectedError)); assertTrue(e.getMessage().contains(expectedError));
} }
} }
protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, SignificanceHeuristic significanceHeuristic) throws IOException { protected SignificanceHeuristic parseFromBuilder(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
SearchContext searchContext, SignificanceHeuristic significanceHeuristic) throws IOException {
SignificantTermsAggregatorBuilder stBuilder = significantTerms("testagg"); SignificantTermsAggregatorBuilder stBuilder = significantTerms("testagg");
stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200); stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200);
XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder(); XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder();
stBuilder.internalXContent(stXContentBuilder, null); stBuilder.internalXContent(stXContentBuilder, null);
XContentParser stParser = JsonXContent.jsonXContent.createParser(stXContentBuilder.string()); XContentParser stParser = JsonXContent.jsonXContent.createParser(stXContentBuilder.string());
return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser); return parseSignificanceHeuristic(significanceHeuristicParserRegistry, searchContext, stParser);
} }
private SignificanceHeuristic parseSignificanceHeuristic(SignificanceHeuristicParserMapper heuristicParserMapper, private SignificanceHeuristic parseSignificanceHeuristic(
SearchContext searchContext, XContentParser stParser) throws IOException { ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry, SearchContext searchContext,
XContentParser stParser) throws IOException {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); IndicesQueriesRegistry registry = new IndicesQueriesRegistry();
QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT); QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT);
stParser.nextToken(); stParser.nextToken();
SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser( SignificantTermsAggregatorBuilder aggregatorFactory = (SignificantTermsAggregatorBuilder) new SignificantTermsParser(
heuristicParserMapper, registry).parse("testagg", parseContext); significanceHeuristicParserRegistry, registry).parse("testagg", parseContext);
stParser.nextToken(); stParser.nextToken();
assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L)); assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200L));
assertThat(stParser.currentToken(), equalTo(null)); assertThat(stParser.currentToken(), equalTo(null));
@ -281,9 +280,10 @@ public class SignificanceHeuristicTests extends ESTestCase {
return aggregatorFactory.significanceHeuristic(); return aggregatorFactory.significanceHeuristic();
} }
protected SignificanceHeuristic parseFromString(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, String heuristicString) throws IOException { protected SignificanceHeuristic parseFromString(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry,
SearchContext searchContext, String heuristicString) throws IOException {
XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}");
return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser); return parseSignificanceHeuristic(significanceHeuristicParserRegistry, searchContext, stParser);
} }
void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) { void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) {
@ -389,14 +389,14 @@ public class SignificanceHeuristicTests extends ESTestCase {
testBackgroundAssertions(new MutualInformation(true, true), new MutualInformation(true, false)); testBackgroundAssertions(new MutualInformation(true, true), new MutualInformation(true, false));
testBackgroundAssertions(new ChiSquare(true, true), new ChiSquare(true, false)); testBackgroundAssertions(new ChiSquare(true, true), new ChiSquare(true, false));
testBackgroundAssertions(new GND(true), new GND(false)); testBackgroundAssertions(new GND(true), new GND(false));
testAssertions(PercentageScore.PROTOTYPE); testAssertions(new PercentageScore());
testAssertions(JLHScore.PROTOTYPE); testAssertions(new JLHScore());
} }
public void testBasicScoreProperties() { public void testBasicScoreProperties() {
basicScoreProperties(JLHScore.PROTOTYPE, true); basicScoreProperties(new JLHScore(), true);
basicScoreProperties(new GND(true), true); basicScoreProperties(new GND(true), true);
basicScoreProperties(PercentageScore.PROTOTYPE, true); basicScoreProperties(new PercentageScore(), true);
basicScoreProperties(new MutualInformation(true, true), false); basicScoreProperties(new MutualInformation(true, true), false);
basicScoreProperties(new ChiSquare(true, true), false); basicScoreProperties(new ChiSquare(true, true), false);
} }

View File

@ -62,6 +62,7 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.rest.client.http.HttpResponse; import org.elasticsearch.test.rest.client.http.HttpResponse;
import org.hamcrest.CoreMatchers; import org.hamcrest.CoreMatchers;
@ -647,8 +648,19 @@ public class ElasticsearchAssertions {
} }
public static void assertVersionSerializable(Version version, Streamable streamable) { public static void assertVersionSerializable(Version version, Streamable streamable) {
NamedWriteableRegistry registry = new NamedWriteableRegistry(); /*
new SearchModule(Settings.EMPTY, registry); // populates the registry through side effects * If possible we fetch the NamedWriteableRegistry from the test cluster. That is the only way to make sure that we properly handle
* when plugins register names. If not possible we'll try and set up a registry based on whatever SearchModule registers. But that
* is a hack at best - it only covers some things. If you end up with errors below and get to this comment I'm sorry. Please find
* a way that sucks less.
*/
NamedWriteableRegistry registry;
if (ESIntegTestCase.isInternalCluster()) {
registry = ESIntegTestCase.internalCluster().getInstance(NamedWriteableRegistry.class);
} else {
registry = new NamedWriteableRegistry();
new SearchModule(Settings.EMPTY, registry);
}
assertVersionSerializable(version, streamable, registry); assertVersionSerializable(version, streamable, registry);
} }