Remove PROTOTYPEs from suggesters

Also stops using Guice for suggesters entirely and fixes enough Checkstyle line-length violations to drop several suppressions.
Nik Everett 2016-03-28 11:06:03 -04:00
parent 101a32573c
commit df08854c60
67 changed files with 643 additions and 900 deletions
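For orientation, this is roughly how a suggester is wired up after this change: a plugin hands a single Suggester instance to SearchModule, which registers it both as the XContent parser and as the wire reader for its SuggestionBuilder, with no Guice binding and no prototype registration. A sketch under assumed names (MyPlugin and MySuggester are illustrative; the onModule hook is the plugin extension mechanism of the time and may differ in detail):

    // Hypothetical plugin class; only the registration call is taken from this commit.
    public class MyPlugin extends Plugin {
        public void onModule(SearchModule module) {
            // Registers the parser and, via Suggesters#register, the
            // SuggestionBuilder reader in the NamedWriteableRegistry.
            module.registerSuggester("my_suggester", MySuggester.INSTANCE);
        }
    }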

View File

@ -860,19 +860,14 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestBuilder.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestContextParser.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestContextParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestUtils.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestUtils.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]Suggesters.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggestParser.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]ContextMapping.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]ContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoContextMapping.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoQueryContext.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoQueryContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]CandidateScorer.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]CandidateScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]DirectCandidateGenerator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]LaplaceScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]LinearInterpoatingScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellChecker.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellChecker.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestParser.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]StupidBackoffScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]WordScorer.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]WordScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggestParser.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RestoreService.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RestoreService.java" checks="LineLength" />
@ -1377,7 +1372,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggester.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]DirectCandidateGeneratorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellCheckerTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellCheckerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]similarity[/\\]SimilarityIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]similarity[/\\]SimilarityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]AbstractSnapshotIntegTestCase.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]AbstractSnapshotIntegTestCase.java" checks="LineLength" />

View File

@ -115,6 +115,19 @@ public abstract class StreamInput extends InputStream {
return readBytesReference(length); return readBytesReference(length);
} }
/**
* Reads an optional bytes reference from this stream. It might hold an actual reference to the underlying bytes of the stream. Use this
* only if you must differentiate null from empty. Use {@link StreamInput#readBytesReference()} and
* {@link StreamOutput#writeBytesReference(BytesReference)} if you do not.
*/
public BytesReference readOptionalBytesReference() throws IOException {
int length = readVInt() - 1;
if (length < 0) {
return null;
}
return readBytesReference(length);
}
/** /**
* Reads a bytes reference from this stream, might hold an actual reference to the underlying * Reads a bytes reference from this stream, might hold an actual reference to the underlying
* bytes of the stream. * bytes of the stream.

View File

@ -146,6 +146,19 @@ public abstract class StreamOutput extends OutputStream {
bytes.writeTo(this); bytes.writeTo(this);
} }
/**
* Writes an optional bytes reference including a length header. Use this if you need to differentiate between null and empty bytes
* references. Use {@link #writeBytesReference(BytesReference)} and {@link StreamInput#readBytesReference()} if you do not.
*/
public void writeOptionalBytesReference(@Nullable BytesReference bytes) throws IOException {
if (bytes == null) {
writeVInt(0);
return;
}
writeVInt(bytes.length() + 1);
bytes.writeTo(this);
}
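Both optional-bytes helpers encode null as a single VInt 0 and a present value as VInt length+1 followed by the raw bytes, so null and empty stay distinguishable. A minimal round-trip sketch (the BytesStreamOutput and BytesArray helpers are assumed, not part of this change):

    BytesStreamOutput out = new BytesStreamOutput();
    out.writeOptionalBytesReference(null);                  // encoded as a single VInt 0
    out.writeOptionalBytesReference(new BytesArray("abc")); // encoded as VInt 4 followed by the bytes

    StreamInput in = out.bytes().streamInput();
    assert in.readOptionalBytesReference() == null;         // null comes back as null, not empty
    assert in.readOptionalBytesReference().length() == 3;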
public void writeBytesRef(BytesRef bytes) throws IOException { public void writeBytesRef(BytesRef bytes) throws IOException {
if (bytes == null) { if (bytes == null) {
writeVInt(0); writeVInt(0);

View File

@ -46,7 +46,8 @@ public interface Writeable<T> extends StreamableReader<T> { // TODO remove exten
@Override @Override
default T readFrom(StreamInput in) throws IOException { default T readFrom(StreamInput in) throws IOException {
// See class javadoc for reasoning // See class javadoc for reasoning
throw new UnsupportedOperationException("Prefer calling a constructor that takes a StreamInput to calling readFrom."); throw new UnsupportedOperationException(
"Prefer calling a constructor or static method that takes a StreamInput to calling readFrom.");
} }
/** /**
@ -56,6 +57,9 @@ public interface Writeable<T> extends StreamableReader<T> { // TODO remove exten
*/ */
@FunctionalInterface @FunctionalInterface
interface Reader<R> { interface Reader<R> {
R read(StreamInput t) throws IOException; /**
* Read R from a stream.
*/
R read(StreamInput in) throws IOException;
} }
} }
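Reader being a functional interface is what lets the rest of this commit replace readFrom prototypes with constructor references: any class that gains a read-from-stream constructor satisfies Reader<T> via T::new. A sketch of how that is used (the readOptionalWriteable call mirrors CompletionSuggestionBuilder below; in is an assumed StreamInput):

    // A constructor reference is a Writeable.Reader.
    Writeable.Reader<FuzzyOptions> reader = FuzzyOptions::new;
    // Null-safe variant used for optional sub-objects.
    FuzzyOptions fuzzyOptions = in.readOptionalWriteable(FuzzyOptions::new);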

View File

@ -249,7 +249,7 @@ public class SearchModule extends AbstractModule {
private final Set<Aggregator.Parser> aggParsers = new HashSet<>(); private final Set<Aggregator.Parser> aggParsers = new HashSet<>();
private final Set<PipelineAggregator.Parser> pipelineAggParsers = new HashSet<>(); private final Set<PipelineAggregator.Parser> pipelineAggParsers = new HashSet<>();
private final Highlighters highlighters = new Highlighters(); private final Highlighters highlighters = new Highlighters();
private final Suggesters suggesters = new Suggesters(); private final Suggesters suggesters;
/** /**
* Function score parsers constructed on registration. This is ok because * Function score parsers constructed on registration. This is ok because
* they don't have any dependencies. * they don't have any dependencies.
@ -274,6 +274,7 @@ public class SearchModule extends AbstractModule {
public SearchModule(Settings settings, NamedWriteableRegistry namedWriteableRegistry) { public SearchModule(Settings settings, NamedWriteableRegistry namedWriteableRegistry) {
this.settings = settings; this.settings = settings;
this.namedWriteableRegistry = namedWriteableRegistry; this.namedWriteableRegistry = namedWriteableRegistry;
suggesters = new Suggesters(namedWriteableRegistry);
registerBuiltinFunctionScoreParsers(); registerBuiltinFunctionScoreParsers();
registerBuiltinQueryParsers(); registerBuiltinQueryParsers();
@ -286,8 +287,7 @@ public class SearchModule extends AbstractModule {
} }
public void registerSuggester(String key, Suggester<?> suggester) { public void registerSuggester(String key, Suggester<?> suggester) {
suggesters.registerExtension(key, suggester.getClass()); suggesters.register(key, suggester);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, suggester.getBuilderPrototype());
} }
/** /**
@ -337,10 +337,10 @@ public class SearchModule extends AbstractModule {
protected void configure() { protected void configure() {
IndicesQueriesRegistry indicesQueriesRegistry = buildQueryParserRegistry(); IndicesQueriesRegistry indicesQueriesRegistry = buildQueryParserRegistry();
bind(IndicesQueriesRegistry.class).toInstance(indicesQueriesRegistry); bind(IndicesQueriesRegistry.class).toInstance(indicesQueriesRegistry);
bind(Suggesters.class).toInstance(suggesters);
configureSearch(); configureSearch();
configureAggs(indicesQueriesRegistry); configureAggs(indicesQueriesRegistry);
configureHighlighters(); configureHighlighters();
configureSuggesters();
configureFetchSubPhase(); configureFetchSubPhase();
configureShapes(); configureShapes();
} }
@ -378,16 +378,6 @@ public class SearchModule extends AbstractModule {
return new IndicesQueriesRegistry(settings, queryParsersMap); return new IndicesQueriesRegistry(settings, queryParsersMap);
} }
protected void configureSuggesters() {
suggesters.bind(binder());
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
}
protected void configureHighlighters() { protected void configureHighlighters() {
highlighters.bind(binder()); highlighters.bind(binder());
} }
@ -615,4 +605,8 @@ public class SearchModule extends AbstractModule {
BucketSelectorPipelineAggregator.registerStreams(); BucketSelectorPipelineAggregator.registerStreams();
SerialDiffPipelineAggregator.registerStreams(); SerialDiffPipelineAggregator.registerStreams();
} }
public Suggesters getSuggesters() {
return suggesters;
}
} }

View File

@ -36,15 +36,12 @@ public enum SortBy implements Writeable<SortBy> {
/** Sort should first be based on document frequency, then score and then the term itself. */ /** Sort should first be based on document frequency, then score and then the term itself. */
FREQUENCY; FREQUENCY;
public static SortBy PROTOTYPE = SCORE;
@Override @Override
public void writeTo(final StreamOutput out) throws IOException { public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(ordinal()); out.writeVInt(ordinal());
} }
@Override public static SortBy readFromStream(final StreamInput in) throws IOException {
public SortBy readFrom(final StreamInput in) throws IOException {
int ordinal = in.readVInt(); int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) { if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown SortBy ordinal [" + ordinal + "]"); throw new IOException("Unknown SortBy ordinal [" + ordinal + "]");

View File

@ -21,18 +21,20 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.index.query.QueryParseContext;
import java.io.IOException; import java.io.IOException;
public abstract class Suggester<T extends SuggestionSearchContext.SuggestionContext> { public abstract class Suggester<T extends SuggestionSearchContext.SuggestionContext> implements Writeable.Reader<SuggestionBuilder<?>> {
protected abstract Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> protected abstract Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException; innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException;
/** /**
* link the suggester to its corresponding {@link SuggestionBuilder} * Read the SuggestionBuilder paired with this Suggester XContent.
*/ */
public abstract SuggestionBuilder<? extends SuggestionBuilder> getBuilderPrototype(); public abstract SuggestionBuilder<?> innerFromXContent(QueryParseContext context) throws IOException;
public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {

View File

@ -18,52 +18,50 @@
*/ */
package org.elasticsearch.search.suggest; package org.elasticsearch.search.suggest;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester; import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggester; import org.elasticsearch.search.suggest.term.TermSuggester;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
/** /**
* *
*/ */
public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> { public final class Suggesters {
private final Map<String, Suggester> parsers; private final Map<String, Suggester<?>> suggesters = new HashMap<>();
private final NamedWriteableRegistry namedWriteableRegistry;
public Suggesters() { public Suggesters(NamedWriteableRegistry namedWriteableRegistry) {
this(Collections.emptyMap()); this.namedWriteableRegistry = namedWriteableRegistry;
register("phrase", PhraseSuggester.INSTANCE);
register("term", TermSuggester.INSTANCE);
register("completion", CompletionSuggester.INSTANCE);
// Builtin smoothing models
namedWriteableRegistry.register(SmoothingModel.class, Laplace.NAME, Laplace::new);
namedWriteableRegistry.register(SmoothingModel.class, LinearInterpolation.NAME, LinearInterpolation::new);
namedWriteableRegistry.register(SmoothingModel.class, StupidBackoff.NAME, StupidBackoff::new);
} }
@Inject public void register(String key, Suggester<?> suggester) {
public Suggesters(Map<String, Suggester> suggesters) { if (suggesters.containsKey(key)) {
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestPhase.class); throw new IllegalArgumentException("Can't register the same [suggester] more than once for [" + key + "]");
this.parsers = Collections.unmodifiableMap(addBuildIns(suggesters)); }
suggesters.put(key, suggester);
namedWriteableRegistry.register(SuggestionBuilder.class, key, suggester);
} }
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters) { public Suggester<?> getSuggester(String suggesterName) {
final Map<String, Suggester> map = new HashMap<>(); Suggester<?> suggester = suggesters.get(suggesterName);
map.put("phrase", PhraseSuggester.PROTOTYPE);
map.put("term", TermSuggester.PROTOTYPE);
map.put("completion", CompletionSuggester.PROTOTYPE);
map.putAll(suggesters);
return map;
}
public SuggestionBuilder<? extends SuggestionBuilder> getSuggestionPrototype(String suggesterName) {
Suggester<?> suggester = parsers.get(suggesterName);
if (suggester == null) { if (suggester == null) {
throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported"); throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported");
} }
SuggestionBuilder<?> suggestParser = suggester.getBuilderPrototype(); return suggester;
if (suggestParser == null) {
throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported");
}
return suggestParser;
} }
} }
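The registry entries created in the constructor and in register are what the read side keys on: the name written to the wire ("laplace", "term", and so on) selects the registered reader. A sketch of the deserializing side, assuming the StreamInput is wrapped so it knows about the same NamedWriteableRegistry:

    // Requires a NamedWriteableAwareStreamInput bound to the registry above.
    SmoothingModel model = in.readNamedWriteable(SmoothingModel.class);
    SuggestionBuilder<?> suggestion = in.readNamedWriteable(SuggestionBuilder.class);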

View File

@ -86,6 +86,34 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
shardSize = in.shardSize; shardSize = in.shardSize;
} }
/**
* Read from a stream.
*/
protected SuggestionBuilder(StreamInput in) throws IOException {
field = in.readString();
text = in.readOptionalString();
prefix = in.readOptionalString();
regex = in.readOptionalString();
analyzer = in.readOptionalString();
size = in.readOptionalVInt();
shardSize = in.readOptionalVInt();
}
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
out.writeOptionalString(text);
out.writeOptionalString(prefix);
out.writeOptionalString(regex);
out.writeOptionalString(analyzer);
out.writeOptionalVInt(size);
out.writeOptionalVInt(shardSize);
doWriteTo(out);
}
protected abstract void doWriteTo(StreamOutput out) throws IOException;
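Concrete builders now follow a fixed split: the base class serializes the common fields (field, text, prefix, regex, analyzer, size, shardSize) and each subclass adds a StreamInput constructor that calls super(in) plus a doWriteTo for its own state. A sketch of a minimal subclass under assumed names (MySuggestionBuilder and extraOption are illustrative; the remaining abstract methods are elided):

    public class MySuggestionBuilder extends SuggestionBuilder<MySuggestionBuilder> {
        private String extraOption;

        /** Read from a stream; the common fields are consumed by super(in) first. */
        public MySuggestionBuilder(StreamInput in) throws IOException {
            super(in);
            extraOption = in.readOptionalString();
        }

        @Override
        protected void doWriteTo(StreamOutput out) throws IOException {
            out.writeOptionalString(extraOption);
        }

        // getWriteableName(), build(), toXContent and the equality plumbing are omitted in this sketch.
    }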
/** /**
* Same as in {@link SuggestBuilder#setGlobalText(String)}, but in the suggestion scope. * Same as in {@link SuggestBuilder#setGlobalText(String)}, but in the suggestion scope.
*/ */
@ -251,11 +279,7 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
throw new ParsingException(parser.getTokenLocation(), "suggestion does not support [" + currentFieldName + "]"); throw new ParsingException(parser.getTokenLocation(), "suggestion does not support [" + currentFieldName + "]");
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
SuggestionBuilder<?> suggestParser = suggesters.getSuggestionPrototype(currentFieldName); suggestionBuilder = suggesters.getSuggester(currentFieldName).innerFromXContent(parseContext);
if (suggestParser == null) {
throw new ParsingException(parser.getTokenLocation(), "suggestion [" + currentFieldName + "] not supported");
}
suggestionBuilder = suggestParser.innerFromXContent(parseContext);
} }
} }
if (suggestionBuilder == null) { if (suggestionBuilder == null) {
@ -273,8 +297,6 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
return suggestionBuilder; return suggestionBuilder;
} }
protected abstract SuggestionBuilder<T> innerFromXContent(QueryParseContext parseContext) throws IOException;
protected abstract SuggestionContext build(QueryShardContext context) throws IOException; protected abstract SuggestionContext build(QueryShardContext context) throws IOException;
/** /**
@ -340,40 +362,6 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
return getWriteableName(); return getWriteableName();
} }
@Override
public final T readFrom(StreamInput in) throws IOException {
String field = in.readString();
T suggestionBuilder = doReadFrom(in, field);
suggestionBuilder.text = in.readOptionalString();
suggestionBuilder.prefix = in.readOptionalString();
suggestionBuilder.regex = in.readOptionalString();
suggestionBuilder.analyzer = in.readOptionalString();
suggestionBuilder.size = in.readOptionalVInt();
suggestionBuilder.shardSize = in.readOptionalVInt();
return suggestionBuilder;
}
/**
* Subclass should return a new instance, reading itself from the input string
* @param in the input string to read from
* @param field the field needed for ctor or concrete suggestion
*/
protected abstract T doReadFrom(StreamInput in, String field) throws IOException;
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
doWriteTo(out);
out.writeOptionalString(text);
out.writeOptionalString(prefix);
out.writeOptionalString(regex);
out.writeOptionalString(analyzer);
out.writeOptionalVInt(size);
out.writeOptionalVInt(shardSize);
}
protected abstract void doWriteTo(StreamOutput out) throws IOException;
@Override @Override
public final boolean equals(Object obj) { public final boolean equals(Object obj) {
if (this == obj) { if (this == obj) {

View File

@ -30,12 +30,14 @@ import org.apache.lucene.search.suggest.document.TopSuggestDocs;
import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector; import org.apache.lucene.search.suggest.document.TopSuggestDocsCollector;
import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggester;
@ -52,7 +54,9 @@ import java.util.Set;
public class CompletionSuggester extends Suggester<CompletionSuggestionContext> { public class CompletionSuggester extends Suggester<CompletionSuggestionContext> {
public static final CompletionSuggester PROTOTYPE = new CompletionSuggester(); public static final CompletionSuggester INSTANCE = new CompletionSuggester();
private CompletionSuggester() {}
@Override @Override
protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name, protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name,
@ -267,7 +271,12 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
} }
@Override @Override
public SuggestionBuilder<?> getBuilderPrototype() { public SuggestionBuilder<?> innerFromXContent(QueryParseContext context) throws IOException {
return CompletionSuggestionBuilder.PROTOTYPE; return CompletionSuggestionBuilder.innerFromXContent(context);
}
@Override
public SuggestionBuilder<?> read(StreamInput in) throws IOException {
return new CompletionSuggestionBuilder(in);
} }
} }

View File

@ -40,7 +40,6 @@ import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings; import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -57,8 +56,6 @@ import java.util.Objects;
* indexing. * indexing.
*/ */
public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSuggestionBuilder> { public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSuggestionBuilder> {
public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder("_na_");
static final String SUGGESTION_NAME = "completion"; static final String SUGGESTION_NAME = "completion";
static final ParseField PAYLOAD_FIELD = new ParseField("payload"); static final ParseField PAYLOAD_FIELD = new ParseField("payload");
static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context"); static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
@ -124,6 +121,26 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
payloadFields = in.payloadFields; payloadFields = in.payloadFields;
} }
/**
* Read from a stream.
*/
public CompletionSuggestionBuilder(StreamInput in) throws IOException {
super(in);
payloadFields = new ArrayList<>();
Collections.addAll(payloadFields, in.readStringArray());
fuzzyOptions = in.readOptionalWriteable(FuzzyOptions::new);
regexOptions = in.readOptionalWriteable(RegexOptions::new);
contextBytes = in.readOptionalBytesReference();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeStringArray(payloadFields.toArray(new String[payloadFields.size()]));
out.writeOptionalWriteable(fuzzyOptions);
out.writeOptionalWriteable(regexOptions);
out.writeOptionalBytesReference(contextBytes);
}
/** /**
* Sets the prefix to provide completions for. * Sets the prefix to provide completions for.
* The prefix gets analyzed by the suggest analyzer. * The prefix gets analyzed by the suggest analyzer.
@ -188,12 +205,12 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
* see {@link org.elasticsearch.search.suggest.completion.context.CategoryQueryContext} * see {@link org.elasticsearch.search.suggest.completion.context.CategoryQueryContext}
* and {@link org.elasticsearch.search.suggest.completion.context.GeoQueryContext} * and {@link org.elasticsearch.search.suggest.completion.context.GeoQueryContext}
*/ */
public CompletionSuggestionBuilder contexts(Map<String, List<? extends QueryContext>> queryContexts) { public CompletionSuggestionBuilder contexts(Map<String, List<? extends ToXContent>> queryContexts) {
Objects.requireNonNull(queryContexts, "contexts must not be null"); Objects.requireNonNull(queryContexts, "contexts must not be null");
try { try {
XContentBuilder contentBuilder = XContentFactory.jsonBuilder(); XContentBuilder contentBuilder = XContentFactory.jsonBuilder();
contentBuilder.startObject(); contentBuilder.startObject();
for (Map.Entry<String, List<? extends QueryContext>> contextEntry : queryContexts.entrySet()) { for (Map.Entry<String, List<? extends ToXContent>> contextEntry : queryContexts.entrySet()) {
contentBuilder.startArray(contextEntry.getKey()); contentBuilder.startArray(contextEntry.getKey());
for (ToXContent queryContext : contextEntry.getValue()) { for (ToXContent queryContext : contextEntry.getValue()) {
queryContext.toXContent(contentBuilder, EMPTY_PARAMS); queryContext.toXContent(contentBuilder, EMPTY_PARAMS);
@ -244,8 +261,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
return builder; return builder;
} }
@Override static CompletionSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
protected CompletionSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
CompletionSuggestionBuilder.InnerBuilder builder = new CompletionSuggestionBuilder.InnerBuilder(); CompletionSuggestionBuilder.InnerBuilder builder = new CompletionSuggestionBuilder.InnerBuilder();
TLP_PARSER.parse(parseContext.parser(), builder); TLP_PARSER.parse(parseContext.parser(), builder);
String field = builder.field; String field = builder.field;
@ -303,52 +319,6 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
return SUGGESTION_NAME; return SUGGESTION_NAME;
} }
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeBoolean(payloadFields.isEmpty() == false);
if (payloadFields.isEmpty() == false) {
out.writeVInt(payloadFields.size());
for (String payloadField : payloadFields) {
out.writeString(payloadField);
}
}
out.writeBoolean(fuzzyOptions != null);
if (fuzzyOptions != null) {
fuzzyOptions.writeTo(out);
}
out.writeBoolean(regexOptions != null);
if (regexOptions != null) {
regexOptions.writeTo(out);
}
out.writeBoolean(contextBytes != null);
if (contextBytes != null) {
out.writeBytesReference(contextBytes);
}
}
@Override
public CompletionSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
CompletionSuggestionBuilder completionSuggestionBuilder = new CompletionSuggestionBuilder(field);
if (in.readBoolean()) {
int numPayloadField = in.readVInt();
List<String> payloadFields = new ArrayList<>(numPayloadField);
for (int i = 0; i < numPayloadField; i++) {
payloadFields.add(in.readString());
}
completionSuggestionBuilder.payloadFields = payloadFields;
}
if (in.readBoolean()) {
completionSuggestionBuilder.fuzzyOptions = FuzzyOptions.readFuzzyOptions(in);
}
if (in.readBoolean()) {
completionSuggestionBuilder.regexOptions = RegexOptions.readRegexOptions(in);
}
if (in.readBoolean()) {
completionSuggestionBuilder.contextBytes = in.readBytesReference();
}
return completionSuggestionBuilder;
}
@Override @Override
protected boolean doEquals(CompletionSuggestionBuilder other) { protected boolean doEquals(CompletionSuggestionBuilder other) {
return Objects.equals(payloadFields, other.payloadFields) && return Objects.equals(payloadFields, other.payloadFields) &&

View File

@ -36,7 +36,7 @@ import java.util.Map;
public class CompletionSuggestionContext extends SuggestionSearchContext.SuggestionContext { public class CompletionSuggestionContext extends SuggestionSearchContext.SuggestionContext {
protected CompletionSuggestionContext(QueryShardContext shardContext) { protected CompletionSuggestionContext(QueryShardContext shardContext) {
super(CompletionSuggester.PROTOTYPE, shardContext); super(CompletionSuggester.INSTANCE, shardContext);
} }
private CompletionFieldMapper.CompletionFieldType fieldType; private CompletionFieldMapper.CompletionFieldType fieldType;

View File

@ -89,7 +89,26 @@ public class FuzzyOptions implements ToXContent, Writeable<FuzzyOptions> {
this.maxDeterminizedStates = maxDeterminizedStates; this.maxDeterminizedStates = maxDeterminizedStates;
} }
private FuzzyOptions() { /**
* Read from a stream.
*/
FuzzyOptions(StreamInput in) throws IOException {
transpositions = in.readBoolean();
unicodeAware = in.readBoolean();
editDistance = in.readVInt();
fuzzyMinLength = in.readVInt();
fuzzyPrefixLength = in.readVInt();
maxDeterminizedStates = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(transpositions);
out.writeBoolean(unicodeAware);
out.writeVInt(editDistance);
out.writeVInt(fuzzyMinLength);
out.writeVInt(fuzzyPrefixLength);
out.writeVInt(maxDeterminizedStates);
} }
static FuzzyOptions parse(XContentParser parser) throws IOException { static FuzzyOptions parse(XContentParser parser) throws IOException {
@ -185,33 +204,6 @@ public class FuzzyOptions implements ToXContent, Writeable<FuzzyOptions> {
return builder; return builder;
} }
public static FuzzyOptions readFuzzyOptions(StreamInput in) throws IOException {
FuzzyOptions fuzzyOptions = new FuzzyOptions();
fuzzyOptions.readFrom(in);
return fuzzyOptions;
}
@Override
public FuzzyOptions readFrom(StreamInput in) throws IOException {
this.transpositions = in.readBoolean();
this.unicodeAware = in.readBoolean();
this.editDistance = in.readVInt();
this.fuzzyMinLength = in.readVInt();
this.fuzzyPrefixLength = in.readVInt();
this.maxDeterminizedStates = in.readVInt();
return this;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(transpositions);
out.writeBoolean(unicodeAware);
out.writeVInt(editDistance);
out.writeVInt(fuzzyMinLength);
out.writeVInt(fuzzyPrefixLength);
out.writeVInt(maxDeterminizedStates);
}
/** /**
* Options for fuzzy queries * Options for fuzzy queries
*/ */

View File

@ -67,14 +67,25 @@ public class RegexOptions implements ToXContent, Writeable<RegexOptions> {
private int flagsValue; private int flagsValue;
private int maxDeterminizedStates; private int maxDeterminizedStates;
private RegexOptions() {
}
private RegexOptions(int flagsValue, int maxDeterminizedStates) { private RegexOptions(int flagsValue, int maxDeterminizedStates) {
this.flagsValue = flagsValue; this.flagsValue = flagsValue;
this.maxDeterminizedStates = maxDeterminizedStates; this.maxDeterminizedStates = maxDeterminizedStates;
} }
/**
* Read from a stream.
*/
RegexOptions(StreamInput in) throws IOException {
this.flagsValue = in.readVInt();
this.maxDeterminizedStates = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(flagsValue);
out.writeVInt(maxDeterminizedStates);
}
/** /**
* Returns internal regular expression syntax flag value * Returns internal regular expression syntax flag value
* see {@link RegexpFlag#value()} * see {@link RegexpFlag#value()}
@ -126,25 +137,6 @@ public class RegexOptions implements ToXContent, Writeable<RegexOptions> {
return builder; return builder;
} }
public static RegexOptions readRegexOptions(StreamInput in) throws IOException {
RegexOptions regexOptions = new RegexOptions();
regexOptions.readFrom(in);
return regexOptions;
}
@Override
public RegexOptions readFrom(StreamInput in) throws IOException {
this.flagsValue = in.readVInt();
this.maxDeterminizedStates = in.readVInt();
return this;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(flagsValue);
out.writeVInt(maxDeterminizedStates);
}
/** /**
* Options for regular expression queries * Options for regular expression queries
*/ */

View File

@ -139,8 +139,8 @@ public class CategoryContextMapping extends ContextMapping<CategoryQueryContext>
} }
@Override @Override
protected CategoryQueryContext prototype() { protected CategoryQueryContext fromXContent(XContentParser parser) throws IOException {
return CategoryQueryContext.PROTOTYPE; return CategoryQueryContext.fromXContent(parser);
} }
/** /**

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.suggest.completion.context;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -35,7 +36,7 @@ import static org.elasticsearch.search.suggest.completion.context.CategoryContex
/** /**
* Defines the query context for {@link CategoryContextMapping} * Defines the query context for {@link CategoryContextMapping}
*/ */
public final class CategoryQueryContext implements QueryContext { public final class CategoryQueryContext implements ToXContent {
public static final String NAME = "category"; public static final String NAME = "category";
public static final CategoryQueryContext PROTOTYPE = new CategoryQueryContext("", 1, false); public static final CategoryQueryContext PROTOTYPE = new CategoryQueryContext("", 1, false);
@ -102,8 +103,7 @@ public final class CategoryQueryContext implements QueryContext {
CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField(CONTEXT_PREFIX)); CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField(CONTEXT_PREFIX));
} }
@Override public static CategoryQueryContext fromXContent(XContentParser parser) throws IOException {
public CategoryQueryContext fromXContext(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
Builder builder = builder(); Builder builder = builder();
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {

View File

@ -40,7 +40,7 @@ import java.util.Set;
* *
* Implementations have to define how contexts are parsed at query/index time * Implementations have to define how contexts are parsed at query/index time
*/ */
public abstract class ContextMapping<T extends QueryContext> implements ToXContent { public abstract class ContextMapping<T extends ToXContent> implements ToXContent {
public static final String FIELD_TYPE = "type"; public static final String FIELD_TYPE = "type";
public static final String FIELD_NAME = "name"; public static final String FIELD_NAME = "name";
@ -99,7 +99,7 @@ public abstract class ContextMapping<T extends QueryContext> implements ToXConte
/** /**
* Prototype for the query context * Prototype for the query context
*/ */
protected abstract T prototype(); protected abstract T fromXContent(XContentParser parser) throws IOException;
/** /**
* Parses query contexts for this mapper * Parses query contexts for this mapper
@ -108,10 +108,10 @@ public abstract class ContextMapping<T extends QueryContext> implements ToXConte
List<T> queryContexts = new ArrayList<>(); List<T> queryContexts = new ArrayList<>();
Token token = parser.nextToken(); Token token = parser.nextToken();
if (token == Token.START_OBJECT || token == Token.VALUE_STRING) { if (token == Token.START_OBJECT || token == Token.VALUE_STRING) {
queryContexts.add((T) prototype().fromXContext(parser)); queryContexts.add(fromXContent(parser));
} else if (token == Token.START_ARRAY) { } else if (token == Token.START_ARRAY) {
while (parser.nextToken() != Token.END_ARRAY) { while (parser.nextToken() != Token.END_ARRAY) {
queryContexts.add((T) prototype().fromXContext(parser)); queryContexts.add(fromXContent(parser));
} }
} }
return toInternalQueryContexts(queryContexts); return toInternalQueryContexts(queryContexts);

View File

@ -223,8 +223,8 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
} }
@Override @Override
protected GeoQueryContext prototype() { protected GeoQueryContext fromXContent(XContentParser parser) throws IOException {
return GeoQueryContext.PROTOTYPE; return GeoQueryContext.fromXContent(parser);
} }
/** /**

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -40,7 +41,7 @@ import static org.elasticsearch.search.suggest.completion.context.GeoContextMapp
/** /**
* Defines the query context for {@link GeoContextMapping} * Defines the query context for {@link GeoContextMapping}
*/ */
public final class GeoQueryContext implements QueryContext { public final class GeoQueryContext implements ToXContent {
public static final String NAME = "geo"; public static final String NAME = "geo";
public static final GeoQueryContext PROTOTYPE = new GeoQueryContext(null, 1, 12, Collections.emptyList()); public static final GeoQueryContext PROTOTYPE = new GeoQueryContext(null, 1, 12, Collections.emptyList());
@ -123,8 +124,7 @@ public final class GeoQueryContext implements QueryContext {
GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLon, new ParseField("lon")); GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLon, new ParseField("lon"));
} }
@Override public static GeoQueryContext fromXContent(XContentParser parser) throws IOException {
public GeoQueryContext fromXContext(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
GeoQueryContext.Builder builder = new Builder(); GeoQueryContext.Builder builder = new Builder();
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {

View File

@ -1,33 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion.context;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
* Interface for serializing/de-serializing completion query context
*/
public interface QueryContext extends ToXContent {
QueryContext fromXContext(XContentParser parser) throws IOException;
}

View File

@ -40,8 +40,11 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
//TODO public for tests import static java.lang.Math.log10;
public final class DirectCandidateGenerator extends CandidateGenerator { import static java.lang.Math.max;
import static java.lang.Math.round;
final class DirectCandidateGenerator extends CandidateGenerator {
private final DirectSpellChecker spellchecker; private final DirectSpellChecker spellchecker;
private final String field; private final String field;
@ -59,12 +62,13 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
private final BytesRefBuilder byteSpare = new BytesRefBuilder(); private final BytesRefBuilder byteSpare = new BytesRefBuilder();
private final int numCandidates; private final int numCandidates;
public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader, double nonErrorLikelihood, int numCandidates) throws IOException { public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader,
this(spellchecker, field, suggestMode, reader, nonErrorLikelihood, numCandidates, null, null, MultiFields.getTerms(reader, field)); double nonErrorLikelihood, int numCandidates) throws IOException {
this(spellchecker, field, suggestMode, reader, nonErrorLikelihood, numCandidates, null, null, MultiFields.getTerms(reader, field));
} }
public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader,
public DirectCandidateGenerator(DirectSpellChecker spellchecker, String field, SuggestMode suggestMode, IndexReader reader, double nonErrorLikelihood, int numCandidates, Analyzer preFilter, Analyzer postFilter, Terms terms) throws IOException { double nonErrorLikelihood, int numCandidates, Analyzer preFilter, Analyzer postFilter, Terms terms) throws IOException {
if (terms == null) { if (terms == null) {
throw new IllegalArgumentException("generator field [" + field + "] doesn't exist"); throw new IllegalArgumentException("generator field [" + field + "] doesn't exist");
} }
@ -113,9 +117,6 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
return field; return field;
} }
/* (non-Javadoc)
* @see org.elasticsearch.search.suggest.phrase.CandidateGenerator#drawCandidates(org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.CandidateSet, int)
*/
@Override @Override
public CandidateSet drawCandidates(CandidateSet set) throws IOException { public CandidateSet drawCandidates(CandidateSet set) throws IOException {
Candidate original = set.originalTerm; Candidate original = set.originalTerm;
@ -127,7 +128,8 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
for (int i = 0; i < suggestSimilar.length; i++) { for (int i = 0; i < suggestSimilar.length; i++) {
SuggestWord suggestWord = suggestSimilar[i]; SuggestWord suggestWord = suggestSimilar[i];
BytesRef candidate = new BytesRef(suggestWord.string); BytesRef candidate = new BytesRef(suggestWord.string);
postFilter(new Candidate(candidate, internalFrequency(candidate), suggestWord.score, score(suggestWord.freq, suggestWord.score, dictSize), false), spare, byteSpare, candidates); postFilter(new Candidate(candidate, internalFrequency(candidate), suggestWord.score,
score(suggestWord.freq, suggestWord.score, dictSize), false), spare, byteSpare, candidates);
} }
set.addCandidates(candidates); set.addCandidates(candidates);
return set; return set;
@ -148,7 +150,8 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
return result.get(); return result.get();
} }
protected void postFilter(final Candidate candidate, final CharsRefBuilder spare, BytesRefBuilder byteSpare, final List<Candidate> candidates) throws IOException { protected void postFilter(final Candidate candidate, final CharsRefBuilder spare, BytesRefBuilder byteSpare,
final List<Candidate> candidates) throws IOException {
if (postFilter == null) { if (postFilter == null) {
candidates.add(candidate); candidates.add(candidate);
} else { } else {
@ -163,9 +166,11 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
// We should not use frequency(term) here because it will analyze the term again // We should not use frequency(term) here because it will analyze the term again
// If preFilter and postFilter are the same analyzer it would fail. // If preFilter and postFilter are the same analyzer it would fail.
long freq = internalFrequency(term); long freq = internalFrequency(term);
candidates.add(new Candidate(result.toBytesRef(), freq, candidate.stringDistance, score(candidate.frequency, candidate.stringDistance, dictSize), false)); candidates.add(new Candidate(result.toBytesRef(), freq, candidate.stringDistance,
score(candidate.frequency, candidate.stringDistance, dictSize), false));
} else { } else {
candidates.add(new Candidate(result.toBytesRef(), candidate.frequency, nonErrorLikelihood, score(candidate.frequency, candidate.stringDistance, dictSize), false)); candidates.add(new Candidate(result.toBytesRef(), candidate.frequency, nonErrorLikelihood,
score(candidate.frequency, candidate.stringDistance, dictSize), false));
} }
} }
}, spare); }, spare);
@ -178,7 +183,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
protected long thresholdFrequency(long termFrequency, long dictionarySize) { protected long thresholdFrequency(long termFrequency, long dictionarySize) {
if (termFrequency > 0) { if (termFrequency > 0) {
return Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1)); return max(0, round(termFrequency * (log10(termFrequency - frequencyPlateau) * (1.0 / log10(logBase))) + 1));
} }
return 0; return 0;
@ -232,8 +237,11 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
@Override @Override
public String toString() { public String toString() {
return "Candidate [term=" + term.utf8ToString() + ", stringDistance=" + stringDistance + ", score=" + score + ", frequency=" + frequency + return "Candidate [term=" + term.utf8ToString()
(userInput ? ", userInput" : "" ) + "]"; + ", stringDistance=" + stringDistance
+ ", score=" + score
+ ", frequency=" + frequency
+ (userInput ? ", userInput" : "") + "]";
} }
@Override @Override

View File

@ -44,7 +44,6 @@ public final class DirectCandidateGeneratorBuilder
implements CandidateGenerator { implements CandidateGenerator {
private static final String TYPE = "direct_generator"; private static final String TYPE = "direct_generator";
static final DirectCandidateGeneratorBuilder PROTOTYPE = new DirectCandidateGeneratorBuilder("_na_");
static final ParseField DIRECT_GENERATOR_FIELD = new ParseField(TYPE); static final ParseField DIRECT_GENERATOR_FIELD = new ParseField(TYPE);
static final ParseField FIELDNAME_FIELD = new ParseField("field"); static final ParseField FIELDNAME_FIELD = new ParseField("field");
@ -108,6 +107,44 @@ public final class DirectCandidateGeneratorBuilder
return generator; return generator;
} }
/**
* Read from a stream.
*/
public DirectCandidateGeneratorBuilder(StreamInput in) throws IOException {
field = in.readString();
suggestMode = in.readOptionalString();
accuracy = in.readOptionalFloat();
size = in.readOptionalVInt();
sort = in.readOptionalString();
stringDistance = in.readOptionalString();
maxEdits = in.readOptionalVInt();
maxInspections = in.readOptionalVInt();
maxTermFreq = in.readOptionalFloat();
prefixLength = in.readOptionalVInt();
minWordLength = in.readOptionalVInt();
minDocFreq = in.readOptionalFloat();
preFilter = in.readOptionalString();
postFilter = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
out.writeOptionalString(suggestMode);
out.writeOptionalFloat(accuracy);
out.writeOptionalVInt(size);
out.writeOptionalString(sort);
out.writeOptionalString(stringDistance);
out.writeOptionalVInt(maxEdits);
out.writeOptionalVInt(maxInspections);
out.writeOptionalFloat(maxTermFreq);
out.writeOptionalVInt(prefixLength);
out.writeOptionalVInt(minWordLength);
out.writeOptionalFloat(minDocFreq);
out.writeOptionalString(preFilter);
out.writeOptionalString(postFilter);
}
/** /**
* The global suggest mode controls what suggested terms are included or * The global suggest mode controls what suggested terms are included or
* controls for what suggest text tokens, terms should be suggested for. * controls for what suggest text tokens, terms should be suggested for.
@ -334,15 +371,11 @@ public final class DirectCandidateGeneratorBuilder
PARSER.declareInt((tp, i) -> tp.v2().prefixLength(i), PREFIX_LENGTH_FIELD); PARSER.declareInt((tp, i) -> tp.v2().prefixLength(i), PREFIX_LENGTH_FIELD);
} }
@Override public static DirectCandidateGeneratorBuilder fromXContent(QueryParseContext parseContext) throws IOException {
public DirectCandidateGeneratorBuilder fromXContent(QueryParseContext parseContext) throws IOException {
DirectCandidateGeneratorBuilder tempGenerator = new DirectCandidateGeneratorBuilder("_na_"); DirectCandidateGeneratorBuilder tempGenerator = new DirectCandidateGeneratorBuilder("_na_");
Set<String> tmpFieldName = new HashSet<>(1); // bucket for the field // bucket for the field name, needed as constructor arg later
// name, needed as Set<String> tmpFieldName = new HashSet<>(1);
// constructor arg PARSER.parse(parseContext.parser(), new Tuple<Set<String>, DirectCandidateGeneratorBuilder>(tmpFieldName, tempGenerator));
// later
PARSER.parse(parseContext.parser(),
new Tuple<Set<String>, DirectCandidateGeneratorBuilder>(tmpFieldName, tempGenerator));
if (tmpFieldName.size() != 1) { if (tmpFieldName.size() != 1) {
throw new IllegalArgumentException("[" + TYPE + "] expects exactly one field parameter, but found " + tmpFieldName); throw new IllegalArgumentException("[" + TYPE + "] expects exactly one field parameter, but found " + tmpFieldName);
} }
@ -405,58 +438,6 @@ public final class DirectCandidateGeneratorBuilder
} }
} }
@Override
public DirectCandidateGeneratorBuilder readFrom(StreamInput in) throws IOException {
DirectCandidateGeneratorBuilder cg = new DirectCandidateGeneratorBuilder(in.readString());
cg.suggestMode = in.readOptionalString();
if (in.readBoolean()) {
cg.accuracy = in.readFloat();
}
cg.size = in.readOptionalVInt();
cg.sort = in.readOptionalString();
cg.stringDistance = in.readOptionalString();
cg.maxEdits = in.readOptionalVInt();
cg.maxInspections = in.readOptionalVInt();
if (in.readBoolean()) {
cg.maxTermFreq = in.readFloat();
}
cg.prefixLength = in.readOptionalVInt();
cg.minWordLength = in.readOptionalVInt();
if (in.readBoolean()) {
cg.minDocFreq = in.readFloat();
}
cg.preFilter = in.readOptionalString();
cg.postFilter = in.readOptionalString();
return cg;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
out.writeOptionalString(suggestMode);
out.writeBoolean(accuracy != null);
if (accuracy != null) {
out.writeFloat(accuracy);
}
out.writeOptionalVInt(size);
out.writeOptionalString(sort);
out.writeOptionalString(stringDistance);
out.writeOptionalVInt(maxEdits);
out.writeOptionalVInt(maxInspections);
out.writeBoolean(maxTermFreq != null);
if (maxTermFreq != null) {
out.writeFloat(maxTermFreq);
}
out.writeOptionalVInt(prefixLength);
out.writeOptionalVInt(minWordLength);
out.writeBoolean(minDocFreq != null);
if (minDocFreq != null) {
out.writeFloat(minDocFreq);
}
out.writeOptionalString(preFilter);
out.writeOptionalString(postFilter);
}
@Override
public final int hashCode() {
return Objects.hash(field, preFilter, postFilter, suggestMode, accuracy,

@ -44,15 +44,15 @@ import java.util.Objects;
* </p>
*/
public final class Laplace extends SmoothingModel {
private double alpha = DEFAULT_LAPLACE_ALPHA;
private static final String NAME = "laplace";
public static final String NAME = "laplace";
private static final ParseField ALPHA_FIELD = new ParseField("alpha");
static final ParseField PARSE_FIELD = new ParseField(NAME);
/**
* Default alpha parameter for laplace smoothing
*/
public static final double DEFAULT_LAPLACE_ALPHA = 0.5;
public static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA);
private double alpha = DEFAULT_LAPLACE_ALPHA;
/**
* Creates a Laplace smoothing model.
@ -62,6 +62,18 @@ public final class Laplace extends SmoothingModel {
this.alpha = alpha;
}
/**
* Read from a stream.
*/
public Laplace(StreamInput in) throws IOException {
alpha = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(alpha);
}
/**
* @return the laplace model alpha parameter
*/
@ -80,16 +92,6 @@ public final class Laplace extends SmoothingModel {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(alpha);
}
@Override
public SmoothingModel readFrom(StreamInput in) throws IOException {
return new Laplace(in.readDouble());
}
@Override
protected boolean doEquals(SmoothingModel other) {
Laplace otherModel = (Laplace) other;
@ -101,8 +103,7 @@ public final class Laplace extends SmoothingModel {
return Objects.hash(alpha);
}
@Override
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
public static SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
String fieldName = null;

@ -25,19 +25,11 @@ import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidate;
import java.io.IOException;
//TODO public for tests
public final class LaplaceScorer extends WordScorer {
public static final WordScorerFactory FACTORY = new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) throws IOException {
return new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, 0.5);
}
};
final class LaplaceScorer extends WordScorer {
private double alpha;
public LaplaceScorer(IndexReader reader, Terms terms, String field,
LaplaceScorer(IndexReader reader, Terms terms, String field,
double realWordLikelyhood, BytesRef separator, double alpha) throws IOException {
super(reader, terms, field, realWordLikelyhood, separator);
this.alpha = alpha;

@ -33,8 +33,8 @@ public final class LinearInterpoatingScorer extends WordScorer {
private final double bigramLambda;
private final double trigramLambda;
public LinearInterpoatingScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator, double trigramLambda, double bigramLambda, double unigramLambda)
throws IOException {
public LinearInterpoatingScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator,
double trigramLambda, double bigramLambda, double unigramLambda) throws IOException {
super(reader, terms, field, realWordLikelyhood, separator);
double sum = unigramLambda + bigramLambda + trigramLambda;
this.unigramLambda = unigramLambda / sum;

@ -45,16 +45,16 @@ import java.util.Objects;
* </p>
*/
public final class LinearInterpolation extends SmoothingModel {
private static final String NAME = "linear";
public static final String NAME = "linear";
public static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1);
private final double trigramLambda;
private final double bigramLambda;
private final double unigramLambda;
static final ParseField PARSE_FIELD = new ParseField(NAME);
private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda");
private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda");
private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda");
private final double trigramLambda;
private final double bigramLambda;
private final double unigramLambda;
/**
* Creates a linear interpolation smoothing model.
*
@ -77,6 +77,22 @@ public final class LinearInterpolation extends SmoothingModel {
this.unigramLambda = unigramLambda;
}
/**
* Read from a stream.
*/
public LinearInterpolation(StreamInput in) throws IOException {
trigramLambda = in.readDouble();
bigramLambda = in.readDouble();
unigramLambda = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(trigramLambda);
out.writeDouble(bigramLambda);
out.writeDouble(unigramLambda);
}
public double getTrigramLambda() {
return this.trigramLambda;
}
@ -102,18 +118,6 @@ public final class LinearInterpolation extends SmoothingModel {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(trigramLambda);
out.writeDouble(bigramLambda);
out.writeDouble(unigramLambda);
}
@Override
public LinearInterpolation readFrom(StreamInput in) throws IOException {
return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble());
}
@Override
protected boolean doEquals(SmoothingModel other) {
final LinearInterpolation otherModel = (LinearInterpolation) other;
@ -127,8 +131,7 @@ public final class LinearInterpolation extends SmoothingModel {
return Objects.hash(trigramLambda, bigramLambda, unigramLambda);
}
@Override
public LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException {
public static LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
String fieldName = null;

@ -29,9 +29,11 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
@ -53,7 +55,9 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
private final BytesRef SEPARATOR = new BytesRef(" ");
private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion";
public static final PhraseSuggester PROTOTYPE = new PhraseSuggester();
public static final PhraseSuggester INSTANCE = new PhraseSuggester();
private PhraseSuggester() {}
/*
* More Ideas:
@ -144,8 +148,12 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return PhraseSuggestionBuilder.PROTOTYPE;
public SuggestionBuilder<?> innerFromXContent(QueryParseContext context) throws IOException {
return PhraseSuggestionBuilder.innerFromXContent(context);
}
@Override
public SuggestionBuilder<?> read(StreamInput in) throws IOException {
return new PhraseSuggestionBuilder(in);
}
}

@ -60,8 +60,6 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
private static final String SUGGESTION_NAME = "phrase";
public static final PhraseSuggestionBuilder PROTOTYPE = new PhraseSuggestionBuilder("_na_");
protected static final ParseField MAXERRORS_FIELD = new ParseField("max_errors");
protected static final ParseField RWE_LIKELIHOOD_FIELD = new ParseField("real_word_error_likelihood");
protected static final ParseField SEPARATOR_FIELD = new ParseField("separator");
@ -121,6 +119,76 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
generators.putAll(in.generators);
}
/**
* Read from a stream.
*/
PhraseSuggestionBuilder(StreamInput in) throws IOException {
super(in);
maxErrors = in.readFloat();
realWordErrorLikelihood = in.readFloat();
confidence = in.readFloat();
gramSize = in.readOptionalVInt();
if (in.readBoolean()) {
model = in.readPhraseSuggestionSmoothingModel();
}
forceUnigrams = in.readBoolean();
tokenLimit = in.readVInt();
preTag = in.readOptionalString();
postTag = in.readOptionalString();
separator = in.readString();
if (in.readBoolean()) {
collateQuery = Template.readTemplate(in);
}
collateParams = in.readMap();
collatePrune = in.readOptionalBoolean();
int generatorsEntries = in.readVInt();
for (int i = 0; i < generatorsEntries; i++) {
String type = in.readString();
int numberOfGenerators = in.readVInt();
List<CandidateGenerator> generatorsList = new ArrayList<>(numberOfGenerators);
for (int g = 0; g < numberOfGenerators; g++) {
DirectCandidateGeneratorBuilder generator = new DirectCandidateGeneratorBuilder(in);
generatorsList.add(generator);
}
generators.put(type, generatorsList);
}
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeFloat(maxErrors);
out.writeFloat(realWordErrorLikelihood);
out.writeFloat(confidence);
out.writeOptionalVInt(gramSize);
boolean hasModel = model != null;
out.writeBoolean(hasModel);
if (hasModel) {
out.writePhraseSuggestionSmoothingModel(model);
}
out.writeBoolean(forceUnigrams);
out.writeVInt(tokenLimit);
out.writeOptionalString(preTag);
out.writeOptionalString(postTag);
out.writeString(separator);
if (collateQuery != null) {
out.writeBoolean(true);
collateQuery.writeTo(out);
} else {
out.writeBoolean(false);
}
out.writeMap(collateParams);
out.writeOptionalBoolean(collatePrune);
out.writeVInt(this.generators.size());
for (Entry<String, List<CandidateGenerator>> entry : this.generators.entrySet()) {
out.writeString(entry.getKey());
List<CandidateGenerator> generatorsList = entry.getValue();
out.writeVInt(generatorsList.size());
for (CandidateGenerator generator : generatorsList) {
generator.writeTo(out);
}
}
}
/**
* Sets the gram size for the n-gram model used for this suggester. The
* default value is <tt>1</tt> corresponding to <tt>unigrams</tt>. Use
@ -422,8 +490,7 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
return builder;
}
@Override
protected PhraseSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
static PhraseSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
PhraseSuggestionBuilder tmpSuggestion = new PhraseSuggestionBuilder("_na_");
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
@ -464,7 +531,7 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
if (parseFieldMatcher.match(currentFieldName, DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD)) {
// for now we only have a single type of generators
while ((token = parser.nextToken()) == Token.START_OBJECT) {
tmpSuggestion.addCandidateGenerator(DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(parseContext));
tmpSuggestion.addCandidateGenerator(DirectCandidateGeneratorBuilder.fromXContent(parseContext));
}
} else {
throw new ParsingException(parser.getTokenLocation(),
@ -578,10 +645,6 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
suggestionContext.setCollatePrune(this.collatePrune);
}
if (suggestionContext.model() == null) {
suggestionContext.setModel(StupidBackoffScorer.FACTORY);
}
if (this.gramSize == null || suggestionContext.generators().isEmpty()) {
final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils
.getShingleFilterFactory(suggestionContext.getAnalyzer());
@ -623,75 +686,6 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
return SUGGESTION_NAME;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeFloat(maxErrors);
out.writeFloat(realWordErrorLikelihood);
out.writeFloat(confidence);
out.writeOptionalVInt(gramSize);
boolean hasModel = model != null;
out.writeBoolean(hasModel);
if (hasModel) {
out.writePhraseSuggestionSmoothingModel(model);
}
out.writeBoolean(forceUnigrams);
out.writeVInt(tokenLimit);
out.writeOptionalString(preTag);
out.writeOptionalString(postTag);
out.writeString(separator);
if (collateQuery != null) {
out.writeBoolean(true);
collateQuery.writeTo(out);
} else {
out.writeBoolean(false);
}
out.writeMap(collateParams);
out.writeOptionalBoolean(collatePrune);
out.writeVInt(this.generators.size());
for (Entry<String, List<CandidateGenerator>> entry : this.generators.entrySet()) {
out.writeString(entry.getKey());
List<CandidateGenerator> generatorsList = entry.getValue();
out.writeVInt(generatorsList.size());
for (CandidateGenerator generator : generatorsList) {
generator.writeTo(out);
}
}
}
@Override
public PhraseSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(field);
builder.maxErrors = in.readFloat();
builder.realWordErrorLikelihood = in.readFloat();
builder.confidence = in.readFloat();
builder.gramSize = in.readOptionalVInt();
if (in.readBoolean()) {
builder.model = in.readPhraseSuggestionSmoothingModel();
}
builder.forceUnigrams = in.readBoolean();
builder.tokenLimit = in.readVInt();
builder.preTag = in.readOptionalString();
builder.postTag = in.readOptionalString();
builder.separator = in.readString();
if (in.readBoolean()) {
builder.collateQuery = Template.readTemplate(in);
}
builder.collateParams = in.readMap();
builder.collatePrune = in.readOptionalBoolean();
int generatorsEntries = in.readVInt();
for (int i = 0; i < generatorsEntries; i++) {
String type = in.readString();
int numberOfGenerators = in.readVInt();
List<CandidateGenerator> generatorsList = new ArrayList<>(numberOfGenerators);
for (int g = 0; g < numberOfGenerators; g++) {
DirectCandidateGeneratorBuilder generator = DirectCandidateGeneratorBuilder.PROTOTYPE.readFrom(in);
generatorsList.add(generator);
}
builder.generators.put(type, generatorsList);
}
return builder;
}
@Override
protected boolean doEquals(PhraseSuggestionBuilder other) {
return Objects.equals(maxErrors, other.maxErrors) &&
@ -723,8 +717,6 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSuggestionB
public interface CandidateGenerator extends Writeable<CandidateGenerator>, ToXContent {
String getType();
CandidateGenerator fromXContent(QueryParseContext parseContext) throws IOException;
PhraseSuggestionContext.DirectCandidateGenerator build(MapperService mapperService) throws IOException;
}
}

@ -19,6 +19,8 @@
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.CompiledScript;
@ -38,6 +40,8 @@ class PhraseSuggestionContext extends SuggestionContext {
static final float DEFAULT_RWE_ERRORLIKELIHOOD = 0.95f;
static final float DEFAULT_MAX_ERRORS = 0.5f;
static final String DEFAULT_SEPARATOR = " ";
static final WordScorer.WordScorerFactory DEFAULT_SCORER = (IndexReader reader, Terms terms, String field, double realWordLikelyhood,
BytesRef separator) -> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, 0.4f);
private float maxErrors = DEFAULT_MAX_ERRORS;
private BytesRef separator = new BytesRef(DEFAULT_SEPARATOR);
@ -52,10 +56,10 @@ class PhraseSuggestionContext extends SuggestionContext {
private boolean prune = DEFAULT_COLLATE_PRUNE;
private List<DirectCandidateGenerator> generators = new ArrayList<>();
private Map<String, Object> collateScriptParams = new HashMap<>(1);
private WordScorer.WordScorerFactory scorer;
private WordScorer.WordScorerFactory scorer = DEFAULT_SCORER;
public PhraseSuggestionContext(QueryShardContext shardContext) {
super(PhraseSuggester.PROTOTYPE, shardContext);
super(PhraseSuggester.INSTANCE, shardContext);
}
public float maxErrors() {

@ -76,11 +76,11 @@ public abstract class SmoothingModel implements NamedWriteable<SmoothingModel>,
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext);
model = LinearInterpolation.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
model = Laplace.PROTOTYPE.innerFromXContent(parseContext);
model = Laplace.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext);
model = StupidBackoff.innerFromXContent(parseContext);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
@ -92,8 +92,6 @@ public abstract class SmoothingModel implements NamedWriteable<SmoothingModel>,
return model;
}
public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException;
public abstract WordScorerFactory buildWordScorerFactory();
/**

@ -49,12 +49,12 @@ public final class StupidBackoff extends SmoothingModel {
* Default discount parameter for {@link StupidBackoff} smoothing
*/
public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4;
public static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT);
private double discount = DEFAULT_BACKOFF_DISCOUNT;
private static final String NAME = "stupid_backoff";
public static final String NAME = "stupid_backoff";
private static final ParseField DISCOUNT_FIELD = new ParseField("discount");
static final ParseField PARSE_FIELD = new ParseField(NAME);
private double discount = DEFAULT_BACKOFF_DISCOUNT;
/**
* Creates a Stupid-Backoff smoothing model.
*
@ -65,6 +65,18 @@ public final class StupidBackoff extends SmoothingModel {
this.discount = discount;
}
/**
* Read from a stream.
*/
public StupidBackoff(StreamInput in) throws IOException {
discount = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(discount);
}
/**
* @return the discount parameter of the model
*/
@ -83,16 +95,6 @@ public final class StupidBackoff extends SmoothingModel {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(discount);
}
@Override
public StupidBackoff readFrom(StreamInput in) throws IOException {
return new StupidBackoff(in.readDouble());
}
@Override
protected boolean doEquals(SmoothingModel other) {
StupidBackoff otherModel = (StupidBackoff) other;
@ -104,8 +106,7 @@ public final class StupidBackoff extends SmoothingModel {
return Objects.hash(discount);
}
@Override
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
public static SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
String fieldName = null;

@ -26,14 +26,7 @@ import org.elasticsearch.search.suggest.phrase.DirectCandidateGenerator.Candidat
import java.io.IOException;
public class StupidBackoffScorer extends WordScorer {
class StupidBackoffScorer extends WordScorer {
public static final WordScorerFactory FACTORY = new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) throws IOException {
return new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, 0.4f);
}
};
private final double discount;
public StupidBackoffScorer(IndexReader reader, Terms terms,String field, double realWordLikelyhood, BytesRef separator, double discount)

@ -27,7 +27,9 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
@ -39,7 +41,9 @@ import java.util.List;
public final class TermSuggester extends Suggester<TermSuggestionContext> {
public static final TermSuggester PROTOTYPE = new TermSuggester();
public static final TermSuggester INSTANCE = new TermSuggester();
private TermSuggester() {}
@Override
public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare)
@ -79,6 +83,16 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
return result;
}
@Override
public SuggestionBuilder<?> innerFromXContent(QueryParseContext context) throws IOException {
return TermSuggestionBuilder.innerFromXContent(context);
}
@Override
public SuggestionBuilder<?> read(StreamInput in) throws IOException {
return new TermSuggestionBuilder(in);
}
private static class Token {
public final Term term;
@ -92,10 +106,4 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
}
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return TermSuggestionBuilder.PROTOTYPE;
}
}

@ -111,7 +111,7 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
@Override
protected void innerReadFrom(StreamInput in) throws IOException {
super.innerReadFrom(in);
sort = SortBy.PROTOTYPE.readFrom(in);
sort = SortBy.readFromStream(in);
}
@Override

@ -69,8 +69,6 @@ import static org.elasticsearch.search.suggest.SuggestUtils.Fields.SUGGEST_MODE;
* global options, but are only applicable for this suggestion.
*/
public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuilder> {
public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder("_na_");
private static final String SUGGESTION_NAME = "term";
private SuggestMode suggestMode = SuggestMode.MISSING;
@ -105,6 +103,37 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
minDocFreq = in.minDocFreq;
}
/**
* Read from a stream.
*/
TermSuggestionBuilder(StreamInput in) throws IOException {
super(in);
suggestMode = SuggestMode.readFromStream(in);
accuracy = in.readFloat();
sort = SortBy.readFromStream(in);
stringDistance = StringDistanceImpl.readFromStream(in);
maxEdits = in.readVInt();
maxInspections = in.readVInt();
maxTermFreq = in.readFloat();
prefixLength = in.readVInt();
minWordLength = in.readVInt();
minDocFreq = in.readFloat();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
suggestMode.writeTo(out);
out.writeFloat(accuracy);
sort.writeTo(out);
stringDistance.writeTo(out);
out.writeVInt(maxEdits);
out.writeVInt(maxInspections);
out.writeFloat(maxTermFreq);
out.writeVInt(prefixLength);
out.writeVInt(minWordLength);
out.writeFloat(minDocFreq);
}
/**
* The global suggest mode controls what suggested terms are included or
* controls for what suggest text tokens, terms should be suggested for.
@ -360,8 +389,7 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
return builder;
}
@Override
protected TermSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
static TermSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
TermSuggestionBuilder tmpSuggestion = new TermSuggestionBuilder("_na_");
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
@ -442,36 +470,6 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
return SUGGESTION_NAME;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
suggestMode.writeTo(out);
out.writeFloat(accuracy);
sort.writeTo(out);
stringDistance.writeTo(out);
out.writeVInt(maxEdits);
out.writeVInt(maxInspections);
out.writeFloat(maxTermFreq);
out.writeVInt(prefixLength);
out.writeVInt(minWordLength);
out.writeFloat(minDocFreq);
}
@Override
public TermSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
TermSuggestionBuilder builder = new TermSuggestionBuilder(field);
builder.suggestMode = SuggestMode.PROTOTYPE.readFrom(in);
builder.accuracy = in.readFloat();
builder.sort = SortBy.PROTOTYPE.readFrom(in);
builder.stringDistance = StringDistanceImpl.PROTOTYPE.readFrom(in);
builder.maxEdits = in.readVInt();
builder.maxInspections = in.readVInt();
builder.maxTermFreq = in.readFloat();
builder.prefixLength = in.readVInt();
builder.minWordLength = in.readVInt();
builder.minDocFreq = in.readFloat();
return builder;
}
@Override
protected boolean doEquals(TermSuggestionBuilder other) {
return Objects.equals(suggestMode, other.suggestMode) &&
@ -516,15 +514,12 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
}
};
protected static SuggestMode PROTOTYPE = MISSING;
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
@Override
public SuggestMode readFrom(final StreamInput in) throws IOException {
public static SuggestMode readFromStream(final StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown SuggestMode ordinal [" + ordinal + "]");
@ -579,15 +574,12 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
}
};
protected static StringDistanceImpl PROTOTYPE = INTERNAL;
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
@Override
public StringDistanceImpl readFrom(final StreamInput in) throws IOException {
public static StringDistanceImpl readFromStream(final StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown StringDistanceImpl ordinal [" + ordinal + "]");

@ -27,7 +27,7 @@ final class TermSuggestionContext extends SuggestionContext {
private final DirectSpellcheckerSettings settings = new DirectSpellcheckerSettings();
public TermSuggestionContext(QueryShardContext shardContext) {
super(TermSuggester.PROTOTYPE, shardContext);
super(TermSuggester.INSTANCE, shardContext);
}
public DirectSpellcheckerSettings getDirectSpellCheckerSettings() {

@ -29,6 +29,11 @@ import static org.hamcrest.Matchers.equalTo;
* Abstract class offering base functionality for testing @{link Writeable} enums.
*/
public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
private final Writeable.Reader<?> reader;
public AbstractWriteableEnumTestCase(Writeable.Reader<?> reader) {
this.reader = reader;
}
/**
* Test that the ordinals for the enum are consistent (i.e. the order hasn't changed)
@ -52,7 +57,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
public abstract void testWriteTo() throws IOException;
// a convenience method for testing the write of a writeable enum
protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException {
protected static <T> void assertWriteToStream(final Writeable<T> writeableEnum, final int ordinal) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
writeableEnum.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) {
@ -62,13 +67,12 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
}
// a convenience method for testing the read of a writeable enum
protected static <T extends Writeable<T>> void assertReadFromStream(final int ordinal, final Writeable<T> expected) throws IOException {
protected <T extends Writeable<T>> void assertReadFromStream(final int ordinal, final Writeable<T> expected) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(ordinal);
try (StreamInput in = StreamInput.wrap(out.bytes())) {
assertThat(expected.readFrom(in), equalTo(expected));
assertThat(reader.read(in), equalTo(expected));
}
}
}
}

@ -20,12 +20,14 @@
package org.elasticsearch.common.io.stream;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
import static org.hamcrest.Matchers.closeTo;
@ -275,6 +277,9 @@ public class BytesStreamsTests extends ESTestCase {
out.writeString("hello"); out.writeString("hello");
out.writeString("goodbye"); out.writeString("goodbye");
out.writeGenericValue(BytesRefs.toBytesRef("bytesref")); out.writeGenericValue(BytesRefs.toBytesRef("bytesref"));
out.writeStringArray(new String[] {"a", "b", "cat"});
out.writeBytesReference(new BytesArray("test"));
out.writeOptionalBytesReference(new BytesArray("test"));
final byte[] bytes = out.bytes().toBytes();
StreamInput in = StreamInput.wrap(out.bytes().toBytes());
assertEquals(in.available(), bytes.length);
@ -296,6 +301,10 @@ public class BytesStreamsTests extends ESTestCase {
assertThat(in.readString(), equalTo("hello")); assertThat(in.readString(), equalTo("hello"));
assertThat(in.readString(), equalTo("goodbye")); assertThat(in.readString(), equalTo("goodbye"));
assertThat(in.readGenericValue(), equalTo((Object)BytesRefs.toBytesRef("bytesref"))); assertThat(in.readGenericValue(), equalTo((Object)BytesRefs.toBytesRef("bytesref")));
assertThat(in.readStringArray(), equalTo(new String[] {"a", "b", "cat"}));
assertThat(in.readBytesReference(), equalTo(new BytesArray("test")));
assertThat(in.readOptionalBytesReference(), equalTo(new BytesArray("test")));
assertEquals(0, in.available());
in.close();
out.close();
}

@ -255,11 +255,6 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
protected void configureSearch() {
// Skip me
}
@Override
protected void configureSuggesters() {
// Skip me
}
},
new AbstractModule() {
@Override

@ -48,7 +48,7 @@ public class SearchModuleTests extends ModuleTestCase {
}
try {
module.registerSuggester("term", PhraseSuggester.PROTOTYPE);
module.registerSuggester("term", PhraseSuggester.INSTANCE);
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [term]");
}
@ -57,12 +57,9 @@ public class SearchModuleTests extends ModuleTestCase {
public void testRegisterSuggester() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
module.registerSuggester("custom", CustomSuggester.PROTOTYPE);
try {
module.registerSuggester("custom", CustomSuggester.PROTOTYPE);
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [custom]");
}
assertMapMultiBinding(module, Suggester.class, CustomSuggester.class);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> module.registerSuggester("custom", CustomSuggester.PROTOTYPE));
assertEquals("Can't register the same [suggester] more than once for [custom]", e.getMessage());
}
public void testRegisterHighlighter() {

@ -159,11 +159,6 @@ public class AggregatorParsingTests extends ESTestCase {
protected void configureSearch() {
// Skip me
}
@Override
protected void configureSuggesters() {
// Skip me
}
}, new IndexSettingsModule(index, settings),
new AbstractModule() {

@ -173,11 +173,6 @@ public abstract class BaseAggregationTestCase<AB extends AggregatorBuilder<AB>>
protected void configureSearch() {
// Skip me
}
@Override
protected void configureSuggesters() {
// Skip me
}
},
new IndexSettingsModule(index, settings),

@ -174,11 +174,6 @@ public abstract class BasePipelineAggregationTestCase<AF extends PipelineAggrega
protected void configureSearch() {
// Skip me
}
@Override
protected void configureSuggesters() {
// Skip me
}
},
new IndexSettingsModule(index, settings),
new AbstractModule() {

@ -213,6 +213,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
injector = null;
index = null;
aggParsers = null;
suggesters = null;
currentTypes = null;
namedWriteableRegistry = null;
}

@ -31,26 +31,15 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptServiceTests.TestEngineService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
@ -67,23 +56,10 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
*/
@BeforeClass
public static void init() throws IOException {
Path genericConfigFolder = createTempDir();
Settings baseSettings = settingsBuilder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder)
.build();
Environment environment = new Environment(baseSettings);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry
.ScriptEngineRegistration(TestEngineService.class, TestEngineService.TYPES)));
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
suggesters = new Suggesters(Collections.emptyMap());
namedWriteableRegistry = new NamedWriteableRegistry();
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE);
queriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry();
SearchModule searchModule = new SearchModule(Settings.EMPTY, namedWriteableRegistry);
queriesRegistry = searchModule.buildQueryParserRegistry();
suggesters = searchModule.getSuggesters();
parseFieldMatcher = ParseFieldMatcher.STRICT;
}

@ -27,6 +27,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.suggest.CompletionSuggestSearchIT.CompletionMappingBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
@ -36,7 +37,6 @@ import org.elasticsearch.search.suggest.completion.context.ContextBuilder;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.GeoContextMapping;
import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
@ -280,7 +280,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
// query context order should never matter
Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
Map<String, List<? extends ToXContent>> contextMap = new HashMap<>();
contextMap.put("type", Collections.singletonList(CategoryQueryContext.builder().setCategory("type2").build()));
contextMap.put("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat2").build()));
multiContextFilterSuggest.contexts(contextMap);
@ -331,7 +331,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
// boost on both contexts
CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
// query context order should never matter
Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
Map<String, List<? extends ToXContent>> contextMap = new HashMap<>();
contextMap.put("type", Arrays.asList(
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(),
CategoryQueryContext.builder().setCategory("type1").setBoost(4).build())

@ -20,16 +20,16 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.CustomSuggesterSearchIT.CustomSuggestionBuilder;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
/**
*
*/
public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestionsContext> {
public static CustomSuggester PROTOTYPE = new CustomSuggester(); public static CustomSuggester PROTOTYPE = new CustomSuggester();
@ -65,7 +65,12 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return CustomSuggesterSearchIT.CustomSuggestionBuilder.PROTOTYPE;
public SuggestionBuilder<?> innerFromXContent(QueryParseContext context) throws IOException {
return CustomSuggestionBuilder.innerFromXContent(context);
}
@Override
public SuggestionBuilder<?> read(StreamInput in) throws IOException {
return new CustomSuggestionBuilder(in);
}
}

@ -102,6 +102,19 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
this.randomSuffix = randomSuffix;
}
/**
* Read from a stream.
*/
public CustomSuggestionBuilder(StreamInput in) throws IOException {
super(in);
this.randomSuffix = in.readString();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(randomSuffix);
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
@ -113,16 +126,6 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
return "custom"; return "custom";
} }
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(randomSuffix);
}
@Override
public CustomSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
return new CustomSuggestionBuilder(field, in.readString());
}
@Override
protected boolean doEquals(CustomSuggestionBuilder other) {
return Objects.equals(randomSuffix, other.randomSuffix);
@ -133,8 +136,7 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
return Objects.hash(randomSuffix);
}
@Override
protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
static CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;

@ -51,25 +51,21 @@ import java.util.Map.Entry;
public class SuggestBuilderTests extends WritableTestCase<SuggestBuilder> {
private static NamedWriteableRegistry namedWriteableRegistry;
private static Suggesters suggesters;
/**
* Setup for the whole base test class.
*/
@BeforeClass
public static void init() {
NamedWriteableRegistry nwRegistry = new NamedWriteableRegistry();
nwRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
nwRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE);
nwRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE);
nwRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
nwRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
nwRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
namedWriteableRegistry = nwRegistry;
namedWriteableRegistry = new NamedWriteableRegistry();
suggesters = new Suggesters(namedWriteableRegistry);
}
@AfterClass
public static void afterClass() {
namedWriteableRegistry = null;
suggesters = null;
}
@Override
@ -81,7 +77,6 @@ public class SuggestBuilderTests extends WritableTestCase<SuggestBuilder> {
* creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original * creates random suggestion builder, renders it to xContent and back to new instance that should be equal to original
*/ */
public void testFromXContent() throws IOException { public void testFromXContent() throws IOException {
Suggesters suggesters = new Suggesters(Collections.emptyMap());
QueryParseContext context = new QueryParseContext(null); QueryParseContext context = new QueryParseContext(null);
context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) { for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
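Instead of registering each PROTOTYPE by hand, the test now builds a Suggesters instance over the NamedWriteableRegistry, which appears to register the built-in suggestion builders and smoothing models as a side effect of construction. A rough sketch of that register-on-construction idea, with an illustrative Registry/SuggestersSketch pair rather than the real NamedWriteableRegistry and Suggesters classes:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

// Illustrative registry: names and signatures are not the Elasticsearch API.
class Registry {
    private final Map<String, Function<byte[], Object>> readers = new HashMap<>();

    void register(String name, Function<byte[], Object> reader) {
        readers.put(name, reader);
    }

    Object read(String name, byte[] data) {
        return readers.get(name).apply(data);
    }
}

class SuggestersSketch {
    SuggestersSketch(Registry registry) {
        // Constructing the module registers every built-in reader once,
        // so tests only need `new SuggestersSketch(registry)`.
        registry.register("term", data -> "term:" + new String(data));
        registry.register("phrase", data -> "phrase:" + new String(data));
    }

    public static void main(String[] args) {
        Registry registry = new Registry();
        new SuggestersSketch(registry);
        System.out.println(registry.read("term", "foo".getBytes())); // term:foo
    }
}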


@@ -19,6 +19,7 @@
 package org.elasticsearch.search.suggest.completion;
 
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
 
 import java.io.IOException;
@@ -39,8 +40,8 @@ public class CategoryQueryContextTests extends QueryContextTestCase<CategoryQueryContext> {
     }
 
     @Override
-    protected CategoryQueryContext prototype() {
-        return CategoryQueryContext.PROTOTYPE;
+    protected CategoryQueryContext fromXContent(XContentParser parser) throws IOException {
+        return CategoryQueryContext.fromXContent(parser);
     }
 
     public void testNullCategoryIsIllegal() {


@@ -20,21 +20,14 @@
 package org.elasticsearch.search.suggest.completion;
 
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.unit.Fuzziness;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase;
 import org.elasticsearch.search.suggest.SuggestBuilder;
-import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
-import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping;
 import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
-import org.elasticsearch.search.suggest.completion.context.ContextMapping;
-import org.elasticsearch.search.suggest.completion.context.ContextMappings;
-import org.elasticsearch.search.suggest.completion.context.GeoContextMapping;
 import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
-import org.elasticsearch.search.suggest.completion.context.QueryContext;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -43,9 +36,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.stream.Collectors;
 
-import static org.hamcrest.core.IsInstanceOf.instanceOf;
 import static org.hamcrest.Matchers.containsString;
 
 public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase<CompletionSuggestionBuilder> {
@@ -86,7 +77,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase<CompletionSuggestionBuilder> {
         List<String> payloads = new ArrayList<>();
         Collections.addAll(payloads, generateRandomStringArray(5, 10, false, false));
         maybeSet(testBuilder::payload, payloads);
-        Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
+        Map<String, List<? extends ToXContent>> contextMap = new HashMap<>();
         if (randomBoolean()) {
             int numContext = randomIntBetween(1, 5);
             List<CategoryQueryContext> contexts = new ArrayList<>(numContext);


@@ -83,7 +83,7 @@ public class FuzzyOptionsTests extends WritableTestCase<FuzzyOptions> {
     @Override
     protected FuzzyOptions readFrom(StreamInput in) throws IOException {
-        return FuzzyOptions.readFuzzyOptions(in);
+        return new FuzzyOptions(in);
     }
 
     public void testIllegalArguments() {


@@ -20,6 +20,7 @@
 package org.elasticsearch.search.suggest.completion;
 
 import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
 
 import java.io.IOException;
@@ -50,8 +51,8 @@ public class GeoQueryContextTests extends QueryContextTestCase<GeoQueryContext> {
     }
 
     @Override
-    protected GeoQueryContext prototype() {
-        return GeoQueryContext.PROTOTYPE;
+    protected GeoQueryContext fromXContent(XContentParser parser) throws IOException {
+        return GeoQueryContext.fromXContent(parser);
     }
 
     public void testNullGeoPointIsIllegal() {


@@ -20,21 +20,16 @@
 package org.elasticsearch.search.suggest.completion;
 
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.suggest.completion.context.QueryContext;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 
-import static junit.framework.TestCase.assertEquals;
-
-public abstract class QueryContextTestCase<QC extends QueryContext> extends ESTestCase {
+public abstract class QueryContextTestCase<QC extends ToXContent> extends ESTestCase {
 
     private static final int NUMBER_OF_RUNS = 20;
 
     /**
@@ -43,19 +38,19 @@ public abstract class QueryContextTestCase<QC extends QueryContext> extends ESTestCase {
     protected abstract QC createTestModel();
 
     /**
-     * query context prototype to read serialized format
+     * read the context
      */
-    protected abstract QC prototype();
+    protected abstract QC fromXContent(XContentParser parser) throws IOException;
 
     public void testToXContext() throws IOException {
         for (int i = 0; i < NUMBER_OF_RUNS; i++) {
-            QueryContext toXContent = createTestModel();
+            QC toXContent = createTestModel();
             XContentBuilder builder = XContentFactory.jsonBuilder();
             toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS);
             BytesReference bytesReference = builder.bytes();
             XContentParser parser = XContentFactory.xContent(bytesReference).createParser(bytesReference);
             parser.nextToken();
-            QueryContext fromXContext = prototype().fromXContext(parser);
+            QC fromXContext = fromXContent(parser);
             assertEquals(toXContent, fromXContext);
             assertEquals(toXContent.hashCode(), fromXContext.hashCode());
         }
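The base class now drives the xContent round trip through an abstract fromXContent hook instead of a prototype's instance method. A compact sketch of the same test shape, with plain strings standing in for XContent and an illustrative class name:

// Sketch only: serialize a randomly built object, parse it back through an
// abstract hook, and require equality, mirroring testToXContext() above.
abstract class RoundTripTestSketch<T> {
    protected abstract T createTestModel();

    protected abstract String toText(T model);

    /** Read the model back, mirroring `protected abstract QC fromXContent(XContentParser parser)`. */
    protected abstract T fromText(String text);

    void checkRoundTrip(int runs) {
        for (int i = 0; i < runs; i++) {
            T original = createTestModel();
            T parsed = fromText(toText(original));
            if (!original.equals(parsed) || original.hashCode() != parsed.hashCode()) {
                throw new AssertionError("round trip changed the model: " + original + " vs " + parsed);
            }
        }
    }
}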


@@ -56,7 +56,7 @@ public class RegexOptionsTests extends WritableTestCase<RegexOptions> {
     @Override
     protected RegexOptions readFrom(StreamInput in) throws IOException {
-        return RegexOptions.readRegexOptions(in);
+        return new RegexOptions(in);
     }
 
     public void testIllegalArgument() {


@@ -50,7 +50,7 @@ public abstract class WritableTestCase<M extends Writeable> extends ESTestCase {
     protected abstract M createMutation(M original) throws IOException;
 
     /**
-     * model prototype to read serialized format
+     * Read from a stream.
      */
     protected abstract M readFrom(StreamInput in) throws IOException;


@@ -36,7 +36,6 @@ import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Collections;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -125,8 +124,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase{
             XContentParser parser = XContentHelper.createParser(builder.bytes());
             context.reset(parser);
             parser.nextToken();
-            DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PROTOTYPE
-                    .fromXContent(context);
+            DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.fromXContent(context);
             assertNotSame(generator, secondGenerator);
             assertEquals(generator, secondGenerator);
             assertEquals(generator.hashCode(), secondGenerator.hashCode());
@@ -161,62 +159,37 @@
         // test missing fieldname
         String directGenerator = "{ }";
         XContentParser parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
         context.reset(parser);
-        try {
-            DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
-            fail("expected an exception");
-        } catch (IllegalArgumentException e) {
-            assertEquals("[direct_generator] expects exactly one field parameter, but found []", e.getMessage());
-        }
+        Exception e = expectThrows(IllegalArgumentException.class, () -> DirectCandidateGeneratorBuilder.fromXContent(context));
+        assertEquals("[direct_generator] expects exactly one field parameter, but found []", e.getMessage());
 
         // test two fieldnames
         directGenerator = "{ \"field\" : \"f1\", \"field\" : \"f2\" }";
         parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
         context.reset(parser);
-        try {
-            DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
-            fail("expected an exception");
-        } catch (IllegalArgumentException e) {
-            assertEquals("[direct_generator] expects exactly one field parameter, but found [f2, f1]", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> DirectCandidateGeneratorBuilder.fromXContent(context));
+        assertEquals("[direct_generator] expects exactly one field parameter, but found [f2, f1]", e.getMessage());
 
         // test unknown field
         directGenerator = "{ \"unknown_param\" : \"f1\" }";
         parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
        context.reset(parser);
-        try {
-            DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
-            fail("expected an exception");
-        } catch (IllegalArgumentException e) {
-            assertEquals("[direct_generator] unknown field [unknown_param], parser not found", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> DirectCandidateGeneratorBuilder.fromXContent(context));
+        assertEquals("[direct_generator] unknown field [unknown_param], parser not found", e.getMessage());
 
         // test bad value for field (e.g. size expects an int)
         directGenerator = "{ \"size\" : \"xxl\" }";
         parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
         context.reset(parser);
-        try {
-            DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
-            fail("expected an exception");
-        } catch (ParsingException e) {
-            assertEquals("[direct_generator] failed to parse field [size]", e.getMessage());
-        }
+        e = expectThrows(ParsingException.class, () -> DirectCandidateGeneratorBuilder.fromXContent(context));
+        assertEquals("[direct_generator] failed to parse field [size]", e.getMessage());
 
         // test unexpected token
         directGenerator = "{ \"size\" : [ \"xxl\" ] }";
         parser = XContentFactory.xContent(directGenerator).createParser(directGenerator);
         context.reset(parser);
-        try {
-            DirectCandidateGeneratorBuilder.PROTOTYPE.fromXContent(context);
-            fail("expected an exception");
-        } catch (IllegalArgumentException e) {
-            assertEquals("[direct_generator] size doesn't support values of type: START_ARRAY", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> DirectCandidateGeneratorBuilder.fromXContent(context));
+        assertEquals("[direct_generator] size doesn't support values of type: START_ARRAY", e.getMessage());
     }
 
     /**
@@ -234,9 +207,9 @@
         maybeSet(generator::preFilter, randomAsciiOfLengthBetween(1, 20));
         maybeSet(generator::postFilter, randomAsciiOfLengthBetween(1, 20));
         maybeSet(generator::size, randomIntBetween(1, 20));
-        maybeSet(generator::sort, randomFrom(Arrays.asList(new String[]{ "score", "frequency" })));
-        maybeSet(generator::stringDistance, randomFrom(Arrays.asList(new String[]{ "internal", "damerau_levenshtein", "levenstein", "jarowinkler", "ngram"})));
-        maybeSet(generator::suggestMode, randomFrom(Arrays.asList(new String[]{ "missing", "popular", "always"})));
+        maybeSet(generator::sort, randomFrom("score", "frequency"));
+        maybeSet(generator::stringDistance, randomFrom("internal", "damerau_levenshtein", "levenstein", "jarowinkler", "ngram"));
+        maybeSet(generator::suggestMode, randomFrom("missing", "popular", "always"));
         return generator;
     }
@@ -244,7 +217,7 @@
        try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
             try (StreamInput in = StreamInput.wrap(output.bytes())) {
-                return DirectCandidateGeneratorBuilder.PROTOTYPE.readFrom(in);
+                return new DirectCandidateGeneratorBuilder(in);
             }
         }
     }
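Each try/fail/catch block above collapses into a single expectThrows call that hands back the thrown exception so its message can still be asserted. A sketch of what such a helper does, written as an illustrative stand-alone method rather than the test framework's own expectThrows:

// Sketch only: run a snippet, assert it throws the expected type, and return
// the exception so the caller can check the message.
class ExpectThrowsSketch {
    interface ThrowingRunnable {
        void run() throws Exception;
    }

    static <T extends Throwable> T expectThrows(Class<T> expected, ThrowingRunnable code) {
        try {
            code.run();
        } catch (Throwable t) {
            if (expected.isInstance(t)) {
                return expected.cast(t);
            }
            throw new AssertionError("expected " + expected.getSimpleName() + " but got " + t, t);
        }
        throw new AssertionError("expected " + expected.getSimpleName() + " but nothing was thrown");
    }

    public static void main(String[] args) {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> { throw new IllegalArgumentException("boom"); });
        System.out.println(e.getMessage()); // boom
    }
}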


@@ -19,6 +19,10 @@
 package org.elasticsearch.search.suggest.phrase;
 
+import org.elasticsearch.index.query.QueryParseContext;
+
+import java.io.IOException;
+
 import static org.hamcrest.Matchers.instanceOf;
 
 public class LaplaceModelTests extends SmoothingModelTestCase {
@@ -48,4 +52,9 @@ public class LaplaceModelTests extends SmoothingModelTestCase {
         assertThat(wordScorer, instanceOf(LaplaceScorer.class));
         assertEquals(model.getAlpha(), ((LaplaceScorer) wordScorer).alpha(), Double.MIN_VALUE);
     }
+
+    @Override
+    protected SmoothingModel fromXContent(QueryParseContext context) throws IOException {
+        return Laplace.innerFromXContent(context);
+    }
 }


@@ -19,6 +19,10 @@
 package org.elasticsearch.search.suggest.phrase;
 
+import org.elasticsearch.index.query.QueryParseContext;
+
+import java.io.IOException;
+
 import static org.hamcrest.Matchers.instanceOf;
 
 public class LinearInterpolationModelTests extends SmoothingModelTestCase {
@@ -67,4 +71,9 @@ public class LinearInterpolationModelTests extends SmoothingModelTestCase {
         assertEquals(testModel.getBigramLambda(), (testScorer).bigramLambda(), 1e-15);
         assertEquals(testModel.getUnigramLambda(), (testScorer).unigramLambda(), 1e-15);
     }
+
+    @Override
+    protected SmoothingModel fromXContent(QueryParseContext context) throws IOException {
+        return LinearInterpolation.innerFromXContent(context);
+    }
 }


@@ -21,26 +21,12 @@ package org.elasticsearch.search.suggest.phrase;
 
 import org.elasticsearch.script.Template;
 import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase;
-import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
-import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
-import org.junit.BeforeClass;
 
 import java.io.IOException;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.Map;
 
-import static org.hamcrest.Matchers.instanceOf;
-
 public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase<PhraseSuggestionBuilder> {
 
-    @BeforeClass
-    public static void initSmoothingModels() {
-        namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
-        namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
-        namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
-    }
-
     @Override
     protected PhraseSuggestionBuilder randomSuggestionBuilder() {
         return randomPhraseSuggestionBuilder();
@@ -162,81 +148,40 @@
     public void testInvalidParameters() throws IOException {
         // test missing field name
-        try {
-            new PhraseSuggestionBuilder(null);
-            fail("Should not allow null as field name");
-        } catch (NullPointerException e) {
-            assertEquals("suggestion requires a field name", e.getMessage());
-        }
+        Exception e = expectThrows(NullPointerException.class, () -> new PhraseSuggestionBuilder((String) null));
+        assertEquals("suggestion requires a field name", e.getMessage());
 
-        // test emtpy field name
-        try {
-            new PhraseSuggestionBuilder("");
-            fail("Should not allow empty string as field name");
-        } catch (IllegalArgumentException e) {
-            assertEquals("suggestion field name is empty", e.getMessage());
-        }
+        // test empty field name
+        e = expectThrows(IllegalArgumentException.class, () -> new PhraseSuggestionBuilder(""));
+        assertEquals("suggestion field name is empty", e.getMessage());
 
         PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
-        try {
-            builder.gramSize(0);
-            fail("Should not allow gramSize < 1");
-        } catch (IllegalArgumentException e) {
-            assertEquals("gramSize must be >= 1", e.getMessage());
-        }
-        try {
-            builder.gramSize(-1);
-            fail("Should not allow gramSize < 1");
-        } catch (IllegalArgumentException e) {
-            assertEquals("gramSize must be >= 1", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> builder.gramSize(0));
+        assertEquals("gramSize must be >= 1", e.getMessage());
+        e = expectThrows(IllegalArgumentException.class, () -> builder.gramSize(-1));
+        assertEquals("gramSize must be >= 1", e.getMessage());
 
-        try {
-            builder.maxErrors(-1);
-            fail("Should not allow maxErrors < 0");
-        } catch (IllegalArgumentException e) {
-            assertEquals("max_error must be > 0.0", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> builder.maxErrors(-1));
+        assertEquals("max_error must be > 0.0", e.getMessage());
 
-        try {
-            builder.separator(null);
-            fail("Should not allow null as separator");
-        } catch (NullPointerException e) {
-            assertEquals("separator cannot be set to null", e.getMessage());
-        }
+        e = expectThrows(NullPointerException.class, () -> builder.separator(null));
        assertEquals("separator cannot be set to null", e.getMessage());
 
-        try {
-            builder.realWordErrorLikelihood(-1);
-            fail("Should not allow real world error likelihood < 0");
-        } catch (IllegalArgumentException e) {
-            assertEquals("real_word_error_likelihood must be > 0.0", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> builder.realWordErrorLikelihood(-1));
+        assertEquals("real_word_error_likelihood must be > 0.0", e.getMessage());
 
-        try {
-            builder.confidence(-1);
-            fail("Should not allow confidence < 0");
-        } catch (IllegalArgumentException e) {
-            assertEquals("confidence must be >= 0.0", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> builder.confidence(-1));
+        assertEquals("confidence must be >= 0.0", e.getMessage());
 
-        try {
-            builder.tokenLimit(0);
-            fail("token_limit must be >= 1");
-        } catch (IllegalArgumentException e) {
-            assertEquals("token_limit must be >= 1", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> builder.tokenLimit(0));
        assertEquals("token_limit must be >= 1", e.getMessage());
 
-        try {
-            if (randomBoolean()) {
-                builder.highlight(null, "</b>");
-            } else {
-                builder.highlight("<b>", null);
-            }
-            fail("Pre and post tag must both be null or both not be null.");
-        } catch (IllegalArgumentException e) {
-            assertEquals("Pre and post tag must both be null or both not be null.", e.getMessage());
-        }
+        e = expectThrows(IllegalArgumentException.class, () -> builder.highlight(null, "</b>"));
+        assertEquals("Pre and post tag must both be null or both not be null.", e.getMessage());
+
+        e = expectThrows(IllegalArgumentException.class, () -> builder.highlight("<b>", null));
+        assertEquals("Pre and post tag must both be null or both not be null.", e.getMessage());
     }
 }


@@ -45,6 +45,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.search.suggest.Suggesters;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -68,9 +69,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
     public static void init() {
         if (namedWriteableRegistry == null) {
             namedWriteableRegistry = new NamedWriteableRegistry();
-            namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
-            namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
-            namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
+            new Suggesters(namedWriteableRegistry);
         }
     }
@@ -89,6 +88,8 @@
      */
     protected abstract SmoothingModel createMutation(SmoothingModel original) throws IOException;
 
+    protected abstract SmoothingModel fromXContent(QueryParseContext context) throws IOException;
+
     /**
      * Test that creates new smoothing model from a random test smoothing model and checks both for equality
      */
@@ -108,7 +109,7 @@
         XContentParser parser = XContentHelper.createParser(contentBuilder.bytes());
         context.reset(parser);
         parser.nextToken(); // go to start token, real parsing would do that in the outer element parser
-        SmoothingModel parsedModel = testModel.innerFromXContent(context);
+        SmoothingModel parsedModel = fromXContent(context);
         assertNotSame(testModel, parsedModel);
         assertEquals(testModel, parsedModel);
         assertEquals(testModel.hashCode(), parsedModel.hashCode());


@@ -19,6 +19,10 @@
 package org.elasticsearch.search.suggest.phrase;
 
+import org.elasticsearch.index.query.QueryParseContext;
+
+import java.io.IOException;
+
 import static org.hamcrest.Matchers.instanceOf;
 
 public class StupidBackoffModelTests extends SmoothingModelTestCase {
@@ -47,4 +51,9 @@ public class StupidBackoffModelTests extends SmoothingModelTestCase {
         StupidBackoff testModel = (StupidBackoff) input;
         assertEquals(testModel.getDiscount(), ((StupidBackoffScorer) wordScorer).discount(), Double.MIN_VALUE);
     }
+
+    @Override
+    protected SmoothingModel fromXContent(QueryParseContext context) throws IOException {
+        return StupidBackoff.innerFromXContent(context);
+    }
 }


@@ -20,6 +20,7 @@
 package org.elasticsearch.search.suggest.term;
 
 import org.elasticsearch.common.io.stream.AbstractWriteableEnumTestCase;
+import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.search.suggest.SortBy;
 
 import java.io.IOException;
@@ -30,6 +31,9 @@ import static org.hamcrest.Matchers.equalTo;
  * Test the {@link SortBy} enum.
  */
 public class SortByTests extends AbstractWriteableEnumTestCase {
+    public SortByTests() {
+        super(SortBy::readFromStream);
+    }
 
     @Override
     public void testValidOrdinals() {
@@ -66,5 +70,4 @@
         assertReadFromStream(0, SortBy.SCORE);
         assertReadFromStream(1, SortBy.FREQUENCY);
     }
-
 }
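The enum test cases now pass a reader method reference (for example SortBy::readFromStream) to the base class constructor instead of overriding a read hook; the two diffs that follow do the same for StringDistanceImpl and SuggestMode. A sketch of the ordinal-based write/read pair such a reference points at, using java.io data streams in place of StreamInput/StreamOutput and an illustrative enum name:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch only: an enum serialized by ordinal with a static readFromStream,
// the shape a constructor like `super(SortBy::readFromStream)` plugs in.
enum SortBySketch {
    SCORE, FREQUENCY;

    void writeTo(DataOutputStream out) throws IOException {
        out.writeInt(ordinal());
    }

    static SortBySketch readFromStream(DataInputStream in) throws IOException {
        int ordinal = in.readInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("unknown SortBySketch ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        FREQUENCY.writeTo(new DataOutputStream(bytes));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(readFromStream(in)); // FREQUENCY
    }
}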


@@ -30,6 +30,9 @@ import static org.hamcrest.Matchers.equalTo;
  * Test for the {@link StringDistanceImpl} enum.
  */
 public class StringDistanceImplTests extends AbstractWriteableEnumTestCase {
+    public StringDistanceImplTests() {
+        super(StringDistanceImpl::readFromStream);
+    }
 
     @Override
     public void testValidOrdinals() {


@@ -30,6 +30,9 @@ import static org.hamcrest.Matchers.equalTo;
  * Test the {@link SuggestMode} enum.
  */
 public class SuggestModeTests extends AbstractWriteableEnumTestCase {
+    public SuggestModeTests() {
+        super(SuggestMode::readFromStream);
+    }
 
     @Override
     public void testValidOrdinals() {


@@ -20,11 +20,11 @@
 package org.elasticsearch.search.suggest.term;
 
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
+
 import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase;
 import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
 import org.elasticsearch.search.suggest.SortBy;
 import org.elasticsearch.search.suggest.SuggestBuilder;
-import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
 import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl;
 import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
@@ -143,131 +143,52 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase<TermSuggestionBuilder> {
     public void testInvalidParameters() throws IOException {
         // test missing field name
-        try {
-            new TermSuggestionBuilder(null);
-            fail("Should not allow null as field name");
-        } catch (NullPointerException e) {
-            assertEquals("suggestion requires a field name", e.getMessage());
-        }
+        Exception e = expectThrows(NullPointerException.class, () -> new TermSuggestionBuilder((String) null));
+        assertEquals("suggestion requires a field name", e.getMessage());
 
-        // test emtpy field name
-        try {
-            new TermSuggestionBuilder("");
-            fail("Should not allow empty string as field name");
-        } catch (IllegalArgumentException e) {
-            assertEquals("suggestion field name is empty", e.getMessage());
-        }
+        // test empty field name
+        e = expectThrows(IllegalArgumentException.class, () -> new TermSuggestionBuilder(""));
+        assertEquals("suggestion field name is empty", e.getMessage());
 
         TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
 
         // test invalid accuracy values
-        try {
-            builder.accuracy(-0.5f);
-            fail("Should not allow accuracy to be set to a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.accuracy(1.1f);
-            fail("Should not allow accuracy to be greater than 1.0.");
-        } catch (IllegalArgumentException e) {
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.accuracy(-0.5f));
+        expectThrows(IllegalArgumentException.class, () -> builder.accuracy(1.1f));
 
         // test invalid max edit distance values
-        try {
-            builder.maxEdits(0);
-            fail("Should not allow maxEdits to be less than 1.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.maxEdits(-1);
-            fail("Should not allow maxEdits to be a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.maxEdits(3);
-            fail("Should not allow maxEdits to be greater than 2.");
-        } catch (IllegalArgumentException e) {
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.maxEdits(0));
+        expectThrows(IllegalArgumentException.class, () -> builder.maxEdits(-1));
+        expectThrows(IllegalArgumentException.class, () -> builder.maxEdits(3));
 
         // test invalid max inspections values
-        try {
-            builder.maxInspections(-1);
-            fail("Should not allow maxInspections to be a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.maxInspections(-1));
 
         // test invalid max term freq values
-        try {
-            builder.maxTermFreq(-0.5f);
-            fail("Should not allow max term freq to be a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.maxTermFreq(1.5f);
-            fail("If max term freq is greater than 1, it must be a whole number.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.maxTermFreq(2.0f); // this should be allowed
-        } catch (IllegalArgumentException e) {
-            fail("A max term freq greater than 1 that is a whole number should be allowed.");
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.maxTermFreq(-0.5f));
+        expectThrows(IllegalArgumentException.class, () -> builder.maxTermFreq(1.5f));
+        builder.maxTermFreq(2.0f);
 
         // test invalid min doc freq values
-        try {
-            builder.minDocFreq(-0.5f);
-            fail("Should not allow min doc freq to be a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.minDocFreq(1.5f);
-            fail("If min doc freq is greater than 1, it must be a whole number.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.minDocFreq(2.0f); // this should be allowed
-        } catch (IllegalArgumentException e) {
-            fail("A min doc freq greater than 1 that is a whole number should be allowed.");
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.minDocFreq(-0.5f));
+        expectThrows(IllegalArgumentException.class, () -> builder.minDocFreq(1.5f));
+        builder.minDocFreq(2.0f);
 
         // test invalid min word length values
-        try {
-            builder.minWordLength(0);
-            fail("A min word length < 1 should not be allowed.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.minWordLength(-1);
-            fail("Should not allow min word length to be a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.minWordLength(0));
+        expectThrows(IllegalArgumentException.class, () -> builder.minWordLength(-1));
 
         // test invalid prefix length values
-        try {
-            builder.prefixLength(-1);
-            fail("Should not allow prefix length to be a negative value.");
-        } catch (IllegalArgumentException e) {
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.prefixLength(-1));
 
         // test invalid size values
-        try {
-            builder.size(0);
-            fail("Size must be a positive value.");
-        } catch (IllegalArgumentException e) {
-        }
-        try {
-            builder.size(-1);
-            fail("Size must be a positive value.");
-        } catch (IllegalArgumentException e) {
-        }
+        expectThrows(IllegalArgumentException.class, () -> builder.size(0));
+        expectThrows(IllegalArgumentException.class, () -> builder.size(-1));
 
         // null values not allowed for enums
-        try {
-            builder.sort(null);
-            fail("Should not allow setting a null sort value.");
-        } catch (NullPointerException e) {
-        }
-        try {
-            builder.stringDistance(null);
-            fail("Should not allow setting a null string distance value.");
-        } catch (NullPointerException e) {
-        }
-        try {
-            builder.suggestMode(null);
-            fail("Should not allow setting a null suggest mode value.");
-        } catch (NullPointerException e) {
-        }
+        expectThrows(NullPointerException.class, () -> builder.sort(null));
+        expectThrows(NullPointerException.class, () -> builder.stringDistance(null));
+        expectThrows(NullPointerException.class, () -> builder.suggestMode(null));
     }
 
     public void testDefaultValuesSet() {


@@ -116,10 +116,6 @@ public class TemplateQueryParserTests extends ESTestCase {
                     protected void configureSearch() {
                         // skip so we don't need transport
                     }
-                    @Override
-                    protected void configureSuggesters() {
-                        // skip so we don't need IndicesService
-                    }
                 },
                 scriptModule,
                 new IndexSettingsModule(index, settings),