nuke SuggestParseElement

This commit is contained in:
Areek Zillur 2016-03-11 16:27:33 -05:00
parent 97e2bab4cd
commit 4b803d75cf
23 changed files with 63 additions and 1087 deletions

View File

@ -794,6 +794,11 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
} else {
SearchParseElement parseElement = this.elementParsers.get(currentFieldName);
if (parseElement == null) {
if (currentFieldName != null && currentFieldName.equals("suggest")) {
throw new SearchParseException(context,
"suggest is not supported in [ext], please use SearchSourceBuilder#suggest(SuggestBuilder) instead",
extParser.getTokenLocation());
}
throw new SearchParseException(context, "Unknown element [" + currentFieldName + "] in [ext]",
extParser.getTokenLocation());
} else {

View File

@ -1,29 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
/**
 * Parses the xContent body of a single named suggestion into its shard-level
 * {@link SuggestionSearchContext.SuggestionContext}. Each {@code Suggester}
 * supplies its own implementation (term/phrase/completion).
 */
public interface SuggestContextParser {
/**
 * @param parser       positioned inside the suggester-specific options object
 * @param shardContext shard-level context used to resolve mappings, analyzers, etc.
 * @return the parsed shard-level suggestion context
 * @throws IOException on xContent parsing failure
 */
SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException;
}

View File

@ -1,136 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
*
*/
/**
 * Legacy {@link SearchParseElement} for the top-level {@code suggest} section of a
 * search request: pull-parses the xContent directly into a
 * {@link SuggestionSearchContext} and attaches it to the {@link SearchContext}.
 */
public final class SuggestParseElement implements SearchParseElement {
// Registry of named suggesters ("term", "phrase", "completion", ...), injected.
private Suggesters suggesters;
@Inject
public SuggestParseElement(Suggesters suggesters) {
this.suggesters = suggesters;
}
/** Parses the suggest section and registers the result on the search context. */
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.getQueryShardContext());
context.suggest(suggestionSearchContext);
}
/**
 * Parses the full suggest object: an optional global {@code text} plus one
 * object per named suggestion, each dispatched to its suggester's context parser.
 *
 * @param parser       positioned at the start of the suggest object
 * @param shardContext shard-level context (mappings, analyzers)
 * @return the aggregated suggestion search context
 * @throws IOException on xContent parsing failure
 */
public SuggestionSearchContext parseInternal(XContentParser parser, QueryShardContext shardContext) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
MapperService mapperService = shardContext.getMapperService();
BytesRef globalText = null;
String fieldName = null;
Map<String, SuggestionContext> suggestionContexts = new HashMap<>();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
// The only scalar allowed at the top level is the global "text".
if ("text".equals(fieldName)) {
globalText = parser.utf8Bytes();
} else {
throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
// Each nested object is a named suggestion; the field name seen before
// the object is its name.
String suggestionName = fieldName;
BytesRef suggestText = null;
BytesRef prefix = null;
BytesRef regex = null;
SuggestionContext suggestionContext = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if ("text".equals(fieldName)) {
suggestText = parser.utf8Bytes();
} else if ("prefix".equals(fieldName)) {
prefix = parser.utf8Bytes();
} else if ("regex".equals(fieldName)) {
regex = parser.utf8Bytes();
} else {
throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (suggestionName == null) {
throw new IllegalArgumentException("Suggestion must have name");
}
// The inner object's field name selects the suggester type
// (e.g. "term", "phrase", "completion").
if (suggesters.get(fieldName) == null) {
throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
}
final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
suggestionContext = contextParser.parse(parser, shardContext);
}
}
if (suggestionContext != null) {
// NOTE(review): the three setters below are partially overwritten by the
// text/prefix/regex precedence branches that follow; looks redundant —
// confirm the intended precedence before relying on it.
if (suggestText != null) {
suggestionContext.setText(suggestText);
}
if (prefix != null) {
suggestionContext.setPrefix(prefix);
}
if (regex != null) {
suggestionContext.setRegex(regex);
}
if (suggestText != null && prefix == null) {
suggestionContext.setPrefix(suggestText);
suggestionContext.setText(suggestText);
} else if (suggestText == null && prefix != null) {
suggestionContext.setPrefix(prefix);
suggestionContext.setText(prefix);
} else if (regex != null) {
suggestionContext.setRegex(regex);
suggestionContext.setText(regex);
}
suggestionContexts.put(suggestionName, suggestionContext);
} else {
throw new IllegalArgumentException("suggestion context could not be parsed correctly");
}
}
}
// Apply the global text fallback and field verification, then register each
// suggestion on the aggregated context.
for (Map.Entry<String, SuggestionContext> entry : suggestionContexts.entrySet()) {
String suggestionName = entry.getKey();
SuggestionContext suggestionContext = entry.getValue();
SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext);
suggestionSearchContext.addSuggestion(suggestionName, suggestionContext);
}
return suggestionSearchContext;
}
}

View File

@ -37,28 +37,22 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonMap;
import static java.util.Collections.emptyMap;
/**
*/
public class SuggestPhase extends AbstractComponent implements SearchPhase {
private final Map<String, SearchParseElement> parseElements;
private final SuggestParseElement parseElement;
@Inject
public SuggestPhase(Settings settings, SuggestParseElement suggestParseElement) {
public SuggestPhase(Settings settings) {
super(settings);
this.parseElement = suggestParseElement;
parseElements = singletonMap("suggest", parseElement);
}
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return parseElements;
}
public SuggestParseElement parseElement() {
return parseElement;
// this is used to parse SearchSourceBuilder.ext() bytes
// we don't allow any suggestion parsing for the extension
return emptyMap();
}
@Override

View File

@ -29,13 +29,6 @@ public abstract class Suggester<T extends SuggestionSearchContext.SuggestionCont
protected abstract Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException;
/**
* link the suggester to its corresponding {@link SuggestContextParser}
* TODO: This method should eventually be removed by {@link #getBuilderPrototype()} once
* we don't directly parse from xContent to the SuggestionContext any more
*/
public abstract SuggestContextParser getContextParser();
/**
* link the suggester to its corresponding {@link SuggestionBuilder}
*/

View File

@ -42,7 +42,7 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
@Inject
public Suggesters(Map<String, Suggester> suggesters) {
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class);
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestPhase.class);
this.parsers = Collections.unmodifiableMap(addBuildIns(suggesters));
}
@ -55,10 +55,6 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
return map;
}
/** Returns the registered {@code Suggester} for the given type name, or {@code null} if unknown. */
public Suggester get(String type) {
return parsers.get(type);
}
public SuggestionBuilder<? extends SuggestionBuilder> getSuggestionPrototype(String suggesterName) {
Suggester<?> suggester = parsers.get(suggesterName);
if (suggester == null) {

View File

@ -1,143 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.RegexpFlag;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils.Fields;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
/**
* Parses query options for {@link CompletionSuggester}
*
* Acceptable input:
* {
* "field" : STRING
* "size" : INT
* "fuzzy" : BOOLEAN | FUZZY_OBJECT
* "contexts" : QUERY_CONTEXTS
* "regex" : REGEX_OBJECT
* }
*
* FUZZY_OBJECT : {
* "edit_distance" : STRING | INT
* "transpositions" : BOOLEAN
* "min_length" : INT
* "prefix_length" : INT
* "unicode_aware" : BOOLEAN
* "max_determinized_states" : INT
* }
*
* REGEX_OBJECT: {
* "flags" : REGEX_FLAGS
* "max_determinized_states" : INT
* }
*
* see {@link RegexpFlag} for REGEX_FLAGS
*/
/**
 * {@link SuggestContextParser} for the completion suggester: parses the options
 * object (see the structure documented above) into a {@link CompletionSuggestionContext}.
 */
public class CompletionSuggestParser implements SuggestContextParser {
// Top-level ObjectParser for the completion suggestion body; built once, reused.
private static ObjectParser<CompletionSuggestionContext, ContextAndSuggest> TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null);
static {
TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD);
// "fuzzy" accepts either a boolean (true -> default fuzzy options) or an options object.
TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> {
if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
if (parser.booleanValue()) {
completionSuggestionContext.setFuzzyOptions(new FuzzyOptions.Builder().build());
}
} else {
completionSuggestionContext.setFuzzyOptions(FuzzyOptions.parse(parser));
}
},
FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN);
TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> completionSuggestionContext.setRegexOptions(RegexOptions.parse(parser)),
RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT);
TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD);
// "analyzer": resolve the named analyzer via the mapper service; fail fast if unknown.
TLP_PARSER.declareField((p, v, c) -> {
String analyzerName = p.text();
Analyzer analyzer = c.mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
v.setAnalyzer(analyzer);
}, Fields.ANALYZER, ObjectParser.ValueType.STRING);
TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, Fields.SIZE);
TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, Fields.SHARD_SIZE);
TLP_PARSER.declareField((p, v, c) -> {
// Copy the current structure. We will parse, once the mapping is provided
XContentBuilder builder = XContentFactory.contentBuilder(p.contentType());
builder.copyCurrentStructure(p);
BytesReference bytes = builder.bytes();
c.contextParser = XContentFactory.xContent(bytes).createParser(bytes);
p.skipChildren();
}, CompletionSuggestionBuilder.CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated
}
// Carries the deferred "contexts" parser plus the mapper service through ObjectParser callbacks.
private static class ContextAndSuggest {
XContentParser contextParser;
final MapperService mapperService;
ContextAndSuggest(MapperService mapperService) {
this.mapperService = mapperService;
}
}
private final CompletionSuggester completionSuggester;
public CompletionSuggestParser(CompletionSuggester completionSuggester) {
this.completionSuggester = completionSuggester;
}
/**
 * Parses the completion suggestion options, validates the target field is a
 * completion field, and resolves any query contexts against its context mappings.
 */
@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException {
MapperService mapperService = shardContext.getMapperService();
final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(shardContext);
final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService);
TLP_PARSER.parse(parser, suggestion, contextAndSuggest);
final XContentParser contextParser = contextAndSuggest.contextParser;
MappedFieldType mappedFieldType = mapperService.fullName(suggestion.getField());
if (mappedFieldType == null) {
throw new ElasticsearchException("Field [" + suggestion.getField() + "] is not a completion suggest field");
} else if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) {
CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
// Contexts were supplied but the field's mapping declares none.
if (type.hasContextMappings() == false && contextParser != null) {
throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
}
suggestion.setQueryContexts(CompletionSuggestionBuilder.parseQueryContexts(contextParser, type));
suggestion.setFieldType(type);
return suggestion;
} else {
throw new IllegalArgumentException("Field [" + suggestion.getField() + "] is not a completion suggest field");
}
}
}

View File

@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
@ -55,11 +54,6 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
public static final CompletionSuggester PROTOTYPE = new CompletionSuggester();
@Override
public SuggestContextParser getContextParser() {
return new CompletionSuggestParser(this);
}
@Override
protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name,
final CompletionSuggestionContext suggestionContext, final IndexSearcher searcher, CharsRefBuilder spare) throws IOException {

View File

@ -63,6 +63,16 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
static final ParseField PAYLOAD_FIELD = new ParseField("payload");
static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
/**
* {
* "field" : STRING
* "size" : INT
* "fuzzy" : BOOLEAN | FUZZY_OBJECT
* "contexts" : QUERY_CONTEXTS
* "regex" : REGEX_OBJECT
* "payload" : STRING_ARRAY
* }
*/
private static ObjectParser<CompletionSuggestionBuilder.InnerBuilder, Void> TLP_PARSER =
new ObjectParser<>(SUGGESTION_NAME, null);
static {
@ -261,8 +271,24 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
suggestionContext.setFieldType(type);
if (type.hasContextMappings() && contextBytes != null) {
XContentParser contextParser = XContentFactory.xContent(contextBytes).createParser(contextBytes);
suggestionContext.setQueryContexts(parseQueryContexts(contextParser, type));
try (XContentParser contextParser = XContentFactory.xContent(contextBytes).createParser(contextBytes)) {
if (type.hasContextMappings() && contextParser != null) {
ContextMappings contextMappings = type.getContextMappings();
contextParser.nextToken();
Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = new HashMap<>(contextMappings.size());
assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
XContentParser.Token currentToken;
String currentFieldName;
while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = contextParser.currentName();
final ContextMapping mapping = contextMappings.get(currentFieldName);
queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
}
}
suggestionContext.setQueryContexts(queryContexts);
}
}
} else if (contextBytes != null) {
throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
}
@ -335,26 +361,4 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
protected int doHashCode() {
return Objects.hash(payloadFields, fuzzyOptions, regexOptions, contextBytes);
}
/**
 * Parses the deferred "contexts" object: one entry per context name, resolved
 * against the field's {@link ContextMappings}.
 *
 * @param contextParser parser over the copied contexts object, or {@code null} if none was given
 * @param type          the completion field type supplying the context mappings
 * @return parsed query contexts keyed by context name; empty when the field has
 *         no context mappings or no contexts were supplied
 * @throws IOException on xContent parsing failure
 */
static Map<String, List<ContextMapping.InternalQueryContext>> parseQueryContexts(
XContentParser contextParser, CompletionFieldMapper.CompletionFieldType type) throws IOException {
Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = Collections.emptyMap();
if (type.hasContextMappings() && contextParser != null) {
ContextMappings contextMappings = type.getContextMappings();
contextParser.nextToken();
queryContexts = new HashMap<>(contextMappings.size());
// The copied structure must start as an object: { "ctx_name": ..., ... }
assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
XContentParser.Token currentToken;
String currentFieldName;
while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = contextParser.currentName();
final ContextMapping mapping = contextMappings.get(currentFieldName);
queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
}
}
contextParser.close();
}
return queryContexts;
}
}

View File

@ -46,6 +46,16 @@ public class FuzzyOptions implements ToXContent, Writeable<FuzzyOptions> {
private static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware");
private static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states");
/**
* fuzzy : {
* "edit_distance" : STRING | INT
* "transpositions" : BOOLEAN
* "min_length" : INT
* "prefix_length" : INT
* "unicode_aware" : BOOLEAN
* "max_determinized_states" : INT
* }
*/
private static ObjectParser<Builder, Void> PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new);
static {
PARSER.declareInt(Builder::setFuzzyMinLength, MIN_LENGTH_FIELD);

View File

@ -42,6 +42,12 @@ public class RegexOptions implements ToXContent, Writeable<RegexOptions> {
private static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value");
private static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states");
/**
* regex: {
* "flags" : STRING | INT
* "max_determinized_states" : INT
* }
*/
private static ObjectParser<Builder, Void> PARSER = new ObjectParser<>(REGEX_OPTIONS.getPreferredName(), Builder::new);
static {
PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES);

View File

@ -178,7 +178,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
protected long thresholdFrequency(long termFrequency, long dictionarySize) {
if (termFrequency > 0) {
return (long) Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1));
return Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1));
}
return 0;

View File

@ -1,356 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import java.io.IOException;
import java.util.Collections;
public final class PhraseSuggestParser implements SuggestContextParser {
private PhraseSuggester suggester;
/** Creates a parser bound to the given phrase suggester. */
public PhraseSuggestParser(PhraseSuggester suggester) {
this.suggester = suggester;
}
/**
 * Pull-parses the phrase-suggester options object into a {@link PhraseSuggestionContext},
 * then applies defaults and validation: analyzer fallback, default smoothing model,
 * shingle-based gram-size detection, and a default candidate generator.
 *
 * @param parser       positioned inside the "phrase" options object
 * @param shardContext shard-level context (mappings, analyzers, scripts)
 * @throws IOException on xContent parsing failure
 */
@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException {
MapperService mapperService = shardContext.getMapperService();
ScriptService scriptService = shardContext.getScriptService();
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(shardContext);
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
XContentParser.Token token;
String fieldName = null;
boolean gramSizeSet = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
// Common fields (field/analyzer/size/...) are handled by SuggestUtils first;
// only phrase-specific scalars fall through to the chain below.
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) {
suggestion.setRealWordErrorLikelihood(parser.floatValue());
if (suggestion.realworldErrorLikelyhood() <= 0.0) {
throw new IllegalArgumentException("real_word_error_likelihood must be > 0.0");
}
} else if ("confidence".equals(fieldName)) {
suggestion.setConfidence(parser.floatValue());
if (suggestion.confidence() < 0.0) {
throw new IllegalArgumentException("confidence must be >= 0.0");
}
} else if ("separator".equals(fieldName)) {
suggestion.setSeparator(new BytesRef(parser.text()));
} else if ("max_errors".equals(fieldName) || "maxErrors".equals(fieldName)) {
suggestion.setMaxErrors(parser.floatValue());
if (suggestion.maxErrors() <= 0.0) {
throw new IllegalArgumentException("max_error must be > 0.0");
}
} else if ("gram_size".equals(fieldName) || "gramSize".equals(fieldName)) {
suggestion.setGramSize(parser.intValue());
if (suggestion.gramSize() < 1) {
throw new IllegalArgumentException("gram_size must be >= 1");
}
gramSizeSet = true;
} else if ("force_unigrams".equals(fieldName) || "forceUnigrams".equals(fieldName)) {
suggestion.setRequireUnigram(parser.booleanValue());
} else if ("token_limit".equals(fieldName) || "tokenLimit".equals(fieldName)) {
int tokenLimit = parser.intValue();
if (tokenLimit <= 0) {
throw new IllegalArgumentException("token_limit must be >= 1");
}
suggestion.setTokenLimit(tokenLimit);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
}
}
} else if (token == Token.START_ARRAY) {
if (parseFieldMatcher.match(fieldName, DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD)) {
// for now we only have a single type of generators
while ((token = parser.nextToken()) == Token.START_OBJECT) {
PhraseSuggestionContext.DirectCandidateGenerator generator = parseCandidateGenerator(parser, mapperService, parseFieldMatcher);
verifyGenerator(generator);
suggestion.addGenerator(generator);
}
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]");
}
} else if (token == Token.START_OBJECT) {
if ("smoothing".equals(fieldName)) {
parseSmoothingModel(parser, suggestion, fieldName);
} else if ("highlight".equals(fieldName)) {
// highlight: { "pre_tag": ..., "post_tag": ... }
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if ("pre_tag".equals(fieldName) || "preTag".equals(fieldName)) {
suggestion.setPreTag(parser.utf8Bytes());
} else if ("post_tag".equals(fieldName) || "postTag".equals(fieldName)) {
suggestion.setPostTag(parser.utf8Bytes());
} else {
throw new IllegalArgumentException(
"suggester[phrase][highlight] doesn't support field [" + fieldName + "]");
}
}
}
} else if ("collate".equals(fieldName)) {
// collate: { "query": TEMPLATE, "params": OBJECT, "prune": BOOLEAN }
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if ("query".equals(fieldName)) {
if (suggestion.getCollateQueryScript() != null) {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
}
// Compile the collate query template eagerly so errors surface at parse time.
Template template = Template.parse(parser, parseFieldMatcher);
CompiledScript compiledScript = scriptService.compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
suggestion.setCollateQueryScript(compiledScript);
} else if ("params".equals(fieldName)) {
suggestion.setCollateScriptParams(parser.map());
} else if ("prune".equals(fieldName)) {
if (parser.isBooleanValue()) {
suggestion.setCollatePrune(parser.booleanValue());
} else {
throw new IllegalArgumentException("suggester[phrase][collate] prune must be either 'true' or 'false'");
}
} else {
throw new IllegalArgumentException(
"suggester[phrase][collate] doesn't support field [" + fieldName + "]");
}
}
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]");
}
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
}
}
// --- post-parse validation and defaults ---
if (suggestion.getField() == null) {
throw new IllegalArgumentException("The required field option is missing");
}
MappedFieldType fieldType = mapperService.fullName(suggestion.getField());
if (fieldType == null) {
throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]");
} else if (suggestion.getAnalyzer() == null) {
// no analyzer name passed in, so try the field's analyzer, or the default analyzer
if (fieldType.searchAnalyzer() == null) {
suggestion.setAnalyzer(mapperService.searchAnalyzer());
} else {
suggestion.setAnalyzer(fieldType.searchAnalyzer());
}
}
if (suggestion.model() == null) {
suggestion.setModel(StupidBackoffScorer.FACTORY);
}
if (!gramSizeSet || suggestion.generators().isEmpty()) {
final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils.getShingleFilterFactory(suggestion.getAnalyzer());
if (!gramSizeSet) {
// try to detect the shingle size
if (shingleFilterFactory != null) {
suggestion.setGramSize(shingleFilterFactory.getMaxShingleSize());
if (suggestion.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams()) {
throw new IllegalArgumentException("The default analyzer for field: [" + suggestion.getField() + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly");
}
}
}
if (suggestion.generators().isEmpty()) {
if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams() && suggestion.getRequireUnigram()) {
throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + suggestion.getField() + "] since it doesn't emit unigrams. If this is intentional try to set the candidate generator field explicitly");
}
// use a default generator on the same field
DirectCandidateGenerator generator = new DirectCandidateGenerator();
generator.setField(suggestion.getField());
suggestion.addGenerator(generator);
}
}
return suggestion;
}
/**
 * Parses the {@code smoothing} object and installs the corresponding
 * {@link WordScorer.WordScorerFactory} on the suggestion. Exactly one model is
 * allowed: "linear" (lambdas must sum to 1), "laplace" (alpha), or
 * "stupid_backoff" (discount).
 *
 * @param parser     positioned inside the smoothing object
 * @param suggestion context receiving the scorer factory
 * @param fieldName  the current field name (reused as a scratch variable while parsing)
 * @throws IOException on xContent parsing failure
 */
public void parseSmoothingModel(XContentParser parser, PhraseSuggestionContext suggestion, String fieldName) throws IOException {
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
if ("linear".equals(fieldName)) {
ensureNoSmoothing(suggestion);
// lambdas[0..2] = trigram, bigram, unigram interpolation weights
final double[] lambdas = new double[3];
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue()) {
if ("trigram_lambda".equals(fieldName) || "trigramLambda".equals(fieldName)) {
lambdas[0] = parser.doubleValue();
if (lambdas[0] < 0) {
throw new IllegalArgumentException("trigram_lambda must be positive");
}
} else if ("bigram_lambda".equals(fieldName) || "bigramLambda".equals(fieldName)) {
lambdas[1] = parser.doubleValue();
if (lambdas[1] < 0) {
throw new IllegalArgumentException("bigram_lambda must be positive");
}
} else if ("unigram_lambda".equals(fieldName) || "unigramLambda".equals(fieldName)) {
lambdas[2] = parser.doubleValue();
if (lambdas[2] < 0) {
throw new IllegalArgumentException("unigram_lambda must be positive");
}
} else {
throw new IllegalArgumentException(
"suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
}
}
}
double sum = 0.0d;
for (int i = 0; i < lambdas.length; i++) {
sum += lambdas[i];
}
// Allow a small tolerance for floating-point input.
if (Math.abs(sum - 1.0) > 0.001) {
throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
}
suggestion.setModel(new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
throws IOException {
return new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, lambdas[0], lambdas[1],
lambdas[2]);
}
});
} else if ("laplace".equals(fieldName)) {
ensureNoSmoothing(suggestion);
double theAlpha = Laplace.DEFAULT_LAPLACE_ALPHA;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue() && "alpha".equals(fieldName)) {
theAlpha = parser.doubleValue();
}
}
// Copy to a final local so the anonymous factory can capture it.
final double alpha = theAlpha;
suggestion.setModel(new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
throws IOException {
return new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
}
});
} else if ("stupid_backoff".equals(fieldName) || "stupidBackoff".equals(fieldName)) {
ensureNoSmoothing(suggestion);
double theDiscount = StupidBackoff.DEFAULT_BACKOFF_DISCOUNT;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue() && "discount".equals(fieldName)) {
theDiscount = parser.doubleValue();
}
}
final double discount = theDiscount;
suggestion.setModel(new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
throws IOException {
return new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
}
});
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
}
}
}
/** Rejects configuring a second smoothing model: a phrase suggestion supports at most one. */
private void ensureNoSmoothing(PhraseSuggestionContext suggestion) {
    if (suggestion.model() == null) {
        return;
    }
    throw new IllegalArgumentException("only one smoothing model supported");
}
/**
 * Validates a fully parsed candidate generator; currently only the mandatory
 * "field" option is checked.
 */
private void verifyGenerator(PhraseSuggestionContext.DirectCandidateGenerator suggestion) {
    if (suggestion.field() != null) {
        return;
    }
    throw new IllegalArgumentException("The required field option is missing");
}
/**
 * Parses a single direct candidate generator definition into a
 * {@link PhraseSuggestionContext.DirectCandidateGenerator}. Common spellchecker options
 * (accuracy, suggest_mode, ...) are delegated to
 * {@link SuggestUtils#parseDirectSpellcheckerSettings}; the remaining options handled here
 * are "field", "size", "pre_filter"/"preFilter" and "post_filter"/"postFilter".
 *
 * @param parser the parser, positioned inside the generator object
 * @param mapperService used to validate the target field mapping and resolve analyzers
 * @param parseFieldMatcher matcher for deprecated/alternative field names
 * @return the populated candidate generator
 * @throws IOException if the underlying parser fails
 * @throws IllegalArgumentException on unknown options, an unmapped field, or an unknown analyzer
 */
static PhraseSuggestionContext.DirectCandidateGenerator parseCandidateGenerator(XContentParser parser, MapperService mapperService,
        ParseFieldMatcher parseFieldMatcher) throws IOException {
    XContentParser.Token token;
    String fieldName = null;
    PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
    while ((token = parser.nextToken()) != Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            fieldName = parser.currentName();
        }
        if (token.isValue()) {
            // shared spellchecker settings are tried first; only unhandled fields fall through
            if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
                if ("field".equals(fieldName)) {
                    generator.setField(parser.text());
                    if (mapperService.fullName(generator.field()) == null) {
                        throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
                    }
                } else if ("size".equals(fieldName)) {
                    generator.size(parser.intValue());
                } else if ("pre_filter".equals(fieldName) || "preFilter".equals(fieldName)) {
                    generator.preFilter(resolveAnalyzer(mapperService, parser.text()));
                } else if ("post_filter".equals(fieldName) || "postFilter".equals(fieldName)) {
                    generator.postFilter(resolveAnalyzer(mapperService, parser.text()));
                } else {
                    throw new IllegalArgumentException("CandidateGenerator doesn't support [" + fieldName + "]");
                }
            }
        }
    }
    return generator;
}

/**
 * Looks up a registered analyzer by name, failing with a clear error if it does not exist.
 * Extracted to remove the duplicated pre_filter/post_filter lookup (and to fix the
 * "doesn't exists" grammar in the user-facing error message).
 */
private static Analyzer resolveAnalyzer(MapperService mapperService, String analyzerName) {
    Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
    if (analyzer == null) {
        throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exist");
    }
    return analyzer;
}
}

View File

@ -38,7 +38,6 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
@ -144,11 +143,6 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore);
}
@Override
public SuggestContextParser getContextParser() {
return new PhraseSuggestParser(this);
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return PhraseSuggestionBuilder.PROTOTYPE;

View File

@ -77,7 +77,7 @@ public abstract class WordScorer {
}
return candidate.stringDistance;
}
public double score(Candidate[] path, CandidateSet[] candidateSet, int at, int gramSize) throws IOException {
if (at == 0 || gramSize == 1) {
return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreUnigram(path[at]));
@ -87,21 +87,21 @@ public abstract class WordScorer {
return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreTrigram(path[at], path[at - 1], path[at - 2]));
}
}
protected double scoreUnigram(Candidate word) throws IOException {
return (1.0 + frequency(word.term)) / (vocabluarySize + numTerms);
}
protected double scoreBigram(Candidate word, Candidate w_1) throws IOException {
return scoreUnigram(word);
}
protected double scoreTrigram(Candidate word, Candidate w_1, Candidate w_2) throws IOException {
return scoreBigram(word, w_1);
}
public static interface WordScorerFactory {
public WordScorer newScorer(IndexReader reader, Terms terms,
String field, double realWordLikelyhood, BytesRef separator) throws IOException;
public interface WordScorerFactory {
WordScorer newScorer(IndexReader reader, Terms terms,
String field, double realWordLikelyhood, BytesRef separator) throws IOException;
}
}

View File

@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
/**
 * Parses the request body of a "term" suggestion into a {@link TermSuggestionContext}.
 * All options are simple values; common ones are delegated to {@link SuggestUtils}.
 */
public final class TermSuggestParser implements SuggestContextParser {

    // Suggester this parser belongs to (kept for parity with the other suggest parsers).
    private final TermSuggester suggester;

    public TermSuggestParser(TermSuggester suggester) {
        this.suggester = suggester;
    }

    @Override
    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException {
        final MapperService mapperService = shardContext.getMapperService();
        final TermSuggestionContext suggestion = new TermSuggestionContext(shardContext);
        final DirectSpellcheckerSettings settings = suggestion.getDirectSpellCheckerSettings();
        String fieldName = null;
        for (XContentParser.Token token = parser.nextToken();
                token != XContentParser.Token.END_OBJECT;
                token = parser.nextToken()) {
            if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
            } else if (token.isValue()) {
                parseTokenValue(parser, mapperService, fieldName, suggestion, settings,
                        mapperService.getIndexSettings().getParseFieldMatcher());
            } else {
                // term suggestions have a flat structure; nested objects/arrays are rejected
                throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]");
            }
        }
        return suggestion;
    }

    private void parseTokenValue(XContentParser parser, MapperService mapperService, String fieldName, TermSuggestionContext suggestion,
            DirectSpellcheckerSettings settings, ParseFieldMatcher parseFieldMatcher) throws IOException {
        // try the shared suggest options first, then the spellchecker-specific ones
        final boolean handled = SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)
                || SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, settings, parseFieldMatcher);
        if (handled == false) {
            throw new IllegalArgumentException("suggester[term] doesn't support [" + fieldName + "]");
        }
    }
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
@ -67,12 +66,6 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
return response;
}
@Override
public SuggestContextParser getContextParser() {
return new TermSuggestParser(this);
}
private List<Token> queryTerms(SuggestionContext suggestion, CharsRefBuilder spare) throws IOException {
final List<Token> result = new ArrayList<>();
final String field = suggestion.getField();

View File

@ -19,9 +19,7 @@
package org.elasticsearch.search.suggest;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@ -34,43 +32,23 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptServiceTests.TestEngineService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;
@ -83,8 +61,6 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
protected static IndicesQueriesRegistry queriesRegistry;
protected static ParseFieldMatcher parseFieldMatcher;
protected static Suggesters suggesters;
private static ScriptService scriptService;
private static SuggestParseElement parseElement;
/**
* setup for the whole base test class
@ -101,15 +77,7 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry
.ScriptEngineRegistration(TestEngineService.class, TestEngineService.TYPES)));
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
scriptService = new ScriptService(baseSettings, environment, Collections.singleton(new TestEngineService()),
new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
@Override
public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, String> params) {
return new CompiledScript(ScriptType.INLINE, "mockName", "mocklang", script);
}
};
suggesters = new Suggesters(Collections.emptyMap());
parseElement = new SuggestParseElement(suggesters);
namedWriteableRegistry = new NamedWriteableRegistry();
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
@ -224,104 +192,6 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
}
}
protected Tuple<MappedFieldType, SB> randomFieldTypeAndSuggestionBuilder() {
StringFieldType type = new StringFieldType();
if (randomBoolean()) {
type.setSearchAnalyzer(new NamedAnalyzer("foo", new WhitespaceAnalyzer()));
}
return new Tuple<>(type, randomTestBuilder());
}
/**
* parses random suggestion builder via old parseElement method and via
* build, comparing the results for equality
*/
public void testBuild() throws IOException {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), Settings.EMPTY);
AnalysisService mockAnalysisService = new AnalysisService(idxSettings, Collections.emptyMap(), Collections.emptyMap(),
Collections.emptyMap(), Collections.emptyMap()) {
@Override
public NamedAnalyzer analyzer(String name) {
return new NamedAnalyzer(name, new WhitespaceAnalyzer());
}
};
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
SuggestBuilder suggestBuilder = new SuggestBuilder();
final Tuple<MappedFieldType, SB> mappedFieldTypeSBTuple = randomFieldTypeAndSuggestionBuilder();
final MapperService mapperService = new MapperService(idxSettings, mockAnalysisService, null,
new IndicesModule().getMapperRegistry(), null) {
@Override
public MappedFieldType fullName(String fullName) {
return mappedFieldTypeSBTuple.v1();
}
};
SB suggestionBuilder = mappedFieldTypeSBTuple.v2();
QueryShardContext mockShardContext = new QueryShardContext(idxSettings,
null, null, mapperService, null, scriptService, null) {
@Override
public MappedFieldType fieldMapper(String name) {
StringFieldMapper.Builder builder = new StringFieldMapper.Builder(name);
return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
}
};
mockShardContext.setMapUnmappedFieldAsString(true);
suggestBuilder.addSuggestion(randomAsciiOfLength(10), suggestionBuilder);
if (suggestionBuilder.text() == null) {
// we either need suggestion text or global text
suggestBuilder.setGlobalText(randomAsciiOfLengthBetween(5, 50));
}
if (suggestionBuilder.text() != null && suggestionBuilder.prefix() != null) {
suggestionBuilder.prefix(null);
}
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
xContentBuilder.prettyPrint();
}
suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes());
parser.nextToken(); // set cursor to START_OBJECT
SuggestionSearchContext parsedSuggestionSearchContext = parseElement.parseInternal(parser, mockShardContext);
SuggestionSearchContext buildSuggestSearchContext = suggestBuilder.build(mockShardContext);
assertEquals(parsedSuggestionSearchContext.suggestions().size(), buildSuggestSearchContext.suggestions().size());
Iterator<Map.Entry<String, SuggestionContext>> iterator = buildSuggestSearchContext.suggestions().entrySet().iterator();
for (Map.Entry<String, SuggestionContext> entry : parsedSuggestionSearchContext.suggestions().entrySet()) {
Map.Entry<String, SuggestionContext> other = iterator.next();
assertEquals(entry.getKey(), other.getKey());
SuggestionContext oldSchoolContext = entry.getValue();
SuggestionContext newSchoolContext = other.getValue();
assertNotSame(oldSchoolContext, newSchoolContext);
// deep comparison of analyzers is difficult here, but we check they are set or not set
if (oldSchoolContext.getAnalyzer() != null) {
assertNotNull(newSchoolContext.getAnalyzer());
} else {
assertNull(newSchoolContext.getAnalyzer());
}
assertEquals(oldSchoolContext.getField(), newSchoolContext.getField());
assertEquals(oldSchoolContext.getPrefix(), newSchoolContext.getPrefix());
assertEquals(oldSchoolContext.getRegex(), newSchoolContext.getRegex());
assertEquals(oldSchoolContext.getShardSize(), newSchoolContext.getShardSize());
assertEquals(oldSchoolContext.getSize(), newSchoolContext.getSize());
assertEquals(oldSchoolContext.getSuggester().getClass(), newSchoolContext.getSuggester().getClass());
assertEquals(oldSchoolContext.getText(), newSchoolContext.getText());
assertEquals(oldSchoolContext.getClass(), newSchoolContext.getClass());
assertSuggestionContext(oldSchoolContext, newSchoolContext);
}
}
}
/**
* compare two SuggestionContexte implementations for the special suggestion type under test
*/
protected abstract void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion);
private SB mutate(SB firstBuilder) throws IOException {
SB mutation = serializedCopy(firstBuilder);
assertNotSame(mutation, firstBuilder);

View File

@ -54,16 +54,6 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
return response;
}
@Override
public SuggestContextParser getContextParser() {
return (parser, shardContext) -> {
Map<String, Object> options = parser.map();
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(shardContext, options);
suggestionContext.setField((String) options.get("field"));
return suggestionContext;
};
}
public static class CustomSuggestionsContext extends SuggestionSearchContext.SuggestionContext {
public Map<String, Object> options;

View File

@ -112,34 +112,6 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe
return builderAndInfo;
}
@Override
protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) {
assertThat(oldSuggestion, instanceOf(CompletionSuggestionContext.class));
assertThat(newSuggestion, instanceOf(CompletionSuggestionContext.class));
CompletionSuggestionContext oldCompletionSuggestion = (CompletionSuggestionContext) oldSuggestion;
CompletionSuggestionContext newCompletionSuggestion = (CompletionSuggestionContext) newSuggestion;
assertEquals(oldCompletionSuggestion.getFieldType(), newCompletionSuggestion.getFieldType());
assertEquals(oldCompletionSuggestion.getPayloadFields(), newCompletionSuggestion.getPayloadFields());
assertEquals(oldCompletionSuggestion.getFuzzyOptions(), newCompletionSuggestion.getFuzzyOptions());
assertEquals(oldCompletionSuggestion.getRegexOptions(), newCompletionSuggestion.getRegexOptions());
assertEquals(oldCompletionSuggestion.getQueryContexts(), newCompletionSuggestion.getQueryContexts());
}
@Override
protected Tuple<MappedFieldType, CompletionSuggestionBuilder> randomFieldTypeAndSuggestionBuilder() {
final BuilderAndInfo builderAndInfo = randomSuggestionBuilderWithContextInfo();
CompletionFieldMapper.CompletionFieldType type = new CompletionFieldMapper.CompletionFieldType();
List<ContextMapping> contextMappings = builderAndInfo.catContexts.stream()
.map(catContext -> new CategoryContextMapping.Builder(catContext).build())
.collect(Collectors.toList());
contextMappings.addAll(builderAndInfo.geoContexts.stream()
.map(geoContext -> new GeoContextMapping.Builder(geoContext).build())
.collect(Collectors.toList()));
type.setContextMappings(new ContextMappings(contextMappings));
return new Tuple<>(type, builderAndInfo.builder);
}
@Override
protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) throws IOException {
switch (randomIntBetween(0, 5)) {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -31,23 +30,10 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException;
import java.util.Arrays;
@ -147,61 +133,6 @@ public class DirectCandidateGeneratorTests extends ESTestCase{
}
}
/**
* test that build() outputs a {@link DirectCandidateGenerator} that is similar to the one
* we would get when parsing the xContent the test generator is rendering out
*/
public void testBuild() throws IOException {
long start = System.currentTimeMillis();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), Settings.EMPTY);
AnalysisService mockAnalysisService = new AnalysisService(idxSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()) {
@Override
public NamedAnalyzer analyzer(String name) {
return new NamedAnalyzer(name, new WhitespaceAnalyzer());
}
};
MapperService mockMapperService = new MapperService(idxSettings, mockAnalysisService , null, new IndicesModule().getMapperRegistry(), null) {
@Override
public MappedFieldType fullName(String fullName) {
return new StringFieldType();
}
};
QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, mockMapperService, null, null, null) {
@Override
public MappedFieldType fieldMapper(String name) {
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
}
};
mockShardContext.setMapUnmappedFieldAsString(true);
for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
DirectCandidateGeneratorBuilder generator = randomCandidateGenerator();
// first, build via DirectCandidateGenerator#build()
DirectCandidateGenerator contextGenerator = generator.build(mockMapperService);
// second, render random test generator to xContent and parse using
// PhraseSuggestParser
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
generator.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentHelper.createParser(builder.bytes());
DirectCandidateGenerator secondGenerator = PhraseSuggestParser.parseCandidateGenerator(parser,
mockMapperService, ParseFieldMatcher.EMPTY);
// compare their properties
assertNotSame(contextGenerator, secondGenerator);
assertEqualGenerators(contextGenerator, secondGenerator);
}
}
public static void assertEqualGenerators(DirectCandidateGenerator first, DirectCandidateGenerator second) {
assertEquals(first.field(), second.field());
assertEquals(first.accuracy(), second.accuracy(), Float.MIN_VALUE);

View File

@ -160,40 +160,6 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC
}
}
@Override
protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) {
assertThat(oldSuggestion, instanceOf(PhraseSuggestionContext.class));
assertThat(newSuggestion, instanceOf(PhraseSuggestionContext.class));
PhraseSuggestionContext oldPhraseSuggestion = (PhraseSuggestionContext) oldSuggestion;
PhraseSuggestionContext newPhraseSuggestion = (PhraseSuggestionContext) newSuggestion;
assertEquals(oldPhraseSuggestion.confidence(), newPhraseSuggestion.confidence(), Float.MIN_VALUE);
assertEquals(oldPhraseSuggestion.collatePrune(), newPhraseSuggestion.collatePrune());
assertEquals(oldPhraseSuggestion.gramSize(), newPhraseSuggestion.gramSize());
assertEquals(oldPhraseSuggestion.realworldErrorLikelyhood(), newPhraseSuggestion.realworldErrorLikelyhood(), Float.MIN_VALUE);
assertEquals(oldPhraseSuggestion.maxErrors(), newPhraseSuggestion.maxErrors(), Float.MIN_VALUE);
assertEquals(oldPhraseSuggestion.separator(), newPhraseSuggestion.separator());
assertEquals(oldPhraseSuggestion.getTokenLimit(), newPhraseSuggestion.getTokenLimit());
assertEquals(oldPhraseSuggestion.getRequireUnigram(), newPhraseSuggestion.getRequireUnigram());
assertEquals(oldPhraseSuggestion.getPreTag(), newPhraseSuggestion.getPreTag());
assertEquals(oldPhraseSuggestion.getPostTag(), newPhraseSuggestion.getPostTag());
if (oldPhraseSuggestion.getCollateQueryScript() != null) {
// only assert that we have a compiled script on the other side
assertNotNull(newPhraseSuggestion.getCollateQueryScript());
}
if (oldPhraseSuggestion.generators() != null) {
assertNotNull(newPhraseSuggestion.generators());
assertEquals(oldPhraseSuggestion.generators().size(), newPhraseSuggestion.generators().size());
Iterator<DirectCandidateGenerator> secondList = newPhraseSuggestion.generators().iterator();
for (DirectCandidateGenerator candidateGenerator : newPhraseSuggestion.generators()) {
DirectCandidateGeneratorTests.assertEqualGenerators(candidateGenerator, secondList.next());
}
}
assertEquals(oldPhraseSuggestion.getCollateScriptParams(), newPhraseSuggestion.getCollateScriptParams());
if (oldPhraseSuggestion.model() != null) {
assertNotNull(newPhraseSuggestion.model());
}
}
public void testInvalidParameters() throws IOException {
// test missing field name
try {

View File

@ -302,16 +302,6 @@ public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCas
}
}
@Override
protected void assertSuggestionContext(SuggestionContext oldSuggestion, SuggestionContext newSuggestion) {
@SuppressWarnings("unchecked")
TermSuggestionContext oldContext = (TermSuggestionContext) oldSuggestion;
@SuppressWarnings("unchecked")
TermSuggestionContext newContext = (TermSuggestionContext) newSuggestion;
assertSpellcheckerSettings(oldContext.getDirectSpellCheckerSettings(), newContext.getDirectSpellCheckerSettings());
}
private void assertSpellcheckerSettings(DirectSpellcheckerSettings oldSettings, DirectSpellcheckerSettings newSettings) {
final double delta = 0.0d;
// make sure the objects aren't the same