Merge branch 'feature-suggest-refactoring'

Refactors all suggestion builders so that they can be parsed on
the coordinating node and serialized as objects to the shards.
Specifically, every SuggestionBuilder implementation now implements
NamedWriteable for serialization, provides a fromXContent() method
that handles parsing from xContent, and provides a build() method
that is called on the shard to create the SuggestionContext.

Relates to #10217
Christoph Büscher 2016-03-16 16:29:10 +01:00
commit 055234c893
100 changed files with 6275 additions and 2806 deletions
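
In miniature, the flow described in the commit message looks like the sketch below. This is an editorial illustration, not code from this commit: the stream wiring (BytesStreamOutput, StreamInput.wrap, NamedWriteableAwareStreamInput) is assumed from the codebase of this era, while writeTo(), PROTOTYPE.readFrom() and build() match the hunks further down.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext;

import java.io.IOException;

public class SuggestRoundTripSketch {
    public static SuggestionSearchContext roundTrip(SuggestBuilder parsed, NamedWriteableRegistry registry,
            QueryShardContext shardContext) throws IOException {
        // Coordinating node: the suggest source is no longer raw bytes but a parsed builder object.
        BytesStreamOutput out = new BytesStreamOutput();
        parsed.writeTo(out);

        // Shard: deserialize via the prototype pattern; the stream must be able to resolve
        // NamedWriteables such as the registered SuggestionBuilder prototypes (assumed wiring).
        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), registry);
        SuggestBuilder onShard = SuggestBuilder.PROTOTYPE.readFrom(in);

        // Build the per-shard SuggestionSearchContext from the deserialized builder.
        return onShard.build(shardContext);
    }
}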

View File

@ -208,7 +208,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]TransportClearScrollAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]TransportMultiSearchAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]suggest[/\\]SuggestResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]suggest[/\\]TransportSuggestAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ActionFilter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]AutoCreateIndex.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
@ -268,7 +267,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cache[/\\]recycler[/\\]PageCacheRecycler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]ElasticsearchClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]FilterClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]Requests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]node[/\\]NodeClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]support[/\\]AbstractClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClient.java" checks="LineLength" />
@ -895,7 +893,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestUtils.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]Suggesters.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]ContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoContextMapping.java" checks="LineLength" />
@ -906,12 +903,9 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]LinearInterpoatingScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellChecker.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestionBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]StupidBackoffScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]WordScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggestParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RestoreService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardFailure.java" checks="LineLength" />
@ -1402,7 +1396,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomIOExceptionsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportSearchFailuresIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportTwoNodesSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]builder[/\\]SearchSourceBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ChildQuerySearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ParentFieldLoadingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhasePluginIT.java" checks="LineLength" />
@ -1424,7 +1417,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]ExistsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]MultiMatchQueryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]SearchQueryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescoreBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]DuelScrollIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollWithFailingNodesIT.java" checks="LineLength" />
@ -1435,12 +1427,10 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]ContextCompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggesterSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]DirectCandidateGeneratorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellCheckerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]SmoothingModelTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]similarity[/\\]SimilarityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]AbstractSnapshotIntegTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]BlobStoreFormatIT.java" checks="LineLength" />

View File

@ -393,9 +393,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
}
/**
* Delegates to
* {@link org.elasticsearch.search.suggest.SuggestBuilder#addSuggestion(org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder)}
* .
* Delegates to {@link SearchSourceBuilder#suggest(SuggestBuilder)}
*/
public SearchRequestBuilder suggest(SuggestBuilder suggestBuilder) {
sourceBuilder().suggest(suggestBuilder);
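
As a usage note for this delegation, a suggestion can now be attached to a search request as in the fragment below. This is illustrative only and not part of the commit; the index, suggestion name and field values are made up.

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;

public class SearchSuggestExample {
    public static SearchResponse searchWithSuggestion(Client client) {
        return client.prepareSearch("articles")
                .suggest(new SuggestBuilder()
                        .setGlobalText("elasticsaerch") // intentionally misspelled input text
                        .addSuggestion("spelling", SuggestBuilders.termSuggestion("title")))
                .get();
    }
}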

View File

@ -20,10 +20,10 @@
package org.elasticsearch.action.suggest;
import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.suggest.SuggestBuilder;
import java.io.IOException;
@ -32,29 +32,29 @@ import java.io.IOException;
*/
public final class ShardSuggestRequest extends BroadcastShardRequest {
private BytesReference suggestSource;
private SuggestBuilder suggest;
public ShardSuggestRequest() {
}
ShardSuggestRequest(ShardId shardId, SuggestRequest request) {
super(shardId, request);
this.suggestSource = request.suggest();
this.suggest = request.suggest();
}
public BytesReference suggest() {
return suggestSource;
public SuggestBuilder suggest() {
return suggest;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
suggestSource = in.readBytesReference();
suggest = SuggestBuilder.PROTOTYPE.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeBytesReference(suggestSource);
suggest.writeTo(out);
}
}

View File

@ -21,27 +21,25 @@ package org.elasticsearch.action.suggest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.suggest.SuggestBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
/**
* A request to get suggestions for corrections of phrases. Best created with
* {@link org.elasticsearch.client.Requests#suggestRequest(String...)}.
* <p>
* The request requires the suggest query source to be set either using
* {@link #suggest(org.elasticsearch.common.bytes.BytesReference)} / {@link #suggest(org.elasticsearch.common.bytes.BytesReference)}
* or by using {@link #suggest(org.elasticsearch.search.suggest.SuggestBuilder)}
* (Best created using the {link @org.elasticsearch.search.suggest.SuggestBuilders)}).
* The request requires the suggest query source to be set using
* {@link #suggest(org.elasticsearch.search.suggest.SuggestBuilder)}
*
* @see SuggestResponse
* @see org.elasticsearch.client.Client#suggest(SuggestRequest)
@ -56,7 +54,7 @@ public final class SuggestRequest extends BroadcastRequest<SuggestRequest> {
@Nullable
private String preference;
private BytesReference suggestSource;
private SuggestBuilder suggest;
public SuggestRequest() {
}
@ -76,40 +74,21 @@ public final class SuggestRequest extends BroadcastRequest<SuggestRequest> {
}
/**
* The Phrase to get correction suggestions for
* The suggestion query to get correction suggestions for
*/
public BytesReference suggest() {
return suggestSource;
public SuggestBuilder suggest() {
return suggest;
}
/**
* set a new source for the suggest query
* set a new source for the suggest query
*/
public SuggestRequest suggest(BytesReference suggestSource) {
this.suggestSource = suggestSource;
public SuggestRequest suggest(SuggestBuilder suggest) {
Objects.requireNonNull(suggest, "suggest must not be null");
this.suggest = suggest;
return this;
}
/**
* set a new source using a {@link org.elasticsearch.search.suggest.SuggestBuilder}
* for phrase and term suggestion lookup
*/
public SuggestRequest suggest(SuggestBuilder suggestBuilder) {
return suggest(suggestBuilder.buildAsBytes(Requests.CONTENT_TYPE));
}
/**
* set a new source using a {@link org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder}
* for completion suggestion lookup
*/
public SuggestRequest suggest(SuggestBuilder.SuggestionBuilder suggestionBuilder) {
return suggest(suggestionBuilder.buildAsBytes(Requests.CONTENT_TYPE));
}
public SuggestRequest suggest(String source) {
return suggest(new BytesArray(source));
}
/**
* A comma separated list of routing values to control the shards the search will be executed on.
*/
@ -147,25 +126,29 @@ public final class SuggestRequest extends BroadcastRequest<SuggestRequest> {
super.readFrom(in);
routing = in.readOptionalString();
preference = in.readOptionalString();
suggest(in.readBytesReference());
suggest = SuggestBuilder.PROTOTYPE.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
Objects.requireNonNull(suggest, "suggest must not be null");
super.writeTo(out);
out.writeOptionalString(routing);
out.writeOptionalString(preference);
out.writeBytesReference(suggestSource);
suggest.writeTo(out);
}
@Override
public String toString() {
Objects.requireNonNull(suggest, "suggest must not be null");
String sSource = "_na_";
try {
sSource = XContentHelper.convertToJson(suggestSource, false);
XContentBuilder builder = JsonXContent.contentBuilder();
builder = suggest.toXContent(builder, ToXContent.EMPTY_PARAMS);
sSource = builder.string();
} catch (Exception e) {
// ignore
}
return "[" + Arrays.toString(indices) + "]" + ", suggestSource[" + sSource + "]";
return "[" + Arrays.toString(indices) + "]" + ", suggest[" + sSource + "]";
}
}

View File

@ -19,17 +19,10 @@
package org.elasticsearch.action.suggest;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder;
import java.io.IOException;
import org.elasticsearch.search.suggest.SuggestionBuilder;
/**
* A suggest action request builder.
@ -44,9 +37,11 @@ public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder<Sugg
/**
* Add a definition for suggestions to the request
* @param name the name for the suggestion that will also be used in the response
* @param suggestion the suggestion configuration
*/
public <T> SuggestRequestBuilder addSuggestion(SuggestionBuilder<T> suggestion) {
suggest.addSuggestion(suggestion);
public SuggestRequestBuilder addSuggestion(String name, SuggestionBuilder<?> suggestion) {
suggest.addSuggestion(name, suggestion);
return this;
}
@ -59,7 +54,7 @@ public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder<Sugg
}
public SuggestRequestBuilder setSuggestText(String globalText) {
this.suggest.setText(globalText);
this.suggest.setGlobalText(globalText);
return this;
}
@ -84,13 +79,7 @@ public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder<Sugg
@Override
protected SuggestRequest beforeExecute(SuggestRequest request) {
try {
XContentBuilder builder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE);
suggest.toXContent(builder, ToXContent.EMPTY_PARAMS);
request.suggest(builder.bytes());
} catch (IOException e) {
throw new ElasticsearchException("Unable to build suggestion request", e);
}
request.suggest(suggest);
return request;
}
}
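
For the dedicated suggest endpoint, beforeExecute() now hands the builder over as an object instead of rendering it to bytes. Client-side usage looks roughly like this; the prepareSuggest entry point and the example values are assumptions, not shown in this diff.

import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.client.Client;

import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;

public class SuggestEndpointExample {
    public static SuggestResponse suggest(Client client) {
        return client.prepareSuggest("articles")          // assumed client entry point
                .setSuggestText("elasticsaerch")          // global text applied to all suggestions
                .addSuggestion("spelling", termSuggestion("title"))
                .get();
    }
}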

View File

@ -32,17 +32,15 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.suggest.stats.ShardSuggestMetric;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestPhase;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.threadpool.ThreadPool;
@ -58,15 +56,16 @@ import java.util.concurrent.atomic.AtomicReferenceArray;
/**
* Defines the transport of a suggestion request across the cluster
*/
public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequest, SuggestResponse, ShardSuggestRequest, ShardSuggestResponse> {
public class TransportSuggestAction
extends TransportBroadcastAction<SuggestRequest, SuggestResponse, ShardSuggestRequest, ShardSuggestResponse> {
private final IndicesService indicesService;
private final SuggestPhase suggestPhase;
@Inject
public TransportSuggestAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, SuggestPhase suggestPhase, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver) {
public TransportSuggestAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, IndicesService indicesService, SuggestPhase suggestPhase,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, SuggestAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
SuggestRequest::new, ShardSuggestRequest::new, ThreadPool.Names.SUGGEST);
this.indicesService = indicesService;
@ -85,7 +84,8 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
@Override
protected GroupShardsIterator shards(ClusterState clusterState, SuggestRequest request, String[] concreteIndices) {
Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, request.routing(), request.indices());
Map<String, Set<String>> routingMap =
indexNameExpressionResolver.resolveSearchRouting(clusterState, request.routing(), request.indices());
return clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference());
}
@ -124,7 +124,8 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
}
}
return new SuggestResponse(new Suggest(Suggest.reduce(groupedSuggestions)), shardsResponses.length(), successfulShards, failedShards, shardFailures);
return new SuggestResponse(new Suggest(Suggest.reduce(groupedSuggestions)), shardsResponses.length(),
successfulShards, failedShards, shardFailures);
}
@Override
@ -134,16 +135,10 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
ShardSuggestMetric suggestMetric = indexShard.getSuggestMetric();
suggestMetric.preSuggest();
long startTime = System.nanoTime();
XContentParser parser = null;
try (Engine.Searcher searcher = indexShard.acquireSearcher("suggest")) {
BytesReference suggest = request.suggest();
if (suggest != null && suggest.length() > 0) {
parser = XContentFactory.xContent(suggest).createParser(suggest);
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("suggest content missing");
}
final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
indexService.fieldData(), request.shardId());
SuggestBuilder suggest = request.suggest();
if (suggest != null) {
final SuggestionSearchContext context = suggest.build(indexService.newQueryShardContext());
final Suggest result = suggestPhase.execute(context, searcher.searcher());
return new ShardSuggestResponse(request.shardId(), result);
}
@ -151,9 +146,6 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
} catch (Throwable ex) {
throw new ElasticsearchException("failed to execute suggest", ex);
} finally {
if (parser != null) {
parser.close();
}
suggestMetric.postSuggest(System.nanoTime() - startTime);
}
}

View File

@ -62,6 +62,7 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.suggest.SuggestBuilder;
/**
* A handy one stop shop for creating requests (make sure to import static this class).
@ -127,7 +128,7 @@ public class Requests {
/**
* Creates a suggest request for getting suggestions from provided <code>indices</code>.
* The suggest query has to be set using the JSON source using {@link org.elasticsearch.action.suggest.SuggestRequest#suggest(org.elasticsearch.common.bytes.BytesReference)}.
* The suggest query has to be set using {@link org.elasticsearch.action.suggest.SuggestRequest#suggest(SuggestBuilder)}.
* @param indices The indices to suggest from. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
* @see org.elasticsearch.client.Client#suggest(org.elasticsearch.action.suggest.SuggestRequest)
*/
@ -342,7 +343,8 @@ public class Requests {
/**
* Creates a cluster health request.
*
* @param indices The indices to provide additional cluster health information for. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
* @param indices The indices to provide additional cluster health information for.
* Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
* @return The cluster health request
* @see org.elasticsearch.client.ClusterAdminClient#health(org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest)
*/

View File

@ -39,6 +39,9 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.ingest.IngestStats;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
@ -284,6 +287,14 @@ public abstract class StreamInput extends InputStream {
return null;
}
@Nullable
public Float readOptionalFloat() throws IOException {
if (readBoolean()) {
return readFloat();
}
return null;
}
@Nullable
public Integer readOptionalVInt() throws IOException {
if (readBoolean()) {
@ -708,6 +719,13 @@ public abstract class StreamInput extends InputStream {
return readNamedWriteable(RescoreBuilder.class);
}
/**
* Reads a {@link SuggestionBuilder} from the current stream
*/
public SuggestionBuilder<?> readSuggestion() throws IOException {
return readNamedWriteable(SuggestionBuilder.class);
}
/**
* Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream
*/
@ -715,6 +733,13 @@ public abstract class StreamInput extends InputStream {
return readNamedWriteable(ScoreFunctionBuilder.class);
}
/**
* Reads a {@link SmoothingModel} from the current stream
*/
public SmoothingModel readPhraseSuggestionSmoothingModel() throws IOException {
return readNamedWriteable(SmoothingModel.class);
}
/**
* Reads a {@link Task.Status} from the current stream.
*/

View File

@ -37,6 +37,9 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
@ -238,6 +241,15 @@ public abstract class StreamOutput extends OutputStream {
}
}
public void writeOptionalFloat(@Nullable Float floatValue) throws IOException {
if (floatValue == null) {
writeBoolean(false);
} else {
writeBoolean(true);
writeFloat(floatValue);
}
}
public void writeOptionalText(@Nullable Text text) throws IOException {
if (text == null) {
writeInt(-1);
@ -691,6 +703,13 @@ public abstract class StreamOutput extends OutputStream {
writeNamedWriteable(scoreFunctionBuilder);
}
/**
* Writes the given {@link SmoothingModel} to the stream
*/
public void writePhraseSuggestionSmoothingModel(SmoothingModel smoothinModel) throws IOException {
writeNamedWriteable(smoothinModel);
}
/**
* Writes a {@link Task.Status} to the current stream.
*/
@ -722,4 +741,12 @@ public abstract class StreamOutput extends OutputStream {
public void writeRescorer(RescoreBuilder<?> rescorer) throws IOException {
writeNamedWriteable(rescorer);
}
/**
* Writes a {@link SuggestionBuilder} to the current stream
*/
public void writeSuggestion(SuggestionBuilder suggestion) throws IOException {
writeNamedWriteable(suggestion);
}
}
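
The new optional-float helpers encode a presence flag followed by the value, mirroring the other writeOptional* methods, while the suggestion helpers delegate to the NamedWriteable machinery. A round-trip sketch follows; the stream wiring is assumed, not part of this diff.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

public class OptionalFloatRoundTrip {
    public static void main(String[] args) throws Exception {
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeOptionalFloat(null);   // writes a single boolean false
        out.writeOptionalFloat(0.5f);   // writes boolean true, then the float

        StreamInput in = StreamInput.wrap(out.bytes().toBytes()); // assumed wiring
        assert in.readOptionalFloat() == null;
        assert in.readOptionalFloat() == 0.5f;
    }
}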

View File

@ -19,12 +19,12 @@
package org.elasticsearch.indices.query;
import java.util.Map;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryParser;
import java.util.Map;
public class IndicesQueriesRegistry extends AbstractComponent {
private Map<String, QueryParser<?>> queryParsers;

View File

@ -44,6 +44,7 @@ import org.elasticsearch.rest.action.support.RestToXContentListener;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import java.util.Map;
@ -60,13 +61,14 @@ public class RestMultiSearchAction extends BaseRestHandler {
private final boolean allowExplicitIndex;
private final IndicesQueriesRegistry indicesQueriesRegistry;
private final AggregatorParsers aggParsers;
private final Suggesters suggesters;
@Inject
public RestMultiSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry indicesQueriesRegistry,
AggregatorParsers aggParsers) {
AggregatorParsers aggParsers, Suggesters suggesters) {
super(settings, client);
this.aggParsers = aggParsers;
this.suggesters = suggesters;
controller.registerHandler(GET, "/_msearch", this);
controller.registerHandler(POST, "/_msearch", this);
@ -97,7 +99,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, multiSearchRequest.indicesOptions());
parseRequest(multiSearchRequest, RestActions.getRestContent(request), isTemplateRequest, indices, types,
request.param("search_type"), request.param("routing"), indicesOptions, allowExplicitIndex, indicesQueriesRegistry,
parseFieldMatcher, aggParsers);
parseFieldMatcher, aggParsers, suggesters);
client.multiSearch(multiSearchRequest, new RestToXContentListener<>(channel));
}
@ -112,7 +114,8 @@ public class RestMultiSearchAction extends BaseRestHandler {
@Nullable String routing,
IndicesOptions indicesOptions,
boolean allowExplicitIndex, IndicesQueriesRegistry indicesQueriesRegistry,
ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers) throws Exception {
ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers,
Suggesters suggesters) throws Exception {
XContent xContent = XContentFactory.xContent(data);
int from = 0;
int length = data.length();
@ -193,7 +196,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
} else {
try (XContentParser requestParser = XContentFactory.xContent(slice).createParser(slice)) {
queryParseContext.reset(requestParser);
searchRequest.source(SearchSourceBuilder.parseSearchSource(requestParser, queryParseContext, aggParsers));
searchRequest.source(SearchSourceBuilder.parseSearchSource(requestParser, queryParseContext, aggParsers, suggesters));
}
}
// move pointers

View File

@ -48,6 +48,8 @@ import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
import org.elasticsearch.search.suggest.Suggesters;
import java.io.IOException;
import java.util.Arrays;
@ -64,13 +66,15 @@ public class RestSearchAction extends BaseRestHandler {
private final IndicesQueriesRegistry queryRegistry;
private final AggregatorParsers aggParsers;
private final Suggesters suggesters;
@Inject
public RestSearchAction(Settings settings, RestController controller, Client client, IndicesQueriesRegistry queryRegistry,
AggregatorParsers aggParsers) {
AggregatorParsers aggParsers, Suggesters suggesters) {
super(settings, client);
this.queryRegistry = queryRegistry;
this.aggParsers = aggParsers;
this.suggesters = suggesters;
controller.registerHandler(GET, "/_search", this);
controller.registerHandler(POST, "/_search", this);
controller.registerHandler(GET, "/{index}/_search", this);
@ -88,7 +92,7 @@ public class RestSearchAction extends BaseRestHandler {
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException {
SearchRequest searchRequest = new SearchRequest();
RestSearchAction.parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, null);
RestSearchAction.parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, suggesters, null);
client.search(searchRequest, new RestStatusToXContentListener<>(channel));
}
@ -101,8 +105,10 @@ public class RestSearchAction extends BaseRestHandler {
* content is read from the request using
* RestAction.hasBodyContent.
*/
public static void parseSearchRequest(SearchRequest searchRequest, IndicesQueriesRegistry indicesQueriesRegistry, RestRequest request,
ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, BytesReference restContent) throws IOException {
public static void parseSearchRequest(SearchRequest searchRequest, IndicesQueriesRegistry indicesQueriesRegistry, RestRequest request,
ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, Suggesters suggesters, BytesReference restContent)
throws IOException {
if (searchRequest.source() == null) {
searchRequest.source(new SearchSourceBuilder());
}
@ -126,7 +132,7 @@ public class RestSearchAction extends BaseRestHandler {
}
} else {
RestActions.parseRestSearchSource(searchRequest.source(), restContent, indicesQueriesRegistry, parseFieldMatcher,
aggParsers);
aggParsers, suggesters);
}
}
@ -254,8 +260,10 @@ public class RestSearchAction extends BaseRestHandler {
String suggestText = request.param("suggest_text", request.param("q"));
int suggestSize = request.paramAsInt("suggest_size", 5);
String suggestMode = request.param("suggest_mode");
searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(
termSuggestion(suggestField).field(suggestField).text(suggestText).size(suggestSize).suggestMode(suggestMode)));
searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(suggestField,
termSuggestion(suggestField)
.text(suggestText).size(suggestSize)
.suggestMode(SuggestMode.resolve(suggestMode))));
}
}
}

View File

@ -24,9 +24,14 @@ import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
@ -37,6 +42,10 @@ import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestBuilderListener;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
@ -47,9 +56,15 @@ import static org.elasticsearch.rest.action.support.RestActions.buildBroadcastSh
*/
public class RestSuggestAction extends BaseRestHandler {
private final IndicesQueriesRegistry queryRegistry;
private final Suggesters suggesters;
@Inject
public RestSuggestAction(Settings settings, RestController controller, Client client) {
public RestSuggestAction(Settings settings, RestController controller, Client client,
IndicesQueriesRegistry queryRegistry, Suggesters suggesters) {
super(settings, client);
this.queryRegistry = queryRegistry;
this.suggesters = suggesters;
controller.registerHandler(POST, "/_suggest", this);
controller.registerHandler(GET, "/_suggest", this);
controller.registerHandler(POST, "/{index}/_suggest", this);
@ -57,11 +72,17 @@ public class RestSuggestAction extends BaseRestHandler {
}
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException {
SuggestRequest suggestRequest = new SuggestRequest(Strings.splitStringByCommaToArray(request.param("index")));
suggestRequest.indicesOptions(IndicesOptions.fromRequest(request, suggestRequest.indicesOptions()));
if (RestActions.hasBodyContent(request)) {
suggestRequest.suggest(RestActions.getRestContent(request));
final BytesReference sourceBytes = RestActions.getRestContent(request);
try (XContentParser parser = XContentFactory.xContent(sourceBytes).createParser(sourceBytes)) {
final QueryParseContext context = new QueryParseContext(queryRegistry);
context.reset(parser);
context.parseFieldMatcher(parseFieldMatcher);
suggestRequest.suggest(SuggestBuilder.fromXContent(context, suggesters));
}
} else {
throw new IllegalArgumentException("no content or source provided to execute suggestion");
}
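
The handler above no longer forwards raw bytes; it parses the body into a SuggestBuilder up front. Stripped of the REST plumbing, the parsing step looks like the sketch below. The JSON body and the ParseFieldMatcher.STRICT choice are illustrative; the parse calls match the handler above.

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggesters;

import java.io.IOException;

public class ParseSuggestBodySketch {
    public static SuggestBuilder parseBody(IndicesQueriesRegistry registry, Suggesters suggesters) throws IOException {
        // Example request body; shape follows the suggest API (name -> text + suggester type).
        BytesArray body = new BytesArray("{\"spelling\": {\"text\": \"elasticsaerch\", \"term\": {\"field\": \"title\"}}}");
        try (XContentParser parser = XContentFactory.xContent(body).createParser(body)) {
            QueryParseContext context = new QueryParseContext(registry);
            context.reset(parser);
            context.parseFieldMatcher(ParseFieldMatcher.STRICT); // illustrative matcher choice
            return SuggestBuilder.fromXContent(context, suggesters);
        }
    }
}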

View File

@ -42,6 +42,7 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import java.io.IOException;
@ -114,14 +115,15 @@ public class RestActions {
return queryBuilder;
}
public static void parseRestSearchSource(SearchSourceBuilder source, BytesReference sourceBytes, IndicesQueriesRegistry queryRegistry,
ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers)
public static void parseRestSearchSource(SearchSourceBuilder source, BytesReference sourceBytes,
IndicesQueriesRegistry queryRegistry, ParseFieldMatcher parseFieldMatcher,
AggregatorParsers aggParsers, Suggesters suggesters)
throws IOException {
XContentParser parser = XContentFactory.xContent(sourceBytes).createParser(sourceBytes);
QueryParseContext queryParseContext = new QueryParseContext(queryRegistry);
queryParseContext.reset(parser);
queryParseContext.parseFieldMatcher(parseFieldMatcher);
source.parseXContent(parser, queryParseContext, aggParsers);
source.parseXContent(parser, queryParseContext, aggParsers, suggesters);
}
/**

View File

@ -226,6 +226,14 @@ import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import java.util.ArrayList;
import java.util.HashMap;
@ -277,8 +285,9 @@ public class SearchModule extends AbstractModule {
highlighters.registerExtension(key, clazz);
}
public void registerSuggester(String key, Class<? extends Suggester> suggester) {
suggesters.registerExtension(key, suggester);
public void registerSuggester(String key, Suggester<?> suggester) {
suggesters.registerExtension(key, suggester.getClass());
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, suggester.getBuilderPrototype());
}
/**
@ -371,6 +380,12 @@ public class SearchModule extends AbstractModule {
protected void configureSuggesters() {
suggesters.bind(binder());
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
}
protected void configureHighlighters() {
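
registerSuggester() now takes a Suggester instance rather than a class, so the module can also register the suggester's builder prototype for NamedWriteable resolution. A plugin would hook in roughly as below; this is a sketch only, and the Suggester instance stands in for a real implementation.

import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.Suggester;

public class SuggesterRegistrationSketch {
    // Hypothetical hook: "mySuggester" stands in for a concrete Suggester whose
    // getBuilderPrototype() returns the SuggestionBuilder to register.
    public void onModule(SearchModule searchModule, Suggester<?> mySuggester) {
        searchModule.registerSuggester("my_suggester", mySuggester);
    }
}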

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search;
import com.carrotsearch.hppc.ObjectFloatHashMap;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.ExceptionsHelper;
@ -91,6 +92,7 @@ import org.elasticsearch.search.query.QuerySearchResultProvider;
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.search.rescore.RescoreBuilder;
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
@ -152,14 +154,16 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
private final Map<String, SearchParseElement> elementParsers;
private final ParseFieldMatcher parseFieldMatcher;
private AggregatorParsers aggParsers;
private final AggregatorParsers aggParsers;
private final Suggesters suggesters;
@Inject
public SearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService, IndicesService indicesService,
ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase,
QueryPhase queryPhase, FetchPhase fetchPhase, AggregatorParsers aggParsers) {
ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, DfsPhase dfsPhase,
QueryPhase queryPhase, FetchPhase fetchPhase, AggregatorParsers aggParsers, Suggesters suggesters) {
super(settings);
this.aggParsers = aggParsers;
this.suggesters = suggesters;
this.parseFieldMatcher = new ParseFieldMatcher(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
@ -557,7 +561,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
QueryParseContext queryParseContext = new QueryParseContext(indicesService.getIndicesQueryRegistry());
queryParseContext.reset(parser);
queryParseContext.parseFieldMatcher(parseFieldMatcher);
parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext, aggParsers));
parseSource(context, SearchSourceBuilder.parseSearchSource(parser, queryParseContext, aggParsers, suggesters));
}
}
parseSource(context, request.source());
@ -720,26 +724,16 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
}
}
if (source.suggest() != null) {
XContentParser suggestParser = null;
try {
suggestParser = XContentFactory.xContent(source.suggest()).createParser(source.suggest());
suggestParser.nextToken();
this.elementParsers.get("suggest").parse(suggestParser, context);
} catch (Exception e) {
String sSource = "_na_";
try {
sSource = source.toString();
} catch (Throwable e1) {
// ignore
}
XContentLocation location = suggestParser != null ? suggestParser.getTokenLocation() : null;
throw new SearchParseException(context, "failed to parse suggest source [" + sSource + "]", location, e);
context.suggest(source.suggest().build(queryShardContext));
} catch (IOException e) {
throw new SearchContextException(context, "failed to create SuggestionSearchContext", e);
}
}
if (source.rescores() != null) {
try {
for (RescoreBuilder<?> rescore : source.rescores()) {
context.addRescore(rescore.build(context.getQueryShardContext()));
context.addRescore(rescore.build(queryShardContext));
}
} catch (IOException e) {
throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
@ -764,7 +758,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
if (source.highlighter() != null) {
HighlightBuilder highlightBuilder = source.highlighter();
try {
context.highlight(highlightBuilder.build(context.getQueryShardContext()));
context.highlight(highlightBuilder.build(queryShardContext));
} catch (IOException e) {
throw new SearchContextException(context, "failed to create SearchContextHighlighter", e);
}
@ -804,6 +798,11 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
} else {
SearchParseElement parseElement = this.elementParsers.get(currentFieldName);
if (parseElement == null) {
if (currentFieldName != null && currentFieldName.equals("suggest")) {
throw new SearchParseException(context,
"suggest is not supported in [ext], please use SearchSourceBuilder#suggest(SuggestBuilder) instead",
extParser.getTokenLocation());
}
throw new SearchParseException(context, "Unknown element [" + currentFieldName + "] in [ext]",
extParser.getTokenLocation());
} else {

View File

@ -55,6 +55,7 @@ import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import java.io.IOException;
import java.util.ArrayList;
@ -105,9 +106,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
return PROTOTYPE.readFrom(in);
}
public static SearchSourceBuilder parseSearchSource(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers)
public static SearchSourceBuilder parseSearchSource(XContentParser parser, QueryParseContext context,
AggregatorParsers aggParsers, Suggesters suggesters)
throws IOException {
return PROTOTYPE.fromXContent(parser, context, aggParsers);
return PROTOTYPE.fromXContent(parser, context, aggParsers, suggesters);
}
/**
@ -156,7 +158,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
private HighlightBuilder highlightBuilder;
private BytesReference suggestBuilder;
private SuggestBuilder suggestBuilder;
private BytesReference innerHitsBuilder;
@ -475,20 +477,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
public SearchSourceBuilder suggest(SuggestBuilder suggestBuilder) {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
suggestBuilder.toXContent(builder, EMPTY_PARAMS);
this.suggestBuilder = builder.bytes();
return this;
} catch (IOException e) {
throw new RuntimeException(e);
}
this.suggestBuilder = suggestBuilder;
return this;
}
/**
* Gets the bytes representing the suggester builder for this request.
* Gets the suggester builder for this request.
*/
public BytesReference suggest() {
public SuggestBuilder suggest() {
return suggestBuilder;
}
@ -736,19 +732,22 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
/**
* Create a new SearchSourceBuilder with attributes set by an xContent.
*/
public SearchSourceBuilder fromXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers)
public SearchSourceBuilder fromXContent(XContentParser parser, QueryParseContext context,
AggregatorParsers aggParsers, Suggesters suggesters)
throws IOException {
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.parseXContent(parser, context, aggParsers);
builder.parseXContent(parser, context, aggParsers, suggesters);
return builder;
}
/**
* Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
* different defaults than a regular SearchSourceBuilder would have and use
* {@link #fromXContent(XContentParser, QueryParseContext, AggregatorParsers)} if you have normal defaults.
* {@link #fromXContent(XContentParser, QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults.
*/
public void parseXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers) throws IOException {
public void parseXContent(XContentParser parser, QueryParseContext context, AggregatorParsers aggParsers, Suggesters suggesters)
throws IOException {
XContentParser.Token token = parser.currentToken();
String currentFieldName = null;
if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
@ -852,8 +851,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
innerHitsBuilder = xContentBuilder.bytes();
} else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
suggestBuilder = xContentBuilder.bytes();
suggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
sorts = new ArrayList<>();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
@ -1050,10 +1048,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
if (suggestBuilder != null) {
builder.field(SUGGEST_FIELD.getPreferredName());
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(suggestBuilder);
parser.nextToken();
builder.copyCurrentStructure(parser);
builder.field(SUGGEST_FIELD.getPreferredName(), suggestBuilder);
}
if (rescoreBuilders != null) {
@ -1232,7 +1227,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
builder.stats = stats;
}
if (in.readBoolean()) {
builder.suggestBuilder = in.readBytesReference();
builder.suggestBuilder = SuggestBuilder.PROTOTYPE.readFrom(in);
}
builder.terminateAfter = in.readVInt();
builder.timeoutInMillis = in.readLong();
@ -1348,7 +1343,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
boolean hasSuggestBuilder = suggestBuilder != null;
out.writeBoolean(hasSuggestBuilder);
if (hasSuggestBuilder) {
out.writeBytesReference(suggestBuilder);
suggestBuilder.writeTo(out);
}
out.writeVInt(terminateAfter);
out.writeLong(timeoutInMillis);

View File

@ -25,16 +25,29 @@ import org.apache.lucene.util.automaton.LevenshteinAutomata;
public class DirectSpellcheckerSettings {
private SuggestMode suggestMode = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
private float accuracy = 0.5f;
private Suggest.Suggestion.Sort sort = Suggest.Suggestion.Sort.SCORE;
private StringDistance stringDistance = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
private int maxEdits = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
private int maxInspections = 5;
private float maxTermFreq = 0.01f;
private int prefixLength = 1;
private int minWordLength = 4;
private float minDocFreq = 0f;
// NB: If this changes, make sure to change the default in TermBuilderSuggester
public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
public static float DEFAULT_ACCURACY = 0.5f;
public static SortBy DEFAULT_SORT = SortBy.SCORE;
// NB: If this changes, make sure to change the default in TermBuilderSuggester
public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
public static int DEFAULT_MAX_INSPECTIONS = 5;
public static float DEFAULT_MAX_TERM_FREQ = 0.01f;
public static int DEFAULT_PREFIX_LENGTH = 1;
public static int DEFAULT_MIN_WORD_LENGTH = 4;
public static float DEFAULT_MIN_DOC_FREQ = 0f;
private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE;
private float accuracy = DEFAULT_ACCURACY;
private SortBy sort = DEFAULT_SORT;
private StringDistance stringDistance = DEFAULT_STRING_DISTANCE;
private int maxEdits = DEFAULT_MAX_EDITS;
private int maxInspections = DEFAULT_MAX_INSPECTIONS;
private float maxTermFreq = DEFAULT_MAX_TERM_FREQ;
private int prefixLength = DEFAULT_PREFIX_LENGTH;
private int minWordLength = DEFAULT_MIN_WORD_LENGTH;
private float minDocFreq = DEFAULT_MIN_DOC_FREQ;
public SuggestMode suggestMode() {
return suggestMode;
@ -52,11 +65,11 @@ public class DirectSpellcheckerSettings {
this.accuracy = accuracy;
}
public Suggest.Suggestion.Sort sort() {
public SortBy sort() {
return sort;
}
public void sort(Suggest.Suggestion.Sort sort) {
public void sort(SortBy sort) {
this.sort = sort;
}
@ -104,8 +117,8 @@ public class DirectSpellcheckerSettings {
return minWordLength;
}
public void minQueryLength(int minQueryLength) {
this.minWordLength = minQueryLength;
public void minWordLength(int minWordLength) {
this.minWordLength = minWordLength;
}
public float minDocFreq() {
@ -116,4 +129,20 @@ public class DirectSpellcheckerSettings {
this.minDocFreq = minDocFreq;
}
}
@Override
public String toString() {
return "[" +
"suggestMode=" + suggestMode +
",sort=" + sort +
",stringDistance=" + stringDistance +
",accuracy=" + accuracy +
",maxEdits=" + maxEdits +
",maxInspections=" + maxInspections +
",maxTermFreq=" + maxTermFreq +
",prefixLength=" + prefixLength +
",minWordLength=" + minWordLength +
",minDocFreq=" + minDocFreq +
"]";
}
}

View File

@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
/**
* An enum representing the valid sorting options
*/
public enum SortBy implements Writeable<SortBy> {
/** Sort should first be based on score, then document frequency and then the term itself. */
SCORE,
/** Sort should first be based on document frequency, then score and then the term itself. */
FREQUENCY;
public static SortBy PROTOTYPE = SCORE;
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
@Override
public SortBy readFrom(final StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown SortBy ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
public static SortBy resolve(final String str) {
Objects.requireNonNull(str, "Input string is null");
return valueOf(str.toUpperCase(Locale.ROOT));
}
}
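
Serialization of this enum is ordinal-based, so readers and writers must agree on the constant order, and new values can only be appended safely. A short usage sketch follows; the stream wiring is assumed.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.suggest.SortBy;

public class SortByExample {
    public static void main(String[] args) throws Exception {
        SortBy sort = SortBy.resolve("frequency"); // case-insensitive lookup -> FREQUENCY

        BytesStreamOutput out = new BytesStreamOutput();
        sort.writeTo(out);                          // writes the ordinal as a vint

        StreamInput in = StreamInput.wrap(out.bytes().toBytes()); // assumed wiring
        assert SortBy.PROTOTYPE.readFrom(in) == SortBy.FREQUENCY;
    }
}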

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.suggest;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
@ -198,7 +197,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
*/
public static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, Streamable, ToXContent {
public static final int TYPE = 0;
protected String name;
protected int size;
@ -643,39 +641,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
}
}
}
public enum Sort {
/**
* Sort should first be based on score.
*/
SCORE((byte) 0x0),
/**
* Sort should first be based on document frequency.
*/
FREQUENCY((byte) 0x1);
private byte id;
private Sort(byte id) {
this.id = id;
}
public byte id() {
return id;
}
public static Sort fromId(byte id) {
if (id == 0) {
return SCORE;
} else if (id == 1) {
return FREQUENCY;
} else {
throw new ElasticsearchException("Illegal suggest sort " + id);
}
}
}
}
@Override

View File

@ -19,198 +19,186 @@
package org.elasticsearch.search.suggest;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
/**
* Defines how to perform suggesting. This builder allows a number of global options to be specified and
* an arbitrary number of {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder} instances.
* an arbitrary number of {@link SuggestionBuilder} instances.
* <p>
* Suggesting works by suggesting terms that appear in the suggest text that are similar compared to the terms in
* provided text. These spelling suggestions are based on several options described in this class.
* Suggesting works by suggesting terms/phrases that are similar to the terms in the
* provided suggest text. These suggestions are based on several options described in this class.
*/
public class SuggestBuilder extends ToXContentToBytes {
public class SuggestBuilder extends ToXContentToBytes implements Writeable<SuggestBuilder> {
public static final SuggestBuilder PROTOTYPE = new SuggestBuilder();
protected static final ParseField GLOBAL_TEXT_FIELD = new ParseField("text");
private final String name;
private String globalText;
private final Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
private final List<SuggestionBuilder<?>> suggestions = new ArrayList<>();
public SuggestBuilder() {
this.name = null;
}
public SuggestBuilder(String name) {
this.name = name;
}
/**
* Sets the text to provide suggestions for. The suggest text is a required option that needs
* to be set either via this setter or via the {@link org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder#setText(String)} method.
* to be set either via this setter or via the {@link org.elasticsearch.search.suggest.SuggestionBuilder#text(String)} method.
* <p>
* The suggest text gets analyzed by the suggest analyzer or the suggest field search analyzer.
* For each analyzed token, similar terms are suggested where possible.
*/
public SuggestBuilder setText(String globalText) {
public SuggestBuilder setGlobalText(@Nullable String globalText) {
this.globalText = globalText;
return this;
}
/**
* Adds an {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder} instance under a user defined name.
* The order in which the <code>Suggestions</code> are added, is the same as in the response.
* Gets the global suggest text
*/
public SuggestBuilder addSuggestion(SuggestionBuilder<?> suggestion) {
suggestions.add(suggestion);
@Nullable
public String getGlobalText() {
return globalText;
}
/**
* Adds an {@link org.elasticsearch.search.suggest.SuggestionBuilder} instance under a user defined name.
* The order in which the <code>Suggestions</code> are added is the same as in the response.
* @throws IllegalArgumentException if two suggestions added have the same name
*/
public SuggestBuilder addSuggestion(String name, SuggestionBuilder<?> suggestion) {
Objects.requireNonNull(name, "every suggestion needs a name");
if (suggestions.get(name) == null) {
suggestions.put(name, suggestion);
} else {
throw new IllegalArgumentException("already added another suggestion with name [" + name + "]");
}
return this;
}
/**
* Returns all suggestions with the defined names.
* Get all the <code>Suggestions</code> that were added to the global {@link SuggestBuilder},
* together with their names
*/
public List<SuggestionBuilder<?>> getSuggestion() {
public Map<String, SuggestionBuilder<?>> getSuggestions() {
return suggestions;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if(name == null) {
builder.startObject();
} else {
builder.startObject(name);
}
builder.startObject();
if (globalText != null) {
builder.field("text", globalText);
}
for (SuggestionBuilder<?> suggestion : suggestions) {
builder = suggestion.toXContent(builder, params);
for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
builder.startObject(suggestion.getKey());
suggestion.getValue().toXContent(builder, params);
builder.endObject();
}
builder.endObject();
return builder;
}
public static abstract class SuggestionBuilder<T> extends ToXContentToBytes {
public static SuggestBuilder fromXContent(QueryParseContext parseContext, Suggesters suggesters) throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
SuggestBuilder suggestBuilder = new SuggestBuilder();
String fieldName = null;
private String name;
private String suggester;
private String text;
private String prefix;
private String regex;
private String field;
private String analyzer;
private Integer size;
private Integer shardSize;
public SuggestionBuilder(String name, String suggester) {
this.name = name;
this.suggester = suggester;
if (parser.currentToken() == null) {
// when we parse from RestSuggestAction the current token is null, advance the token
parser.nextToken();
}
/**
* Same as in {@link SuggestBuilder#setText(String)}, but in the suggestion scope.
*/
@SuppressWarnings("unchecked")
public T text(String text) {
this.text = text;
return (T) this;
assert parser.currentToken() == XContentParser.Token.START_OBJECT : "current token must be a start object";
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (parseFieldMatcher.match(fieldName, GLOBAL_TEXT_FIELD)) {
suggestBuilder.setGlobalText(parser.text());
} else {
throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
String suggestionName = fieldName;
if (suggestionName == null) {
throw new IllegalArgumentException("suggestion must have name");
}
suggestBuilder.addSuggestion(suggestionName, SuggestionBuilder.fromXContent(parseContext, suggesters));
} else {
throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]");
}
}
return suggestBuilder;
}
protected void setPrefix(String prefix) {
this.prefix = prefix;
public SuggestionSearchContext build(QueryShardContext context) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
SuggestionContext suggestionContext = suggestion.getValue().build(context);
if (suggestionContext.getText() == null) {
if (globalText == null) {
throw new IllegalArgumentException("The required text option is missing");
}
suggestionContext.setText(BytesRefs.toBytesRef(globalText));
}
suggestionSearchContext.addSuggestion(suggestion.getKey(), suggestionContext);
}
return suggestionSearchContext;
}
protected void setRegex(String regex) {
this.regex = regex;
@Override
public SuggestBuilder readFrom(StreamInput in) throws IOException {
final SuggestBuilder builder = new SuggestBuilder();
builder.globalText = in.readOptionalString();
final int size = in.readVInt();
for (int i = 0; i < size; i++) {
builder.suggestions.put(in.readString(), in.readSuggestion());
}
return builder;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
if (text != null) {
builder.field("text", text);
}
if (prefix != null) {
builder.field("prefix", prefix);
}
if (regex != null) {
builder.field("regex", regex);
}
builder.startObject(suggester);
if (analyzer != null) {
builder.field("analyzer", analyzer);
}
if (field != null) {
builder.field("field", field);
}
if (size != null) {
builder.field("size", size);
}
if (shardSize != null) {
builder.field("shard_size", shardSize);
}
builder = innerToXContent(builder, params);
builder.endObject();
builder.endObject();
return builder;
}
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
/**
* Sets from what field to fetch the candidate suggestions from. This is an
* required option and needs to be set via this setter or
* {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder#field(String)}
* method
*/
@SuppressWarnings("unchecked")
public T field(String field) {
this.field = field;
return (T)this;
}
/**
* Sets the analyzer to analyse to suggest text with. Defaults to the search
* analyzer of the suggest field.
*/
@SuppressWarnings("unchecked")
public T analyzer(String analyzer) {
this.analyzer = analyzer;
return (T)this;
}
/**
* Sets the maximum suggestions to be returned per suggest text term.
*/
@SuppressWarnings("unchecked")
public T size(int size) {
if (size <= 0) {
throw new IllegalArgumentException("Size must be positive");
}
this.size = size;
return (T)this;
}
/**
* Sets the maximum number of suggested term to be retrieved from each
* individual shard. During the reduce phase the only the top N suggestions
* are returned based on the <code>size</code> option. Defaults to the
* <code>size</code> option.
* <p>
* Setting this to a value higher than the `size` can be useful in order to
* get a more accurate document frequency for suggested terms. Due to the
* fact that terms are partitioned amongst shards, the shard level document
* frequencies of suggestions may not be precise. Increasing this will make
* these document frequencies more precise.
*/
@SuppressWarnings("unchecked")
public T shardSize(Integer shardSize) {
this.shardSize = shardSize;
return (T)this;
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(globalText);
final int size = suggestions.size();
out.writeVInt(size);
for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
out.writeString(suggestion.getKey());
out.writeSuggestion(suggestion.getValue());
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
@SuppressWarnings("unchecked")
SuggestBuilder o = (SuggestBuilder)other;
return Objects.equals(globalText, o.globalText) &&
Objects.equals(suggestions, o.suggestions);
}
@Override
public int hashCode() {
return Objects.hash(globalText, suggestions);
}
}
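To illustrate the reworked API, a short hedged sketch ("spelling" and "body" are made-up names): suggestions now get their name when registered on the SuggestBuilder, and the global text serves as a fallback for suggestions without their own text:
SuggestBuilder suggest = new SuggestBuilder()
    .setGlobalText("elasticsaerch")                                 // fallback text for all suggestions
    .addSuggestion("spelling", SuggestBuilders.termSuggestion("body").size(3));
// registering a second suggestion under "spelling" throws IllegalArgumentException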

View File

@ -29,35 +29,32 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
public abstract class SuggestBuilders {
/**
* Creates a term suggestion lookup query with the provided <code>name</code>
* Creates a term suggestion lookup query with the provided <code>field</code>
*
* @param fieldname The field to run the suggestion on
* @return a {@link org.elasticsearch.search.suggest.term.TermSuggestionBuilder}
* instance
*/
public static TermSuggestionBuilder termSuggestion(String name) {
return new TermSuggestionBuilder(name);
public static TermSuggestionBuilder termSuggestion(String fieldname) {
return new TermSuggestionBuilder(fieldname);
}
/**
* Creates a phrase suggestion lookup query with the provided <code>name</code>
* Creates a phrase suggestion lookup query with the provided <code>field</code>
*
* @param fieldname The field to run the suggestion on
* @return a {@link org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder}
* instance
*/
public static PhraseSuggestionBuilder phraseSuggestion(String name) {
return new PhraseSuggestionBuilder(name);
public static PhraseSuggestionBuilder phraseSuggestion(String fieldname) {
return new PhraseSuggestionBuilder(fieldname);
}
/**
* Creates a completion suggestion lookup query with the provided <code>name</code>
* Creates a completion suggestion lookup query with the provided <code>field</code>
*
* @param fieldname The field to run the suggestion on
* @return a {@link org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder}
* instance
*/
public static CompletionSuggestionBuilder completionSuggestion(String name) {
return new CompletionSuggestionBuilder(name);
public static CompletionSuggestionBuilder completionSuggestion(String fieldname) {
return new CompletionSuggestionBuilder(fieldname);
}
}
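A quick sketch of the changed factory signatures; the argument is now the field to suggest on rather than the suggestion name (all field names here are hypothetical):
TermSuggestionBuilder term = SuggestBuilders.termSuggestion("body");
PhraseSuggestionBuilder phrase = SuggestBuilders.phraseSuggestion("body.shingles");
CompletionSuggestionBuilder completion = SuggestBuilders.completionSuggestion("title_suggest");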

View File

@ -1,129 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
*
*/
public final class SuggestParseElement implements SearchParseElement {
private Suggesters suggesters;
@Inject
public SuggestParseElement(Suggesters suggesters) {
this.suggesters = suggesters;
}
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.fieldData(),
context.shardTarget().shardId());
context.suggest(suggestionSearchContext);
}
public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService,
IndexFieldDataService fieldDataService, ShardId shardId) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
BytesRef globalText = null;
String fieldName = null;
Map<String, SuggestionContext> suggestionContexts = new HashMap<>();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if ("text".equals(fieldName)) {
globalText = parser.utf8Bytes();
} else {
throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
String suggestionName = fieldName;
BytesRef suggestText = null;
BytesRef prefix = null;
BytesRef regex = null;
SuggestionContext suggestionContext = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if ("text".equals(fieldName)) {
suggestText = parser.utf8Bytes();
} else if ("prefix".equals(fieldName)) {
prefix = parser.utf8Bytes();
} else if ("regex".equals(fieldName)) {
regex = parser.utf8Bytes();
} else {
throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (suggestionName == null) {
throw new IllegalArgumentException("Suggestion must have name");
}
if (suggesters.get(fieldName) == null) {
throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
}
final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
suggestionContext = contextParser.parse(parser, mapperService, fieldDataService);
}
}
if (suggestionContext != null) {
if (suggestText != null && prefix == null) {
suggestionContext.setPrefix(suggestText);
suggestionContext.setText(suggestText);
} else if (suggestText == null && prefix != null) {
suggestionContext.setPrefix(prefix);
suggestionContext.setText(prefix);
} else if (regex != null) {
suggestionContext.setRegex(regex);
suggestionContext.setText(regex);
}
suggestionContexts.put(suggestionName, suggestionContext);
}
}
}
for (Map.Entry<String, SuggestionContext> entry : suggestionContexts.entrySet()) {
String suggestionName = entry.getKey();
SuggestionContext suggestionContext = entry.getValue();
suggestionContext.setShard(shardId);
SuggestUtils.verifySuggestion(mapperService, globalText, suggestionContext);
suggestionSearchContext.addSuggestion(suggestionName, suggestionContext);
}
return suggestionSearchContext;
}
}

View File

@ -37,28 +37,22 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonMap;
import static java.util.Collections.emptyMap;
/**
*/
public class SuggestPhase extends AbstractComponent implements SearchPhase {
private final Map<String, SearchParseElement> parseElements;
private final SuggestParseElement parseElement;
@Inject
public SuggestPhase(Settings settings, SuggestParseElement suggestParseElement) {
public SuggestPhase(Settings settings) {
super(settings);
this.parseElement = suggestParseElement;
parseElements = singletonMap("suggest", parseElement);
}
@Override
public Map<String, ? extends SearchParseElement> parseElements() {
return parseElements;
}
public SuggestParseElement parseElement() {
return parseElement;
// this is used to parse SearchSourceBuilder.ext() bytes
// we don't allow any suggestion parsing for the extension
return emptyMap();
}
@Override

View File

@ -170,17 +170,8 @@ public final class SuggestUtils {
}
}
public static Suggest.Suggestion.Sort resolveSort(String sortVal) {
if ("score".equals(sortVal)) {
return Suggest.Suggestion.Sort.SCORE;
} else if ("frequency".equals(sortVal)) {
return Suggest.Suggestion.Sort.FREQUENCY;
} else {
throw new IllegalArgumentException("Illegal suggest sort " + sortVal);
}
}
public static StringDistance resolveDistance(String distanceVal) {
distanceVal = distanceVal.toLowerCase(Locale.US);
if ("internal".equals(distanceVal)) {
return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
} else if ("damerau_levenshtein".equals(distanceVal) || "damerauLevenshtein".equals(distanceVal)) {
@ -224,24 +215,24 @@ public final class SuggestUtils {
} else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) {
suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text()));
} else if (parseFieldMatcher.match(fieldName, Fields.SORT)) {
suggestion.sort(SuggestUtils.resolveSort(parser.text()));
suggestion.sort(SortBy.resolve(parser.text()));
} else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) {
suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_EDITS)) {
suggestion.maxEdits(parser.intValue());
suggestion.maxEdits(parser.intValue());
if (suggestion.maxEdits() < 1 || suggestion.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
throw new IllegalArgumentException("Illegal max_edits value " + suggestion.maxEdits());
}
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_INSPECTIONS)) {
suggestion.maxInspections(parser.intValue());
suggestion.maxInspections(parser.intValue());
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_TERM_FREQ)) {
suggestion.maxTermFreq(parser.floatValue());
suggestion.maxTermFreq(parser.floatValue());
} else if (parseFieldMatcher.match(fieldName, Fields.PREFIX_LENGTH)) {
suggestion.prefixLength(parser.intValue());
suggestion.prefixLength(parser.intValue());
} else if (parseFieldMatcher.match(fieldName, Fields.MIN_WORD_LENGTH)) {
suggestion.minQueryLength(parser.intValue());
suggestion.minWordLength(parser.intValue());
} else if (parseFieldMatcher.match(fieldName, Fields.MIN_DOC_FREQ)) {
suggestion.minDocFreq(parser.floatValue());
suggestion.minDocFreq(parser.floatValue());
} else {
return false;
}
@ -268,10 +259,8 @@ public final class SuggestUtils {
return false;
}
return true;
}
public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) {
// Verify options and set defaults
if (suggestion.getField() == null) {
@ -291,7 +280,6 @@ public final class SuggestUtils {
}
}
public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) {
if (analyzer instanceof NamedAnalyzer) {
analyzer = ((NamedAnalyzer)analyzer).analyzer();

View File

@ -29,7 +29,10 @@ public abstract class Suggester<T extends SuggestionSearchContext.SuggestionCont
protected abstract Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException;
public abstract SuggestContextParser getContextParser();
/**
* link the suggester to its corresponding {@link SuggestionBuilder}
*/
public abstract SuggestionBuilder<? extends SuggestionBuilder> getBuilderPrototype();
public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {

View File

@ -20,8 +20,6 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.util.ExtensionPoint;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
import org.elasticsearch.search.suggest.term.TermSuggester;
@ -42,26 +40,30 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
this(Collections.emptyMap());
}
public Suggesters(Map<String, Suggester> suggesters) {
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class);
this.parsers = Collections.unmodifiableMap(suggesters);
}
@Inject
public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
this(addBuildIns(suggesters, scriptService, indexServices));
public Suggesters(Map<String, Suggester> suggesters) {
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestPhase.class);
this.parsers = Collections.unmodifiableMap(addBuildIns(suggesters));
}
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters) {
final Map<String, Suggester> map = new HashMap<>();
map.put("phrase", new PhraseSuggester(scriptService, indexServices));
map.put("term", new TermSuggester());
map.put("completion", new CompletionSuggester());
map.put("phrase", PhraseSuggester.PROTOTYPE);
map.put("term", TermSuggester.PROTOTYPE);
map.put("completion", CompletionSuggester.PROTOTYPE);
map.putAll(suggesters);
return map;
}
public Suggester get(String type) {
return parsers.get(type);
public SuggestionBuilder<? extends SuggestionBuilder> getSuggestionPrototype(String suggesterName) {
Suggester<?> suggester = parsers.get(suggesterName);
if (suggester == null) {
throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported");
}
SuggestionBuilder<?> suggestParser = suggester.getBuilderPrototype();
if (suggestParser == null) {
throw new IllegalArgumentException("suggester with name [" + suggesterName + "] not supported");
}
return suggestParser;
}
}
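A small sketch of the prototype lookup on the coordinating node, assuming the no-arg constructor shown above registers the built-ins:
Suggesters suggesters = new Suggesters();                           // "term", "phrase", "completion" built in
SuggestionBuilder<?> prototype = suggesters.getSuggestionPrototype("term");
// an unregistered name fails fast with IllegalArgumentException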

View File

@ -0,0 +1,412 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
import java.util.Objects;
/**
* Base class for the different suggestion implementations.
*/
public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends ToXContentToBytes implements NamedWriteable<T> {
protected final String field;
protected String text;
protected String prefix;
protected String regex;
protected String analyzer;
protected Integer size;
protected Integer shardSize;
protected static final ParseField TEXT_FIELD = new ParseField("text");
protected static final ParseField PREFIX_FIELD = new ParseField("prefix");
protected static final ParseField REGEX_FIELD = new ParseField("regex");
protected static final ParseField FIELDNAME_FIELD = new ParseField("field");
protected static final ParseField ANALYZER_FIELD = new ParseField("analyzer");
protected static final ParseField SIZE_FIELD = new ParseField("size");
protected static final ParseField SHARDSIZE_FIELD = new ParseField("shard_size");
/**
* Creates a new suggestion.
* @param field field to execute suggestions on
*/
protected SuggestionBuilder(String field) {
Objects.requireNonNull(field, "suggestion requires a field name");
if (field.isEmpty()) {
throw new IllegalArgumentException("suggestion field name is empty");
}
this.field = field;
}
/**
* internal copy constructor that copies over all class fields from second SuggestionBuilder except field name.
*/
protected SuggestionBuilder(String field, SuggestionBuilder<?> in) {
this(field);
text = in.text;
prefix = in.prefix;
regex = in.regex;
analyzer = in.analyzer;
size = in.size;
shardSize = in.shardSize;
}
/**
* Same as in {@link SuggestBuilder#setGlobalText(String)}, but in the suggestion scope.
*/
@SuppressWarnings("unchecked")
public T text(String text) {
this.text = text;
return (T) this;
}
/**
* get the text for this suggestion
*/
public String text() {
return this.text;
}
@SuppressWarnings("unchecked")
protected T prefix(String prefix) {
this.prefix = prefix;
return (T) this;
}
/**
* get the prefix for this suggestion
*/
public String prefix() {
return this.prefix;
}
@SuppressWarnings("unchecked")
protected T regex(String regex) {
this.regex = regex;
return (T) this;
}
/**
* get the regex for this suggestion
*/
public String regex() {
return this.regex;
}
/**
* get the {@link #field()} parameter
*/
public String field() {
return this.field;
}
/**
* Sets the analyzer to analyse the suggest text with. Defaults to the search
* analyzer of the suggest field.
*/
@SuppressWarnings("unchecked")
public T analyzer(String analyzer) {
this.analyzer = analyzer;
return (T)this;
}
/**
* get the {@link #analyzer()} parameter
*/
public String analyzer() {
return this.analyzer;
}
/**
* Sets the maximum number of suggestions to be returned per suggest text term.
*/
@SuppressWarnings("unchecked")
public T size(int size) {
if (size <= 0) {
throw new IllegalArgumentException("size must be positive");
}
this.size = size;
return (T)this;
}
/**
* get the {@link #size()} parameter
*/
public Integer size() {
return this.size;
}
/**
* Sets the maximum number of suggested terms to be retrieved from each
* individual shard. During the reduce phase, only the top N suggestions
* are returned, based on the <code>size</code> option. Defaults to the
* <code>size</code> option.
* <p>
* Setting this to a value higher than the <code>size</code> can be useful in order to
* get a more accurate document frequency for suggested terms. Due to the
* fact that terms are partitioned amongst shards, the shard level document
* frequencies of suggestions may not be precise. Increasing this will make
* these document frequencies more precise.
*/
@SuppressWarnings("unchecked")
public T shardSize(Integer shardSize) {
this.shardSize = shardSize;
return (T)this;
}
/**
* get the {@link #shardSize()} parameter
*/
public Integer shardSize() {
return this.shardSize;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (text != null) {
builder.field(TEXT_FIELD.getPreferredName(), text);
}
if (prefix != null) {
builder.field(PREFIX_FIELD.getPreferredName(), prefix);
}
if (regex != null) {
builder.field(REGEX_FIELD.getPreferredName(), regex);
}
builder.startObject(getSuggesterName());
if (analyzer != null) {
builder.field(ANALYZER_FIELD.getPreferredName(), analyzer);
}
builder.field(FIELDNAME_FIELD.getPreferredName(), field);
if (size != null) {
builder.field(SIZE_FIELD.getPreferredName(), size);
}
if (shardSize != null) {
builder.field(SHARDSIZE_FIELD.getPreferredName(), shardSize);
}
builder = innerToXContent(builder, params);
builder.endObject();
return builder;
}
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
static SuggestionBuilder<?> fromXContent(QueryParseContext parseContext, Suggesters suggesters)
throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parsefieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;
String currentFieldName = null;
String suggestText = null;
String prefix = null;
String regex = null;
SuggestionBuilder<?> suggestionBuilder = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parsefieldMatcher.match(currentFieldName, TEXT_FIELD)) {
suggestText = parser.text();
} else if (parsefieldMatcher.match(currentFieldName, PREFIX_FIELD)) {
prefix = parser.text();
} else if (parsefieldMatcher.match(currentFieldName, REGEX_FIELD)) {
regex = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "suggestion does not support [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
SuggestionBuilder<?> suggestParser = suggesters.getSuggestionPrototype(currentFieldName);
if (suggestParser == null) {
throw new ParsingException(parser.getTokenLocation(), "suggestion [" + currentFieldName + "] not supported");
}
suggestionBuilder = suggestParser.innerFromXContent(parseContext);
}
}
if (suggestionBuilder == null) {
throw new ElasticsearchParseException("missing suggestion object");
}
if (suggestText != null) {
suggestionBuilder.text(suggestText);
}
if (prefix != null) {
suggestionBuilder.prefix(prefix);
}
if (regex != null) {
suggestionBuilder.regex(regex);
}
return suggestionBuilder;
}
protected abstract SuggestionBuilder<T> innerFromXContent(QueryParseContext parseContext) throws IOException;
protected abstract SuggestionContext build(QueryShardContext context) throws IOException;
/**
* Transfers the text, prefix, regex, analyzer, field, size and shard size settings from the
* original {@link SuggestionBuilder} to the target {@link SuggestionContext}
*/
protected void populateCommonFields(MapperService mapperService,
SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException {
Objects.requireNonNull(field, "field must not be null");
MappedFieldType fieldType = mapperService.fullName(field);
if (fieldType == null) {
throw new IllegalArgumentException("no mapping found for field [" + field + "]");
} else if (analyzer == null) {
// no analyzer name passed in, so try the field's analyzer, or the default analyzer
if (fieldType.searchAnalyzer() == null) {
suggestionContext.setAnalyzer(mapperService.searchAnalyzer());
} else {
suggestionContext.setAnalyzer(fieldType.searchAnalyzer());
}
} else {
Analyzer luceneAnalyzer = mapperService.analysisService().analyzer(analyzer);
if (luceneAnalyzer == null) {
throw new IllegalArgumentException("analyzer [" + analyzer + "] doesn't exists");
}
suggestionContext.setAnalyzer(luceneAnalyzer);
}
suggestionContext.setField(field);
if (size != null) {
suggestionContext.setSize(size);
}
if (shardSize != null) {
suggestionContext.setShardSize(shardSize);
} else {
// if no shard size is set in builder, use size (or at least 5)
suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5));
}
if (text != null) {
suggestionContext.setText(BytesRefs.toBytesRef(text));
}
if (prefix != null) {
suggestionContext.setPrefix(BytesRefs.toBytesRef(prefix));
}
if (regex != null) {
suggestionContext.setRegex(BytesRefs.toBytesRef(regex));
}
if (text != null && prefix == null) {
suggestionContext.setPrefix(BytesRefs.toBytesRef(text));
} else if (text == null && prefix != null) {
suggestionContext.setText(BytesRefs.toBytesRef(prefix));
} else if (text == null && regex != null) {
suggestionContext.setText(BytesRefs.toBytesRef(regex));
}
}
private String getSuggesterName() {
//the default implementation returns the same as the writeable name, but we keep the two concepts distinct on purpose
return getWriteableName();
}
@Override
public final T readFrom(StreamInput in) throws IOException {
String field = in.readString();
T suggestionBuilder = doReadFrom(in, field);
suggestionBuilder.text = in.readOptionalString();
suggestionBuilder.prefix = in.readOptionalString();
suggestionBuilder.regex = in.readOptionalString();
suggestionBuilder.analyzer = in.readOptionalString();
suggestionBuilder.size = in.readOptionalVInt();
suggestionBuilder.shardSize = in.readOptionalVInt();
return suggestionBuilder;
}
/**
* Subclasses should return a new instance, reading themselves from the stream input
* @param in the stream input to read from
* @param field the field needed for the ctor of the concrete suggestion
*/
protected abstract T doReadFrom(StreamInput in, String field) throws IOException;
@Override
public final void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
doWriteTo(out);
out.writeOptionalString(text);
out.writeOptionalString(prefix);
out.writeOptionalString(regex);
out.writeOptionalString(analyzer);
out.writeOptionalVInt(size);
out.writeOptionalVInt(shardSize);
}
protected abstract void doWriteTo(StreamOutput out) throws IOException;
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
@SuppressWarnings("unchecked")
T other = (T) obj;
return Objects.equals(text, other.text()) &&
Objects.equals(prefix, other.prefix()) &&
Objects.equals(regex, other.regex()) &&
Objects.equals(field, other.field()) &&
Objects.equals(analyzer, other.analyzer()) &&
Objects.equals(size, other.size()) &&
Objects.equals(shardSize, other.shardSize()) &&
doEquals(other);
}
/**
* Indicates whether some other {@link SuggestionBuilder} of the same type is "equal to" this one.
*/
protected abstract boolean doEquals(T other);
@Override
public final int hashCode() {
return Objects.hash(text, prefix, regex, field, analyzer, size, shardSize, doHashCode());
}
/**
* HashCode for the subclass of {@link SuggestionBuilder} to implement.
*/
protected abstract int doHashCode();
}
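To make the wire contract concrete, a hedged round-trip sketch; it assumes a concrete builder ("title_suggest" is a hypothetical field, and CompletionSuggestionBuilder below exposes a PROTOTYPE constant for exactly this purpose):
// writeTo() emits the field name, then subclass state via doWriteTo(), then the
// optional common fields; readFrom() consumes them in the same order on the shard.
CompletionSuggestionBuilder original = new CompletionSuggestionBuilder("title_suggest").text("elas");
BytesStreamOutput out = new BytesStreamOutput();                    // in-memory StreamOutput
original.writeTo(out);
SuggestionBuilder<?> copy = CompletionSuggestionBuilder.PROTOTYPE.readFrom(out.bytes().streamInput());
assert copy.equals(original);                                       // equals() covers the common fields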

View File

@ -20,7 +20,7 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.query.QueryShardContext;
import java.util.LinkedHashMap;
import java.util.Map;
@ -38,17 +38,22 @@ public class SuggestionSearchContext {
return suggestions;
}
public static class SuggestionContext {
public abstract static class SuggestionContext {
private BytesRef text;
private BytesRef prefix;
private BytesRef regex;
private final Suggester suggester;
private String field;
private Analyzer analyzer;
private int size = 5;
private int shardSize = -1;
private ShardId shardId;
private QueryShardContext shardContext;
private Suggester<?> suggester;
protected SuggestionContext(Suggester<?> suggester, QueryShardContext shardContext) {
this.suggester = suggester;
this.shardContext = shardContext;
}
public BytesRef getText() {
return text;
@ -74,12 +79,8 @@ public class SuggestionSearchContext {
this.regex = regex;
}
public SuggestionContext(Suggester suggester) {
this.suggester = suggester;
}
public Suggester<SuggestionContext> getSuggester() {
return this.suggester;
return ((Suggester<SuggestionContext>) suggester);
}
public Analyzer getAnalyzer() {
@ -120,12 +121,23 @@ public class SuggestionSearchContext {
this.shardSize = shardSize;
}
public void setShard(ShardId shardId) {
this.shardId = shardId;
public QueryShardContext getShardContext() {
return this.shardContext;
}
public ShardId getShard() {
return shardId;
@Override
public String toString() {
return "[" +
"text=" + text +
",field=" + field +
",prefix=" + prefix +
",regex=" + regex +
",size=" + size +
",shardSize=" + shardSize +
",suggester=" + suggester +
",analyzer=" + analyzer +
",shardContext=" + shardContext +
"]";
}
}

View File

@ -1,178 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.RegexpFlag;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils.Fields;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.RegexOptionsBuilder;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Parses query options for {@link CompletionSuggester}
*
* Acceptable input:
* {
* "field" : STRING
* "size" : INT
* "fuzzy" : BOOLEAN | FUZZY_OBJECT
* "contexts" : QUERY_CONTEXTS
* "regex" : REGEX_OBJECT
* }
*
* FUZZY_OBJECT : {
* "edit_distance" : STRING | INT
* "transpositions" : BOOLEAN
* "min_length" : INT
* "prefix_length" : INT
* "unicode_aware" : BOOLEAN
* "max_determinized_states" : INT
* }
*
* REGEX_OBJECT: {
* "flags" : REGEX_FLAGS
* "max_determinized_states" : INT
* }
*
* see {@link RegexpFlag} for REGEX_FLAGS
*/
public class CompletionSuggestParser implements SuggestContextParser {
private static ObjectParser<CompletionSuggestionContext, ContextAndSuggest> TLP_PARSER = new ObjectParser<>(CompletionSuggestionBuilder.SUGGESTION_NAME, null);
private static ObjectParser<CompletionSuggestionBuilder.RegexOptionsBuilder, ContextAndSuggest> REGEXP_PARSER = new ObjectParser<>(RegexOptionsBuilder.REGEX_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.RegexOptionsBuilder::new);
private static ObjectParser<CompletionSuggestionBuilder.FuzzyOptionsBuilder, ContextAndSuggest> FUZZY_PARSER = new ObjectParser<>(FuzzyOptionsBuilder.FUZZY_OPTIONS.getPreferredName(), CompletionSuggestionBuilder.FuzzyOptionsBuilder::new);
static {
FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyMinLength, FuzzyOptionsBuilder.MIN_LENGTH_FIELD);
FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setMaxDeterminizedStates, FuzzyOptionsBuilder.MAX_DETERMINIZED_STATES_FIELD);
FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setUnicodeAware, FuzzyOptionsBuilder.UNICODE_AWARE_FIELD);
FUZZY_PARSER.declareInt(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setFuzzyPrefixLength, FuzzyOptionsBuilder.PREFIX_LENGTH_FIELD);
FUZZY_PARSER.declareBoolean(CompletionSuggestionBuilder.FuzzyOptionsBuilder::setTranspositions, FuzzyOptionsBuilder.TRANSPOSITION_FIELD);
FUZZY_PARSER.declareValue((a, b) -> {
try {
a.setFuzziness(Fuzziness.parse(b).asDistance());
} catch (IOException e) {
throw new ElasticsearchException(e);
}
}, Fuzziness.FIELD);
REGEXP_PARSER.declareInt(CompletionSuggestionBuilder.RegexOptionsBuilder::setMaxDeterminizedStates, RegexOptionsBuilder.MAX_DETERMINIZED_STATES);
REGEXP_PARSER.declareStringOrNull(CompletionSuggestionBuilder.RegexOptionsBuilder::setFlags, RegexOptionsBuilder.FLAGS_VALUE);
TLP_PARSER.declareStringArray(CompletionSuggestionContext::setPayloadFields, CompletionSuggestionBuilder.PAYLOAD_FIELD);
TLP_PARSER.declareObjectOrDefault(CompletionSuggestionContext::setFuzzyOptionsBuilder, FUZZY_PARSER, CompletionSuggestionBuilder.FuzzyOptionsBuilder::new, FuzzyOptionsBuilder.FUZZY_OPTIONS);
TLP_PARSER.declareObject(CompletionSuggestionContext::setRegexOptionsBuilder, REGEXP_PARSER, RegexOptionsBuilder.REGEX_OPTIONS);
TLP_PARSER.declareString(SuggestionSearchContext.SuggestionContext::setField, Fields.FIELD);
TLP_PARSER.declareField((p, v, c) -> {
String analyzerName = p.text();
Analyzer analyzer = c.mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
v.setAnalyzer(analyzer);
}, Fields.ANALYZER, ObjectParser.ValueType.STRING);
TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setSize, Fields.SIZE);
TLP_PARSER.declareInt(SuggestionSearchContext.SuggestionContext::setShardSize, Fields.SHARD_SIZE);
TLP_PARSER.declareField((p, v, c) -> {
// Copy the current structure. We will parse, once the mapping is provided
XContentBuilder builder = XContentFactory.contentBuilder(p.contentType());
builder.copyCurrentStructure(p);
BytesReference bytes = builder.bytes();
c.contextParser = XContentFactory.xContent(bytes).createParser(bytes);
p.skipChildren();
}, CompletionSuggestionBuilder.CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated
}
private static class ContextAndSuggest {
XContentParser contextParser;
final MapperService mapperService;
ContextAndSuggest(MapperService mapperService) {
this.mapperService = mapperService;
}
}
private final CompletionSuggester completionSuggester;
public CompletionSuggestParser(CompletionSuggester completionSuggester) {
this.completionSuggester = completionSuggester;
}
@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException {
final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester, mapperService, fieldDataService);
final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService);
TLP_PARSER.parse(parser, suggestion, contextAndSuggest);
final XContentParser contextParser = contextAndSuggest.contextParser;
MappedFieldType mappedFieldType = mapperService.fullName(suggestion.getField());
if (mappedFieldType == null) {
throw new ElasticsearchException("Field [" + suggestion.getField() + "] is not a completion suggest field");
} else if (mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) {
CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
if (type.hasContextMappings() == false && contextParser != null) {
throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
}
Map<String, List<ContextMapping.QueryContext>> queryContexts = Collections.emptyMap();
if (type.hasContextMappings() && contextParser != null) {
ContextMappings contextMappings = type.getContextMappings();
contextParser.nextToken();
queryContexts = new HashMap<>(contextMappings.size());
assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
XContentParser.Token currentToken;
String currentFieldName;
while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = contextParser.currentName();
final ContextMapping mapping = contextMappings.get(currentFieldName);
queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
}
}
contextParser.close();
}
suggestion.setFieldType(type);
suggestion.setQueryContexts(queryContexts);
return suggestion;
} else {
throw new IllegalArgumentException("Field [" + suggestion.getField() + "] is not a completion suggest field");
}
}
}

View File

@ -34,10 +34,12 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import java.io.IOException;
import java.util.ArrayList;
@ -50,9 +52,7 @@ import java.util.Set;
public class CompletionSuggester extends Suggester<CompletionSuggestionContext> {
public SuggestContextParser getContextParser() {
return new CompletionSuggestParser(this);
}
public static final CompletionSuggester PROTOTYPE = new CompletionSuggester();
@Override
protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name,
@ -78,15 +78,18 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
}
// collect payloads
final Map<String, List<Object>> payload = new HashMap<>(0);
Set<String> payloadFields = suggestionContext.getPayloadFields();
List<String> payloadFields = suggestionContext.getPayloadFields();
if (payloadFields.isEmpty() == false) {
final int readerIndex = ReaderUtil.subIndex(suggestDoc.doc, leaves);
final LeafReaderContext subReaderContext = leaves.get(readerIndex);
final int subDocId = suggestDoc.doc - subReaderContext.docBase;
for (String field : payloadFields) {
MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field);
MapperService mapperService = suggestionContext.getShardContext().getMapperService();
MappedFieldType payloadFieldType = mapperService.fullName(field);
if (payloadFieldType != null) {
final AtomicFieldData data = suggestionContext.getIndexFieldDataService().getForField(payloadFieldType).load(subReaderContext);
QueryShardContext shardContext = suggestionContext.getShardContext();
final AtomicFieldData data = shardContext.getForField(payloadFieldType)
.load(subReaderContext);
final ScriptDocValues scriptValues = data.getScriptValues();
scriptValues.setNextDocId(subDocId);
payload.put(field, new ArrayList<>(scriptValues.getValues()));
@ -262,4 +265,9 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
}
}
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return CompletionSuggestionBuilder.PROTOTYPE;
}
}
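As a usage sketch for the above ("title_suggest" is a hypothetical completion-mapped field): payload fields, when requested, are loaded per suggestion hit through field data as innerExecute() shows:
CompletionSuggestionBuilder completion = SuggestBuilders.completionSuggestion("title_suggest").text("elas");
SuggestBuilder suggest = new SuggestBuilder().addSuggestion("titles", completion);
// on the shard, CompletionSuggester resolves payload fields via the QueryShardContext's MapperService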

View File

@ -18,26 +18,37 @@
*/
package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.RegexpFlag;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Objects;
/**
* Defines a suggest command based on a prefix, typically to provide "auto-complete" functionality
@ -45,218 +56,81 @@ import java.util.Set;
* are created at index-time and so must be defined in the mapping with the type "completion" before
* indexing.
*/
public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilder<CompletionSuggestionBuilder> {
public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSuggestionBuilder> {
final static String SUGGESTION_NAME = "completion";
public static final CompletionSuggestionBuilder PROTOTYPE = new CompletionSuggestionBuilder("_na_");
static final String SUGGESTION_NAME = "completion";
static final ParseField PAYLOAD_FIELD = new ParseField("payload");
static final ParseField CONTEXTS_FIELD = new ParseField("contexts", "context");
private FuzzyOptionsBuilder fuzzyOptionsBuilder;
private RegexOptionsBuilder regexOptionsBuilder;
private final Map<String, List<ToXContent>> queryContexts = new HashMap<>();
private final Set<String> payloadFields = new HashSet<>();
public CompletionSuggestionBuilder(String name) {
super(name, SUGGESTION_NAME);
/**
* {
* "field" : STRING
* "size" : INT
* "fuzzy" : BOOLEAN | FUZZY_OBJECT
* "contexts" : QUERY_CONTEXTS
* "regex" : REGEX_OBJECT
* "payload" : STRING_ARRAY
* }
*/
private static ObjectParser<CompletionSuggestionBuilder.InnerBuilder, Void> TLP_PARSER =
new ObjectParser<>(SUGGESTION_NAME, null);
static {
TLP_PARSER.declareStringArray(CompletionSuggestionBuilder.InnerBuilder::payload, PAYLOAD_FIELD);
TLP_PARSER.declareField((parser, completionSuggestionContext, context) -> {
if (parser.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
if (parser.booleanValue()) {
completionSuggestionContext.fuzzyOptions = new FuzzyOptions.Builder().build();
}
} else {
completionSuggestionContext.fuzzyOptions = FuzzyOptions.parse(parser);
}
},
FuzzyOptions.FUZZY_OPTIONS, ObjectParser.ValueType.OBJECT_OR_BOOLEAN);
TLP_PARSER.declareField((parser, completionSuggestionContext, context) ->
completionSuggestionContext.regexOptions = RegexOptions.parse(parser),
RegexOptions.REGEX_OPTIONS, ObjectParser.ValueType.OBJECT);
TLP_PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::field, SuggestUtils.Fields.FIELD);
TLP_PARSER.declareString(CompletionSuggestionBuilder.InnerBuilder::analyzer, SuggestUtils.Fields.ANALYZER);
TLP_PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::size, SuggestUtils.Fields.SIZE);
TLP_PARSER.declareInt(CompletionSuggestionBuilder.InnerBuilder::shardSize, SuggestUtils.Fields.SHARD_SIZE);
TLP_PARSER.declareField((p, v, c) -> {
// Copy the current structure. We will parse, once the mapping is provided
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.copyCurrentStructure(p);
v.contextBytes = builder.bytes();
p.skipChildren();
}, CONTEXTS_FIELD, ObjectParser.ValueType.OBJECT); // context is deprecated
}
protected FuzzyOptions fuzzyOptions;
protected RegexOptions regexOptions;
protected BytesReference contextBytes = null;
protected List<String> payloadFields = Collections.emptyList();
public CompletionSuggestionBuilder(String field) {
super(field);
}
/**
* Options for fuzzy queries
* internal copy constructor that copies over all class fields except for the field, which is
* set to the one provided in the first argument
*/
public static class FuzzyOptionsBuilder implements ToXContent {
static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy");
static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions");
static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length");
static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware");
static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states");
private int editDistance = FuzzyCompletionQuery.DEFAULT_MAX_EDITS;
private boolean transpositions = FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS;
private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH;
private int fuzzyPrefixLength = FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX;
private boolean unicodeAware = FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
public FuzzyOptionsBuilder() {
}
/**
* Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance.
* The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one.
*/
public FuzzyOptionsBuilder setFuzziness(int editDistance) {
this.editDistance = editDistance;
return this;
}
/**
* Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance.
* The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one.
*/
public FuzzyOptionsBuilder setFuzziness(Fuzziness fuzziness) {
this.editDistance = fuzziness.asDistance();
return this;
}
/**
* Sets if transpositions (swapping one character for another) counts as one character
* change or two.
* Defaults to true, meaning it uses the fuzzier option of counting transpositions as
* a single change.
*/
public FuzzyOptionsBuilder setTranspositions(boolean transpositions) {
this.transpositions = transpositions;
return this;
}
/**
* Sets the minimum length of input string before fuzzy suggestions are returned, defaulting
* to 3.
*/
public FuzzyOptionsBuilder setFuzzyMinLength(int fuzzyMinLength) {
this.fuzzyMinLength = fuzzyMinLength;
return this;
}
/**
* Sets the minimum length of the input, which is not checked for fuzzy alternatives, defaults to 1
*/
public FuzzyOptionsBuilder setFuzzyPrefixLength(int fuzzyPrefixLength) {
this.fuzzyPrefixLength = fuzzyPrefixLength;
return this;
}
/**
* Sets the maximum automaton states allowed for the fuzzy expansion
*/
public FuzzyOptionsBuilder setMaxDeterminizedStates(int maxDeterminizedStates) {
this.maxDeterminizedStates = maxDeterminizedStates;
return this;
}
/**
* Set to true if all measurements (like edit distance, transpositions and lengths) are in unicode
* code points (actual letters) instead of bytes. Default is false.
*/
public FuzzyOptionsBuilder setUnicodeAware(boolean unicodeAware) {
this.unicodeAware = unicodeAware;
return this;
}
/**
* Returns the maximum number of edits
*/
int getEditDistance() {
return editDistance;
}
/**
* Returns if transpositions option is set
*
* if transpositions is set, then swapping one character for another counts as one edit instead of two.
*/
boolean isTranspositions() {
return transpositions;
}
/**
* Returns the length of input prefix after which edits are applied
*/
int getFuzzyMinLength() {
return fuzzyMinLength;
}
/**
* Returns the minimum length of the input prefix required to apply any edits
*/
int getFuzzyPrefixLength() {
return fuzzyPrefixLength;
}
/**
* Returns if all measurements (like edit distance, transpositions and lengths) are in unicode code
* points (actual letters) instead of bytes.
*/
boolean isUnicodeAware() {
return unicodeAware;
}
/**
* Returns the maximum automaton states allowed for fuzzy expansion
*/
int getMaxDeterminizedStates() {
return maxDeterminizedStates;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(FUZZY_OPTIONS.getPreferredName());
builder.field(Fuzziness.FIELD.getPreferredName(), editDistance);
builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions);
builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength);
builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength);
builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware);
builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
}
/**
* Options for regular expression queries
*/
public static class RegexOptionsBuilder implements ToXContent {
static final ParseField REGEX_OPTIONS = new ParseField("regex");
static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value");
static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states");
private int flagsValue = RegExp.ALL;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
public RegexOptionsBuilder() {
}
/**
* Sets the regular expression syntax flags
* see {@link RegexpFlag}
*/
public RegexOptionsBuilder setFlags(String flags) {
this.flagsValue = RegexpFlag.resolveValue(flags);
return this;
}
/**
* Sets the maximum automaton states allowed for the regular expression expansion
*/
public RegexOptionsBuilder setMaxDeterminizedStates(int maxDeterminizedStates) {
this.maxDeterminizedStates = maxDeterminizedStates;
return this;
}
int getFlagsValue() {
return flagsValue;
}
int getMaxDeterminizedStates() {
return maxDeterminizedStates;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(REGEX_OPTIONS.getPreferredName());
builder.field(FLAGS_VALUE.getPreferredName(), flagsValue);
builder.field(MAX_DETERMINIZED_STATES.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
}
/**
* internal copy constructor that copies over all class fields except for the field which is
* set to the one provided in the first argument
*/
private CompletionSuggestionBuilder(String fieldname, CompletionSuggestionBuilder in) {
super(fieldname, in);
fuzzyOptions = in.fuzzyOptions;
regexOptions = in.regexOptions;
contextBytes = in.contextBytes;
payloadFields = in.payloadFields;
}
/**
* Sets the prefix to provide completions for.
* The prefix gets analyzed by the suggest analyzer.
*/
@Override
public CompletionSuggestionBuilder prefix(String prefix) {
super.prefix(prefix);
return this;
}
@ -264,36 +138,37 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
* Same as {@link #prefix(String)} with fuzziness of <code>fuzziness</code>
*/
public CompletionSuggestionBuilder prefix(String prefix, Fuzziness fuzziness) {
super.prefix(prefix);
this.fuzzyOptions = new FuzzyOptions.Builder().setFuzziness(fuzziness).build();
return this;
}
/**
* Same as {@link #prefix(String)} with full fuzzy options
* see {@link FuzzyOptions.Builder}
*/
public CompletionSuggestionBuilder prefix(String prefix, FuzzyOptions fuzzyOptions) {
super.prefix(prefix);
this.fuzzyOptions = fuzzyOptions;
return this;
}
/**
* Sets a regular expression pattern for prefixes to provide completions for.
*/
@Override
public CompletionSuggestionBuilder regex(String regex) {
super.regex(regex);
return this;
}
/**
* Same as {@link #regex(String)} with full regular expression options
* see {@link RegexOptions.Builder}
*/
public CompletionSuggestionBuilder regex(String regex, RegexOptions regexOptions) {
this.regex(regex);
this.regexOptions = regexOptions;
return this;
}
@ -301,65 +176,189 @@ public class CompletionSuggestionBuilder extends SuggestBuilder.SuggestionBuilde
* Sets the fields to be returned as suggestion payload.
* Note: Only doc values enabled fields are supported
*/
public CompletionSuggestionBuilder payload(List<String> fields) {
Objects.requireNonNull(fields, "payload must not be null");
this.payloadFields = fields;
return this;
}
/**
* Sets query contexts for completion
* @param queryContexts named query contexts
* see {@link org.elasticsearch.search.suggest.completion.context.CategoryQueryContext}
* and {@link org.elasticsearch.search.suggest.completion.context.GeoQueryContext}
*/
public CompletionSuggestionBuilder contexts(Map<String, List<? extends QueryContext>> queryContexts) {
Objects.requireNonNull(queryContexts, "contexts must not be null");
try {
XContentBuilder contentBuilder = XContentFactory.jsonBuilder();
contentBuilder.startObject();
for (Map.Entry<String, List<? extends QueryContext>> contextEntry : queryContexts.entrySet()) {
contentBuilder.startArray(contextEntry.getKey());
for (ToXContent queryContext : contextEntry.getValue()) {
queryContext.toXContent(contentBuilder, EMPTY_PARAMS);
}
contentBuilder.endArray();
}
contentBuilder.endObject();
contextBytes = contentBuilder.bytes();
return this;
} catch (IOException e) {
throw new IllegalArgumentException(e);
}
}
private static class InnerBuilder extends CompletionSuggestionBuilder {
private String field;
public InnerBuilder() {
super("_na_");
}
private InnerBuilder field(String field) {
this.field = field;
return this;
}
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (payloadFields.isEmpty() == false) {
builder.startArray(PAYLOAD_FIELD.getPreferredName());
for (String field : payloadFields) {
builder.value(field);
}
builder.endArray();
}
if (fuzzyOptions != null) {
fuzzyOptions.toXContent(builder, params);
}
if (regexOptions != null) {
regexOptions.toXContent(builder, params);
}
if (contextBytes != null) {
XContentParser contextParser = XContentFactory.xContent(XContentType.JSON).createParser(contextBytes);
builder.field(CONTEXTS_FIELD.getPreferredName());
builder.copyCurrentStructure(contextParser);
}
return builder;
}
@Override
protected CompletionSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
CompletionSuggestionBuilder.InnerBuilder builder = new CompletionSuggestionBuilder.InnerBuilder();
TLP_PARSER.parse(parseContext.parser(), builder);
String field = builder.field;
// now we should have field name, check and copy fields over to the suggestion builder we return
if (field == null) {
throw new ElasticsearchParseException(
"the required field option [" + SuggestUtils.Fields.FIELD.getPreferredName() + "] is missing");
}
return new CompletionSuggestionBuilder(field, builder);
}
@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
CompletionSuggestionContext suggestionContext = new CompletionSuggestionContext(context);
// copy over common settings to each suggestion builder
final MapperService mapperService = context.getMapperService();
populateCommonFields(mapperService, suggestionContext);
suggestionContext.setPayloadFields(payloadFields);
suggestionContext.setFuzzyOptions(fuzzyOptions);
suggestionContext.setRegexOptions(regexOptions);
MappedFieldType mappedFieldType = mapperService.fullName(suggestionContext.getField());
if (mappedFieldType != null && mappedFieldType instanceof CompletionFieldMapper.CompletionFieldType) {
CompletionFieldMapper.CompletionFieldType type = (CompletionFieldMapper.CompletionFieldType) mappedFieldType;
suggestionContext.setFieldType(type);
if (type.hasContextMappings() && contextBytes != null) {
try (XContentParser contextParser = XContentFactory.xContent(contextBytes).createParser(contextBytes)) {
if (type.hasContextMappings() && contextParser != null) {
ContextMappings contextMappings = type.getContextMappings();
contextParser.nextToken();
Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = new HashMap<>(contextMappings.size());
assert contextParser.currentToken() == XContentParser.Token.START_OBJECT;
XContentParser.Token currentToken;
String currentFieldName;
while ((currentToken = contextParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (currentToken == XContentParser.Token.FIELD_NAME) {
currentFieldName = contextParser.currentName();
final ContextMapping mapping = contextMappings.get(currentFieldName);
queryContexts.put(currentFieldName, mapping.parseQueryContext(contextParser));
}
}
suggestionContext.setQueryContexts(queryContexts);
}
}
} else if (contextBytes != null) {
throw new IllegalArgumentException("suggester [" + type.name() + "] doesn't expect any context");
}
} else {
throw new IllegalArgumentException("Field [" + suggestionContext.getField() + "] is not a completion suggest field");
}
return suggestionContext;
}
@Override
public String getWriteableName() {
return SUGGESTION_NAME;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeBoolean(payloadFields.isEmpty() == false);
if (payloadFields.isEmpty() == false) {
out.writeVInt(payloadFields.size());
for (String payloadField : payloadFields) {
out.writeString(payloadField);
}
}
out.writeBoolean(fuzzyOptions != null);
if (fuzzyOptions != null) {
fuzzyOptions.writeTo(out);
}
out.writeBoolean(regexOptions != null);
if (regexOptions != null) {
regexOptions.writeTo(out);
}
out.writeBoolean(contextBytes != null);
if (contextBytes != null) {
out.writeBytesReference(contextBytes);
}
}
@Override
public CompletionSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
CompletionSuggestionBuilder completionSuggestionBuilder = new CompletionSuggestionBuilder(field);
if (in.readBoolean()) {
int numPayloadField = in.readVInt();
List<String> payloadFields = new ArrayList<>(numPayloadField);
for (int i = 0; i < numPayloadField; i++) {
payloadFields.add(in.readString());
}
completionSuggestionBuilder.payloadFields = payloadFields;
}
if (in.readBoolean()) {
completionSuggestionBuilder.fuzzyOptions = FuzzyOptions.readFuzzyOptions(in);
}
if (in.readBoolean()) {
completionSuggestionBuilder.regexOptions = RegexOptions.readRegexOptions(in);
}
if (in.readBoolean()) {
completionSuggestionBuilder.contextBytes = in.readBytesReference();
}
return completionSuggestionBuilder;
}
@Override
protected boolean doEquals(CompletionSuggestionBuilder other) {
return Objects.equals(payloadFields, other.payloadFields) &&
Objects.equals(fuzzyOptions, other.fuzzyOptions) &&
Objects.equals(regexOptions, other.regexOptions) &&
Objects.equals(contextBytes, other.contextBytes);
}
@Override
protected int doHashCode() {
return Objects.hash(payloadFields, fuzzyOptions, regexOptions, contextBytes);
}
}
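For orientation, a brief usage sketch of the builder above; the field name "suggest_field", the payload field names, and the "genre" context are hypothetical examples, not part of this change:

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
import org.elasticsearch.search.suggest.completion.context.QueryContext;

// Hypothetical field and context names, for illustration only.
CompletionSuggestionBuilder suggestion = new CompletionSuggestionBuilder("suggest_field")
    .prefix("sugg", Fuzziness.ONE)              // fuzzy prefix with an edit distance of one
    .payload(Arrays.asList("title", "rating")); // payload fields must have doc values
Map<String, List<? extends QueryContext>> contexts = new HashMap<>();
contexts.put("genre", Collections.singletonList(
    CategoryQueryContext.builder().setCategory("rock").setBoost(2).build()));
suggestion.contexts(contexts);                  // serialized once into contextBytes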
View File
@ -20,39 +20,31 @@ package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.search.suggest.document.CompletionQuery;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
*
*/
public class CompletionSuggestionContext extends SuggestionSearchContext.SuggestionContext {
protected CompletionSuggestionContext(QueryShardContext shardContext) {
super(CompletionSuggester.PROTOTYPE, shardContext);
}
private CompletionFieldMapper.CompletionFieldType fieldType;
private FuzzyOptions fuzzyOptions;
private RegexOptions regexOptions;
private Map<String, List<ContextMapping.InternalQueryContext>> queryContexts = Collections.emptyMap();
private List<String> payloadFields = Collections.emptyList();
CompletionFieldMapper.CompletionFieldType getFieldType() {
return this.fieldType;
}
@ -61,61 +53,60 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest
this.fieldType = fieldType;
}
void setRegexOptions(RegexOptions regexOptions) {
this.regexOptions = regexOptions;
}
void setFuzzyOptions(FuzzyOptions fuzzyOptions) {
this.fuzzyOptions = fuzzyOptions;
}
void setQueryContexts(Map<String, List<ContextMapping.InternalQueryContext>> queryContexts) {
this.queryContexts = queryContexts;
}
void setPayloadFields(List<String> fields) {
this.payloadFields = fields;
}
List<String> getPayloadFields() {
return payloadFields;
}
public FuzzyOptions getFuzzyOptions() {
return fuzzyOptions;
}
public RegexOptions getRegexOptions() {
return regexOptions;
}
public Map<String, List<ContextMapping.InternalQueryContext>> getQueryContexts() {
return queryContexts;
}
CompletionQuery toQuery() {
CompletionFieldMapper.CompletionFieldType fieldType = getFieldType();
final CompletionQuery query;
if (getPrefix() != null) {
if (fuzzyOptions != null) {
query = fieldType.fuzzyQuery(getPrefix().utf8ToString(),
Fuzziness.fromEdits(fuzzyOptions.getEditDistance()),
fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(),
fuzzyOptions.getMaxDeterminizedStates(), fuzzyOptions.isTranspositions(),
fuzzyOptions.isUnicodeAware());
} else {
query = fieldType.prefixQuery(getPrefix());
}
} else if (getRegex() != null) {
if (fuzzyOptions != null) {
throw new IllegalArgumentException("can not use 'fuzzy' options with 'regex'");
}
if (regexOptions == null) {
regexOptions = RegexOptions.builder().build();
}
query = fieldType.regexpQuery(getRegex(), regexOptions.getFlagsValue(),
regexOptions.getMaxDeterminizedStates());
} else {
throw new IllegalArgumentException("'prefix' or 'regex' must be defined");
}
View File
@ -0,0 +1,310 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
/**
* Fuzzy options for completion suggester
*/
public class FuzzyOptions implements ToXContent, Writeable<FuzzyOptions> {
static final ParseField FUZZY_OPTIONS = new ParseField("fuzzy");
private static final ParseField TRANSPOSITION_FIELD = new ParseField("transpositions");
private static final ParseField MIN_LENGTH_FIELD = new ParseField("min_length");
private static final ParseField PREFIX_LENGTH_FIELD = new ParseField("prefix_length");
private static final ParseField UNICODE_AWARE_FIELD = new ParseField("unicode_aware");
private static final ParseField MAX_DETERMINIZED_STATES_FIELD = new ParseField("max_determinized_states");
/**
* fuzzy : {
* "edit_distance" : STRING | INT
* "transpositions" : BOOLEAN
* "min_length" : INT
* "prefix_length" : INT
* "unicode_aware" : BOOLEAN
* "max_determinized_states" : INT
* }
*/
private static ObjectParser<Builder, Void> PARSER = new ObjectParser<>(FUZZY_OPTIONS.getPreferredName(), Builder::new);
static {
PARSER.declareInt(Builder::setFuzzyMinLength, MIN_LENGTH_FIELD);
PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES_FIELD);
PARSER.declareBoolean(Builder::setUnicodeAware, UNICODE_AWARE_FIELD);
PARSER.declareInt(Builder::setFuzzyPrefixLength, PREFIX_LENGTH_FIELD);
PARSER.declareBoolean(Builder::setTranspositions, TRANSPOSITION_FIELD);
PARSER.declareValue((a, b) -> {
try {
a.setFuzziness(Fuzziness.parse(b).asDistance());
} catch (IOException e) {
throw new ElasticsearchException(e);
}
}, Fuzziness.FIELD);
}
private int editDistance;
private boolean transpositions;
private int fuzzyMinLength;
private int fuzzyPrefixLength;
private boolean unicodeAware;
private int maxDeterminizedStates;
private FuzzyOptions(int editDistance, boolean transpositions, int fuzzyMinLength, int fuzzyPrefixLength,
boolean unicodeAware, int maxDeterminizedStates) {
this.editDistance = editDistance;
this.transpositions = transpositions;
this.fuzzyMinLength = fuzzyMinLength;
this.fuzzyPrefixLength = fuzzyPrefixLength;
this.unicodeAware = unicodeAware;
this.maxDeterminizedStates = maxDeterminizedStates;
}
private FuzzyOptions() {
}
static FuzzyOptions parse(XContentParser parser) throws IOException {
return PARSER.parse(parser).build();
}
public static Builder builder() {
return new Builder();
}
/**
* Returns the maximum number of edits
*/
public int getEditDistance() {
return editDistance;
}
/**
* Returns if transpositions option is set
*
* if transpositions is set, then swapping one character for another counts as one edit instead of two.
*/
public boolean isTranspositions() {
return transpositions;
}
/**
* Returns the length of input prefix after which edits are applied
*/
public int getFuzzyMinLength() {
return fuzzyMinLength;
}
/**
* Returns the minimum length of the input prefix required to apply any edits
*/
public int getFuzzyPrefixLength() {
return fuzzyPrefixLength;
}
/**
* Returns if all measurements (like edit distance, transpositions and lengths) are in unicode code
* points (actual letters) instead of bytes.
*/
public boolean isUnicodeAware() {
return unicodeAware;
}
/**
* Returns the maximum automaton states allowed for fuzzy expansion
*/
public int getMaxDeterminizedStates() {
return maxDeterminizedStates;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FuzzyOptions that = (FuzzyOptions) o;
if (editDistance != that.editDistance) return false;
if (transpositions != that.transpositions) return false;
if (fuzzyMinLength != that.fuzzyMinLength) return false;
if (fuzzyPrefixLength != that.fuzzyPrefixLength) return false;
if (unicodeAware != that.unicodeAware) return false;
return maxDeterminizedStates == that.maxDeterminizedStates;
}
@Override
public int hashCode() {
int result = editDistance;
result = 31 * result + (transpositions ? 1 : 0);
result = 31 * result + fuzzyMinLength;
result = 31 * result + fuzzyPrefixLength;
result = 31 * result + (unicodeAware ? 1 : 0);
result = 31 * result + maxDeterminizedStates;
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(FUZZY_OPTIONS.getPreferredName());
builder.field(Fuzziness.FIELD.getPreferredName(), editDistance);
builder.field(TRANSPOSITION_FIELD.getPreferredName(), transpositions);
builder.field(MIN_LENGTH_FIELD.getPreferredName(), fuzzyMinLength);
builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), fuzzyPrefixLength);
builder.field(UNICODE_AWARE_FIELD.getPreferredName(), unicodeAware);
builder.field(MAX_DETERMINIZED_STATES_FIELD.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
public static FuzzyOptions readFuzzyOptions(StreamInput in) throws IOException {
FuzzyOptions fuzzyOptions = new FuzzyOptions();
fuzzyOptions.readFrom(in);
return fuzzyOptions;
}
@Override
public FuzzyOptions readFrom(StreamInput in) throws IOException {
this.transpositions = in.readBoolean();
this.unicodeAware = in.readBoolean();
this.editDistance = in.readVInt();
this.fuzzyMinLength = in.readVInt();
this.fuzzyPrefixLength = in.readVInt();
this.maxDeterminizedStates = in.readVInt();
return this;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(transpositions);
out.writeBoolean(unicodeAware);
out.writeVInt(editDistance);
out.writeVInt(fuzzyMinLength);
out.writeVInt(fuzzyPrefixLength);
out.writeVInt(maxDeterminizedStates);
}
/**
* Options for fuzzy queries
*/
public static class Builder {
private int editDistance = FuzzyCompletionQuery.DEFAULT_MAX_EDITS;
private boolean transpositions = FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS;
private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH;
private int fuzzyPrefixLength = FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX;
private boolean unicodeAware = FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
public Builder() {
}
/**
* Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance.
* The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one.
*/
public Builder setFuzziness(int editDistance) {
if (editDistance < 0 || editDistance > 2) {
throw new IllegalArgumentException("fuzziness must be between 0 and 2");
}
this.editDistance = editDistance;
return this;
}
/**
* Sets the level of fuzziness used to create suggestions using a {@link Fuzziness} instance.
* The default value is {@link Fuzziness#ONE} which allows for an "edit distance" of one.
*/
public Builder setFuzziness(Fuzziness fuzziness) {
Objects.requireNonNull(fuzziness, "fuzziness must not be null");
return setFuzziness(fuzziness.asDistance());
}
/**
* Sets if transpositions (swapping one character for another) counts as one character
* change or two.
* Defaults to true, meaning it uses the fuzzier option of counting transpositions as
* a single change.
*/
public Builder setTranspositions(boolean transpositions) {
this.transpositions = transpositions;
return this;
}
/**
* Sets the minimum length of input string before fuzzy suggestions are returned, defaulting
* to 3.
*/
public Builder setFuzzyMinLength(int fuzzyMinLength) {
if (fuzzyMinLength < 0) {
throw new IllegalArgumentException("fuzzyMinLength must not be negative");
}
this.fuzzyMinLength = fuzzyMinLength;
return this;
}
/**
* Sets the minimum length of the input, which is not checked for fuzzy alternatives, defaults to 1
*/
public Builder setFuzzyPrefixLength(int fuzzyPrefixLength) {
if (fuzzyPrefixLength < 0) {
throw new IllegalArgumentException("fuzzyPrefixLength must not be negative");
}
this.fuzzyPrefixLength = fuzzyPrefixLength;
return this;
}
/**
* Sets the maximum automaton states allowed for the fuzzy expansion
*/
public Builder setMaxDeterminizedStates(int maxDeterminizedStates) {
if (maxDeterminizedStates < 0) {
throw new IllegalArgumentException("maxDeterminizedStates must not be negative");
}
this.maxDeterminizedStates = maxDeterminizedStates;
return this;
}
/**
* Set to true if all measurements (like edit distance, transpositions and lengths) are in unicode
* code points (actual letters) instead of bytes. Default is false.
*/
public Builder setUnicodeAware(boolean unicodeAware) {
this.unicodeAware = unicodeAware;
return this;
}
public FuzzyOptions build() {
return new FuzzyOptions(editDistance, transpositions, fuzzyMinLength, fuzzyPrefixLength,
unicodeAware, maxDeterminizedStates);
}
}
}
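A short sketch of constructing the immutable FuzzyOptions above and attaching it to a suggestion; the field name and the option values are illustrative:

import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.completion.FuzzyOptions;

FuzzyOptions fuzzyOptions = FuzzyOptions.builder()
    .setFuzziness(Fuzziness.ONE)  // maximum edit distance of one
    .setTranspositions(true)      // a character swap counts as a single edit
    .setFuzzyMinLength(3)         // no fuzzy matching below three characters of input
    .setUnicodeAware(true)        // measure edits in code points rather than bytes
    .build();                     // validation happens in the setters, the result is immutable
new CompletionSuggestionBuilder("suggest_field").prefix("sugg", fuzzyOptions);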
View File
@ -0,0 +1,187 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.RegexpFlag;
import java.io.IOException;
/**
* Regular expression options for completion suggester
*/
public class RegexOptions implements ToXContent, Writeable<RegexOptions> {
static final ParseField REGEX_OPTIONS = new ParseField("regex");
private static final ParseField FLAGS_VALUE = new ParseField("flags", "flags_value");
private static final ParseField MAX_DETERMINIZED_STATES = new ParseField("max_determinized_states");
/**
* regex: {
* "flags" : STRING | INT
* "max_determinized_states" : INT
* }
*/
private static ObjectParser<Builder, Void> PARSER = new ObjectParser<>(REGEX_OPTIONS.getPreferredName(), Builder::new);
static {
PARSER.declareInt(Builder::setMaxDeterminizedStates, MAX_DETERMINIZED_STATES);
PARSER.declareField((parser, builder, aVoid) -> {
if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
builder.setFlags(parser.text());
} else if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
builder.setFlagsValue(parser.intValue());
} else {
throw new ElasticsearchParseException(REGEX_OPTIONS.getPreferredName()
+ " " + FLAGS_VALUE.getPreferredName() + " supports string or number");
}
}, FLAGS_VALUE, ObjectParser.ValueType.VALUE);
}
private int flagsValue;
private int maxDeterminizedStates;
private RegexOptions() {
}
private RegexOptions(int flagsValue, int maxDeterminizedStates) {
this.flagsValue = flagsValue;
this.maxDeterminizedStates = maxDeterminizedStates;
}
/**
* Returns internal regular expression syntax flag value
* see {@link RegexpFlag#value()}
*/
public int getFlagsValue() {
return flagsValue;
}
/**
* Returns the maximum automaton states allowed for fuzzy expansion
*/
public int getMaxDeterminizedStates() {
return maxDeterminizedStates;
}
public static Builder builder() {
return new Builder();
}
static RegexOptions parse(XContentParser parser) throws IOException {
return PARSER.parse(parser).build();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RegexOptions that = (RegexOptions) o;
if (flagsValue != that.flagsValue) return false;
return maxDeterminizedStates == that.maxDeterminizedStates;
}
@Override
public int hashCode() {
int result = flagsValue;
result = 31 * result + maxDeterminizedStates;
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(REGEX_OPTIONS.getPreferredName());
builder.field(FLAGS_VALUE.getPreferredName(), flagsValue);
builder.field(MAX_DETERMINIZED_STATES.getPreferredName(), maxDeterminizedStates);
builder.endObject();
return builder;
}
public static RegexOptions readRegexOptions(StreamInput in) throws IOException {
RegexOptions regexOptions = new RegexOptions();
regexOptions.readFrom(in);
return regexOptions;
}
@Override
public RegexOptions readFrom(StreamInput in) throws IOException {
this.flagsValue = in.readVInt();
this.maxDeterminizedStates = in.readVInt();
return this;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(flagsValue);
out.writeVInt(maxDeterminizedStates);
}
/**
* Options for regular expression queries
*/
public static class Builder {
private int flagsValue = RegExp.ALL;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
public Builder() {
}
/**
* Sets the regular expression syntax flags
* see {@link RegexpFlag}
*/
public Builder setFlags(String flags) {
this.flagsValue = RegexpFlag.resolveValue(flags);
return this;
}
private Builder setFlagsValue(int flagsValue) {
this.flagsValue = flagsValue;
return this;
}
/**
* Sets the maximum automaton states allowed for the regular expression expansion
*/
public Builder setMaxDeterminizedStates(int maxDeterminizedStates) {
if (maxDeterminizedStates < 0) {
throw new IllegalArgumentException("maxDeterminizedStates must not be negative");
}
this.maxDeterminizedStates = maxDeterminizedStates;
return this;
}
public RegexOptions build() {
return new RegexOptions(flagsValue, maxDeterminizedStates);
}
}
}
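Likewise, a sketch for RegexOptions; the flag names are resolved through RegexpFlag.resolveValue(String) and the values shown are illustrative:

import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.completion.RegexOptions;

RegexOptions regexOptions = RegexOptions.builder()
    .setFlags("INTERSECTION|COMPLEMENT")  // pipe-separated RegexpFlag names
    .setMaxDeterminizedStates(10000)      // caps automaton expansion during determinization
    .build();
new CompletionSuggestionBuilder("suggest_field").regex("sug.*", regexOptions);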
View File
@ -36,6 +36,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
* A {@link ContextMapping} that uses a simple string as a criteria
@ -44,7 +45,7 @@ import java.util.Set;
* {@link CategoryQueryContext} defines options for constructing
* a unit of query context for this context type
*/
public class CategoryContextMapping extends ContextMapping<CategoryQueryContext> {
private static final String FIELD_FIELDNAME = "path";
@ -137,6 +138,11 @@ public class CategoryContextMapping extends ContextMapping {
return (values == null) ? Collections.<CharSequence>emptySet() : values;
}
@Override
protected CategoryQueryContext prototype() {
return CategoryQueryContext.PROTOTYPE;
}
/**
* Parse a list of {@link CategoryQueryContext}
* using <code>parser</code>. A query context accepts one of the following forms:
@ -154,19 +160,13 @@ public class CategoryContextMapping extends ContextMapping {
* </ul>
*/
@Override
public List<InternalQueryContext> toInternalQueryContexts(List<CategoryQueryContext> queryContexts) {
List<InternalQueryContext> internalQueryContexts = new ArrayList<>(queryContexts.size());
internalQueryContexts.addAll(
queryContexts.stream()
.map(queryContext -> new InternalQueryContext(queryContext.getCategory(), queryContext.getBoost(), queryContext.isPrefix()))
.collect(Collectors.toList()));
return internalQueryContexts;
}
@Override
View File
@ -22,11 +22,11 @@ package org.elasticsearch.search.suggest.completion.context;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.CONTEXT_BOOST;
import static org.elasticsearch.search.suggest.completion.context.CategoryContextMapping.CONTEXT_PREFIX;
@ -35,12 +35,15 @@ import static org.elasticsearch.search.suggest.completion.context.CategoryContex
/**
* Defines the query context for {@link CategoryContextMapping}
*/
public final class CategoryQueryContext implements QueryContext {
public static final String NAME = "category";
public static final CategoryQueryContext PROTOTYPE = new CategoryQueryContext("", 1, false);
private final String category;
private final boolean isPrefix;
private final int boost;
private CategoryQueryContext(String category, int boost, boolean isPrefix) {
this.category = category;
this.boost = boost;
this.isPrefix = isPrefix;
@ -49,7 +52,7 @@ public final class CategoryQueryContext implements ToXContent {
/**
* Returns the category of the context
*/
public String getCategory() {
return category;
}
@ -71,54 +74,36 @@ public final class CategoryQueryContext implements ToXContent {
return new Builder();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CategoryQueryContext that = (CategoryQueryContext) o;
if (isPrefix != that.isPrefix) return false;
if (boost != that.boost) return false;
return category != null ? category.equals(that.category) : that.category == null;
}
@Override
public int hashCode() {
int result = category != null ? category.hashCode() : 0;
result = 31 * result + (isPrefix ? 1 : 0);
result = 31 * result + boost;
return result;
}
private static ObjectParser<Builder, Void> CATEGORY_PARSER = new ObjectParser<>(NAME, null);
static {
CATEGORY_PARSER.declareString(Builder::setCategory, new ParseField(CONTEXT_VALUE));
CATEGORY_PARSER.declareInt(Builder::setBoost, new ParseField(CONTEXT_BOOST));
CATEGORY_PARSER.declareBoolean(Builder::setPrefix, new ParseField(CONTEXT_PREFIX));
}
@Override
public CategoryQueryContext fromXContext(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
Builder builder = builder();
if (token == XContentParser.Token.START_OBJECT) {
@ -140,4 +125,49 @@ public final class CategoryQueryContext implements ToXContent {
builder.endObject();
return builder;
}
public static class Builder {
private String category;
private boolean isPrefix = false;
private int boost = 1;
public Builder() {
}
/**
* Sets the category of the context.
* This is a required field
*/
public Builder setCategory(String category) {
Objects.requireNonNull(category, "category must not be null");
this.category = category;
return this;
}
/**
* Sets if the context should be treated as a prefix or not.
* Defaults to false
*/
public Builder setPrefix(boolean prefix) {
this.isPrefix = prefix;
return this;
}
/**
* Sets the query-time boost of the context.
* Defaults to 1.
*/
public Builder setBoost(int boost) {
if (boost <= 0) {
throw new IllegalArgumentException("boost must be greater than 0");
}
this.boost = boost;
return this;
}
public CategoryQueryContext build() {
Objects.requireNonNull(category, "category must not be null");
return new CategoryQueryContext(category, boost, isPrefix);
}
}
}
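A minimal sketch of the builder contract above, including the validation it now enforces; the category value is a hypothetical example:

import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;

CategoryQueryContext ctx = CategoryQueryContext.builder()
    .setCategory("rock")  // required; null is rejected by both the setter and build()
    .setBoost(3)          // must be greater than 0
    .setPrefix(true)      // match the category by prefix
    .build();
// ctx.toXContent(...) emits the same context/boost/prefix fields that CATEGORY_PARSER reads back.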
View File
@ -23,11 +23,13 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
@ -38,7 +40,7 @@ import java.util.Set;
*
* Implementations have to define how contexts are parsed at query/index time
*/
public abstract class ContextMapping<T extends QueryContext> implements ToXContent {
public static final String FIELD_TYPE = "type";
public static final String FIELD_NAME = "name";
@ -94,10 +96,31 @@ public abstract class ContextMapping implements ToXContent {
*/
protected abstract Set<CharSequence> parseContext(ParseContext.Document document);
/**
* Prototype for the query context
*/
protected abstract T prototype();
/**
* Parses query contexts for this mapper
*/
public final List<InternalQueryContext> parseQueryContext(XContentParser parser) throws IOException, ElasticsearchParseException {
List<T> queryContexts = new ArrayList<>();
Token token = parser.nextToken();
if (token == Token.START_OBJECT || token == Token.VALUE_STRING) {
queryContexts.add((T) prototype().fromXContext(parser));
} else if (token == Token.START_ARRAY) {
while (parser.nextToken() != Token.END_ARRAY) {
queryContexts.add((T) prototype().fromXContext(parser));
}
}
return toInternalQueryContexts(queryContexts);
}
/**
* Convert query contexts to common representation
*/
protected abstract List<InternalQueryContext> toInternalQueryContexts(List<T> queryContexts);
/**
* Implementations should add specific configurations
@ -136,17 +159,38 @@ public abstract class ContextMapping implements ToXContent {
}
}
public static class InternalQueryContext {
public final String context;
public final int boost;
public final boolean isPrefix;
public InternalQueryContext(String context, int boost, boolean isPrefix) {
this.context = context;
this.boost = boost;
this.isPrefix = isPrefix;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
InternalQueryContext that = (InternalQueryContext) o;
if (boost != that.boost) return false;
if (isPrefix != that.isPrefix) return false;
return context != null ? context.equals(that.context) : that.context == null;
}
@Override
public int hashCode() {
int result = context != null ? context.hashCode() : 0;
result = 31 * result + boost;
result = 31 * result + (isPrefix ? 1 : 0);
return result;
}
@Override
public String toString() {
return "QueryContext{" +
View File
@ -43,7 +43,6 @@ import java.util.Set;
import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_NAME;
import static org.elasticsearch.search.suggest.completion.context.ContextMapping.FIELD_TYPE;
import static org.elasticsearch.search.suggest.completion.context.ContextMapping.Type;
/**
@ -153,7 +152,7 @@ public class ContextMappings implements ToXContent {
* @param queryContexts a map of context mapping name and collected query contexts
* @return a context-enabled query
*/
public ContextQuery toContextQuery(CompletionQuery query, Map<String, List<ContextMapping.InternalQueryContext>> queryContexts) {
ContextQuery typedContextQuery = new ContextQuery(query);
if (queryContexts.isEmpty() == false) {
CharsRefBuilder scratch = new CharsRefBuilder();
@ -162,9 +161,9 @@ public class ContextMappings implements ToXContent {
scratch.setCharAt(0, (char) typeId);
scratch.setLength(1);
ContextMapping mapping = contextMappings.get(typeId);
List<ContextMapping.InternalQueryContext> internalQueryContext = queryContexts.get(mapping.name());
if (internalQueryContext != null) {
for (ContextMapping.InternalQueryContext context : internalQueryContext) {
scratch.append(context.context);
typedContextQuery.addContext(scratch.toCharsRef(), context.boost, !context.isPrefix);
scratch.setLength(1);
View File
@ -42,6 +42,7 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.lucene.spatial.util.GeoHashUtils.addNeighbors;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
@ -56,7 +57,7 @@ import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
* {@link GeoQueryContext} defines the options for constructing
* a unit of query context for this context type
*/
public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
public static final String FIELD_PRECISION = "precision";
public static final String FIELD_FIELDNAME = "path";
@ -221,6 +222,11 @@ public class GeoContextMapping extends ContextMapping {
return locations;
}
@Override
protected GeoQueryContext prototype() {
return GeoQueryContext.PROTOTYPE;
}
/**
* Parse a list of {@link GeoQueryContext}
* using <code>parser</code>. A query context accepts one of the following forms:
@ -245,22 +251,10 @@ public class GeoContextMapping extends ContextMapping {
* see {@link GeoUtils#parseGeoPoint(String, GeoPoint)} for GEO POINT
*/
@Override
public List<InternalQueryContext> toInternalQueryContexts(List<GeoQueryContext> queryContexts) {
List<InternalQueryContext> internalQueryContextList = new ArrayList<>();
for (GeoQueryContext queryContext : queryContexts) {
int minPrecision = Math.min(this.precision, queryContext.getPrecision());
GeoPoint point = queryContext.getGeoPoint();
final Collection<String> locations = new HashSet<>();
String geoHash = stringEncode(point.getLon(), point.getLat(), minPrecision);
@ -268,19 +262,20 @@ public class GeoContextMapping extends ContextMapping {
if (queryContext.getNeighbours().isEmpty() && geoHash.length() == this.precision) {
addNeighbors(geoHash, locations);
} else if (queryContext.getNeighbours().isEmpty() == false) {
queryContext.getNeighbours().stream()
.filter(neighbourPrecision -> neighbourPrecision < geoHash.length())
.forEach(neighbourPrecision -> {
String truncatedGeoHash = geoHash.substring(0, neighbourPrecision);
locations.add(truncatedGeoHash);
addNeighbors(truncatedGeoHash, locations);
});
}
internalQueryContextList.addAll(
locations.stream()
.map(location -> new InternalQueryContext(location, queryContext.getBoost(), location.length() < this.precision))
.collect(Collectors.toList()));
}
return internalQueryContextList;
}
@Override
@ -304,7 +299,7 @@ public class GeoContextMapping extends ContextMapping {
private int precision = DEFAULT_PRECISION;
private String fieldName = null;
public Builder(String name) {
super(name);
}
View File
@ -24,13 +24,13 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_BOOST;
import static org.elasticsearch.search.suggest.completion.context.GeoContextMapping.CONTEXT_NEIGHBOURS;
@ -40,7 +40,10 @@ import static org.elasticsearch.search.suggest.completion.context.GeoContextMapp
/**
* Defines the query context for {@link GeoContextMapping}
*/
public final class GeoQueryContext implements QueryContext {
public static final String NAME = "geo";
public static final GeoQueryContext PROTOTYPE = new GeoQueryContext(null, 1, 12, Collections.emptyList());
private final GeoPoint geoPoint;
private final int boost;
private final int precision;
@ -81,90 +84,47 @@ public final class GeoQueryContext implements ToXContent {
return neighbours;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GeoQueryContext that = (GeoQueryContext) o;
if (boost != that.boost) return false;
if (precision != that.precision) return false;
if (geoPoint != null ? !geoPoint.equals(that.geoPoint) : that.geoPoint != null) return false;
return neighbours != null ? neighbours.equals(that.neighbours) : that.neighbours == null;
}
@Override
public int hashCode() {
int result = geoPoint != null ? geoPoint.hashCode() : 0;
result = 31 * result + boost;
result = 31 * result + precision;
result = 31 * result + (neighbours != null ? neighbours.hashCode() : 0);
return result;
}
public static Builder builder() {
return new Builder();
}
private static ObjectParser<GeoQueryContext.Builder, Void> GEO_CONTEXT_PARSER = new ObjectParser<>(NAME, null);
static {
GEO_CONTEXT_PARSER.declareField((parser, geoQueryContext, geoContextMapping) -> geoQueryContext.setGeoPoint(GeoUtils.parseGeoPoint(parser)), new ParseField(CONTEXT_VALUE), ObjectParser.ValueType.OBJECT);
GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setBoost, new ParseField(CONTEXT_BOOST));
// TODO : add string support for precision for GeoUtils.geoHashLevelsForPrecision()
GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setPrecision, new ParseField("precision"));
GEO_CONTEXT_PARSER.declareInt(GeoQueryContext.Builder::setPrecision, new ParseField(CONTEXT_PRECISION));
// TODO : add string array support for precision for GeoUtils.geoHashLevelsForPrecision()
GEO_CONTEXT_PARSER.declareIntArray(GeoQueryContext.Builder::setNeighbours, new ParseField("neighbours"));
GEO_CONTEXT_PARSER.declareIntArray(GeoQueryContext.Builder::setNeighbours, new ParseField(CONTEXT_NEIGHBOURS));
GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLat, new ParseField("lat"));
GEO_CONTEXT_PARSER.declareDouble(GeoQueryContext.Builder::setLon, new ParseField("lon"));
}
@Override
public GeoQueryContext fromXContext(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
GeoQueryContext.Builder builder = new Builder();
if (token == XContentParser.Token.START_OBJECT) {
@ -190,4 +150,82 @@ public final class GeoQueryContext implements ToXContent {
builder.endObject();
return builder;
}
public static class Builder {
private GeoPoint geoPoint;
private int boost = 1;
private int precision = 12;
private List<Integer> neighbours = Collections.emptyList();
public Builder() {
}
/**
* Sets the query-time boost for the context
* Defaults to 1
*/
public Builder setBoost(int boost) {
if (boost <= 0) {
throw new IllegalArgumentException("boost must be greater than 0");
}
this.boost = boost;
return this;
}
/**
* Sets the precision level for computing the geohash from the context geo point.
* Defaults to using index-time precision level
*/
public Builder setPrecision(int precision) {
if (precision < 1 || precision > 12) {
throw new IllegalArgumentException("precision must be between 1 and 12");
}
this.precision = precision;
return this;
}
/**
* Sets the precision levels at which geohash cells neighbours are considered.
* Defaults to only considering neighbours at the index-time precision level
*/
public Builder setNeighbours(List<Integer> neighbours) {
for (int neighbour : neighbours) {
if (neighbour < 1 || neighbour > 12) {
throw new IllegalArgumentException("neighbour value must be between 1 and 12");
}
}
this.neighbours = neighbours;
return this;
}
/**
* Sets the geo point of the context.
* This is a required field
*/
public Builder setGeoPoint(GeoPoint geoPoint) {
Objects.requireNonNull(geoPoint, "geoPoint must not be null");
this.geoPoint = geoPoint;
return this;
}
private double lat = Double.NaN;
void setLat(double lat) {
this.lat = lat;
}
private double lon = Double.NaN;
void setLon(double lon) {
this.lon = lon;
}
public GeoQueryContext build() {
if (geoPoint == null) {
if (Double.isNaN(lat) == false && Double.isNaN(lon) == false) {
geoPoint = new GeoPoint(lat, lon);
}
}
Objects.requireNonNull(geoPoint, "geoPoint must not be null");
return new GeoQueryContext(geoPoint, boost, precision, neighbours);
}
}
}
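For orientation, a query-time geo context assembled through the new builder could look like the following sketch; the coordinates, boost and precision are made-up example values, while the API is exactly the builder shown above.
    // Illustrative usage of GeoQueryContext.Builder; values are examples only.
    GeoQueryContext context = GeoQueryContext.builder()
            .setGeoPoint(new GeoPoint(53.55, 9.99)) // required
            .setBoost(2)                            // must be > 0
            .setPrecision(6)                        // geohash level, 1..12
            .build();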

@ -16,15 +16,18 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion.context;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
* Interface for serializing/de-serializing completion query context
*/
public interface QueryContext extends ToXContent {
QueryContext fromXContext(XContentParser parser) throws IOException;
}

@ -178,7 +178,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
protected long thresholdFrequency(long termFrequency, long dictionarySize) {
if (termFrequency > 0) {
return Math.max(0, Math.round(termFrequency * (Math.log10(termFrequency - frequencyPlateau) * (1.0 / Math.log10(logBase))) + 1));
}
return 0;

@ -25,13 +25,12 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.CandidateGenerator;
@ -42,7 +41,7 @@ import java.util.Set;
import java.util.function.Consumer;
public final class DirectCandidateGeneratorBuilder
implements Writeable<DirectCandidateGeneratorBuilder>, CandidateGenerator {
implements CandidateGenerator {
private static final String TYPE = "direct_generator";
static final DirectCandidateGeneratorBuilder PROTOTYPE = new DirectCandidateGeneratorBuilder("_na_");
@ -350,8 +349,7 @@ public final class DirectCandidateGeneratorBuilder
return replaceField(tmpFieldName.iterator().next(), tempGenerator);
}
public PhraseSuggestionContext.DirectCandidateGenerator build(MapperService mapperService) throws IOException {
PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
generator.setField(this.field);
transferIfNotNull(this.size, generator::size);
@ -372,7 +370,7 @@ public final class DirectCandidateGeneratorBuilder
generator.suggestMode(SuggestUtils.resolveSuggestMode(this.suggestMode));
}
if (this.sort != null) {
generator.sort(SortBy.resolve(this.sort));
}
if (this.stringDistance != null) {
generator.stringDistance(SuggestUtils.resolveDistance(this.stringDistance));
@ -384,7 +382,7 @@ public final class DirectCandidateGeneratorBuilder
transferIfNotNull(this.maxInspections, generator::maxInspections);
transferIfNotNull(this.maxTermFreq, generator::maxTermFreq);
transferIfNotNull(this.prefixLength, generator::prefixLength);
transferIfNotNull(this.minWordLength, generator::minWordLength);
transferIfNotNull(this.minDocFreq, generator::minDocFreq);
return generator;
}
@ -490,4 +488,4 @@ public final class DirectCandidateGeneratorBuilder
Objects.equals(minWordLength, other.minWordLength) &&
Objects.equals(minDocFreq, other.minDocFreq);
}
}

@ -0,0 +1,126 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
import java.io.IOException;
import java.util.Objects;
/**
* An <a href="http://en.wikipedia.org/wiki/Additive_smoothing">additive
* smoothing</a> model.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
* Smoothing</a> for details.
* </p>
*/
public final class Laplace extends SmoothingModel {
private double alpha = DEFAULT_LAPLACE_ALPHA;
private static final String NAME = "laplace";
private static final ParseField ALPHA_FIELD = new ParseField("alpha");
static final ParseField PARSE_FIELD = new ParseField(NAME);
/**
* Default alpha parameter for laplace smoothing
*/
public static final double DEFAULT_LAPLACE_ALPHA = 0.5;
public static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA);
/**
* Creates a Laplace smoothing model.
*
* @param alpha the smoothing parameter that is added to all raw counts
*/
public Laplace(double alpha) {
this.alpha = alpha;
}
/**
* @return the laplace model alpha parameter
*/
public double getAlpha() {
return this.alpha;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(ALPHA_FIELD.getPreferredName(), alpha);
return builder;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(alpha);
}
@Override
public SmoothingModel readFrom(StreamInput in) throws IOException {
return new Laplace(in.readDouble());
}
@Override
protected boolean doEquals(SmoothingModel other) {
Laplace otherModel = (Laplace) other;
return Objects.equals(alpha, otherModel.alpha);
}
@Override
protected final int doHashCode() {
return Objects.hash(alpha);
}
@Override
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
String fieldName = null;
double alpha = DEFAULT_LAPLACE_ALPHA;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, ALPHA_FIELD)) {
alpha = parser.doubleValue();
}
}
return new Laplace(alpha);
}
@Override
public WordScorerFactory buildWordScorerFactory() {
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
-> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
}
}
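As a point of reference for the model above: additive smoothing adds alpha to every raw count so that unseen n-grams keep a small non-zero probability. A minimal sketch under that definition (a hypothetical helper, not the actual LaplaceScorer math):
    // Hypothetical helper illustrating additive (Laplace) smoothing: alpha
    // shifts probability mass from seen to unseen n-grams.
    static double laplaceProbability(long ngramCount, long contextCount, long vocabularySize, double alpha) {
        return (ngramCount + alpha) / (contextCount + alpha * vocabularySize);
    }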

@ -0,0 +1,176 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
import java.io.IOException;
import java.util.Objects;
/**
* Linear interpolation smoothing model.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
* Smoothing</a> for details.
* </p>
*/
public final class LinearInterpolation extends SmoothingModel {
private static final String NAME = "linear";
public static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1);
private final double trigramLambda;
private final double bigramLambda;
private final double unigramLambda;
static final ParseField PARSE_FIELD = new ParseField(NAME);
private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda");
private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda");
private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda");
/**
* Creates a linear interpolation smoothing model.
*
* Note: the lambdas must sum up to one.
*
* @param trigramLambda
* the trigram lambda
* @param bigramLambda
* the bigram lambda
* @param unigramLambda
* the unigram lambda
*/
public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) {
double sum = trigramLambda + bigramLambda + unigramLambda;
if (Math.abs(sum - 1.0) > 0.001) {
throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
}
this.trigramLambda = trigramLambda;
this.bigramLambda = bigramLambda;
this.unigramLambda = unigramLambda;
}
public double getTrigramLambda() {
return this.trigramLambda;
}
public double getBigramLambda() {
return this.bigramLambda;
}
public double getUnigramLambda() {
return this.unigramLambda;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(TRIGRAM_FIELD.getPreferredName(), trigramLambda);
builder.field(BIGRAM_FIELD.getPreferredName(), bigramLambda);
builder.field(UNIGRAM_FIELD.getPreferredName(), unigramLambda);
return builder;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(trigramLambda);
out.writeDouble(bigramLambda);
out.writeDouble(unigramLambda);
}
@Override
public LinearInterpolation readFrom(StreamInput in) throws IOException {
return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble());
}
@Override
protected boolean doEquals(SmoothingModel other) {
final LinearInterpolation otherModel = (LinearInterpolation) other;
return Objects.equals(trigramLambda, otherModel.trigramLambda) &&
Objects.equals(bigramLambda, otherModel.bigramLambda) &&
Objects.equals(unigramLambda, otherModel.unigramLambda);
}
@Override
protected final int doHashCode() {
return Objects.hash(trigramLambda, bigramLambda, unigramLambda);
}
@Override
public LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
String fieldName = null;
double trigramLambda = 0.0;
double bigramLambda = 0.0;
double unigramLambda = 0.0;
ParseFieldMatcher matcher = parseContext.parseFieldMatcher();
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (matcher.match(fieldName, TRIGRAM_FIELD)) {
trigramLambda = parser.doubleValue();
if (trigramLambda < 0) {
throw new IllegalArgumentException("trigram_lambda must be positive");
}
} else if (matcher.match(fieldName, BIGRAM_FIELD)) {
bigramLambda = parser.doubleValue();
if (bigramLambda < 0) {
throw new IllegalArgumentException("bigram_lambda must be positive");
}
} else if (matcher.match(fieldName, UNIGRAM_FIELD)) {
unigramLambda = parser.doubleValue();
if (unigramLambda < 0) {
throw new IllegalArgumentException("unigram_lambda must be positive");
}
} else {
throw new IllegalArgumentException(
"suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + NAME + "] unknown token [" + token + "] after [" + fieldName + "]");
}
}
return new LinearInterpolation(trigramLambda, bigramLambda, unigramLambda);
}
@Override
public WordScorerFactory buildWordScorerFactory() {
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) ->
new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda,
unigramLambda);
}
}
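To make the lambda constraint concrete: the interpolated estimate is a convex combination of the three n-gram orders, which is why the constructor insists the lambdas sum to one. A hypothetical helper (the real scoring lives in LinearInterpoatingScorer):
    // Hypothetical helper: blend trigram, bigram and unigram estimates with
    // lambdas that sum to 1, e.g. the PROTOTYPE's 0.8/0.1/0.1.
    static double interpolate(double trigramProb, double bigramProb, double unigramProb,
            double trigramLambda, double bigramLambda, double unigramLambda) {
        return trigramLambda * trigramProb + bigramLambda * bigramProb + unigramLambda * unigramProb;
    }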

@ -1,358 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import java.io.IOException;
import java.util.Collections;
public final class PhraseSuggestParser implements SuggestContextParser {
private PhraseSuggester suggester;
public PhraseSuggestParser(PhraseSuggester suggester) {
this.suggester = suggester;
}
@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException {
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
XContentParser.Token token;
String fieldName = null;
boolean gramSizeSet = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if (!SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher)) {
if ("real_word_error_likelihood".equals(fieldName) || "realWorldErrorLikelihood".equals(fieldName)) {
suggestion.setRealWordErrorLikelihood(parser.floatValue());
if (suggestion.realworldErrorLikelyhood() <= 0.0) {
throw new IllegalArgumentException("real_word_error_likelihood must be > 0.0");
}
} else if ("confidence".equals(fieldName)) {
suggestion.setConfidence(parser.floatValue());
if (suggestion.confidence() < 0.0) {
throw new IllegalArgumentException("confidence must be >= 0.0");
}
} else if ("separator".equals(fieldName)) {
suggestion.setSeparator(new BytesRef(parser.text()));
} else if ("max_errors".equals(fieldName) || "maxErrors".equals(fieldName)) {
suggestion.setMaxErrors(parser.floatValue());
if (suggestion.maxErrors() <= 0.0) {
throw new IllegalArgumentException("max_error must be > 0.0");
}
} else if ("gram_size".equals(fieldName) || "gramSize".equals(fieldName)) {
suggestion.setGramSize(parser.intValue());
if (suggestion.gramSize() < 1) {
throw new IllegalArgumentException("gram_size must be >= 1");
}
gramSizeSet = true;
} else if ("force_unigrams".equals(fieldName) || "forceUnigrams".equals(fieldName)) {
suggestion.setRequireUnigram(parser.booleanValue());
} else if ("token_limit".equals(fieldName) || "tokenLimit".equals(fieldName)) {
int tokenLimit = parser.intValue();
if (tokenLimit <= 0) {
throw new IllegalArgumentException("token_limit must be >= 1");
}
suggestion.setTokenLimit(tokenLimit);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
}
}
} else if (token == Token.START_ARRAY) {
if (parseFieldMatcher.match(fieldName, DirectCandidateGeneratorBuilder.DIRECT_GENERATOR_FIELD)) {
// for now we only have a single type of generators
while ((token = parser.nextToken()) == Token.START_OBJECT) {
PhraseSuggestionContext.DirectCandidateGenerator generator = parseCandidateGenerator(parser, mapperService, parseFieldMatcher);
verifyGenerator(generator);
suggestion.addGenerator(generator);
}
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]");
}
} else if (token == Token.START_OBJECT) {
if ("smoothing".equals(fieldName)) {
parseSmoothingModel(parser, suggestion, fieldName);
} else if ("highlight".equals(fieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
if ("pre_tag".equals(fieldName) || "preTag".equals(fieldName)) {
suggestion.setPreTag(parser.utf8Bytes());
} else if ("post_tag".equals(fieldName) || "postTag".equals(fieldName)) {
suggestion.setPostTag(parser.utf8Bytes());
} else {
throw new IllegalArgumentException(
"suggester[phrase][highlight] doesn't support field [" + fieldName + "]");
}
}
}
} else if ("collate".equals(fieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if ("query".equals(fieldName)) {
if (suggestion.getCollateQueryScript() != null) {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
}
Template template = Template.parse(parser, parseFieldMatcher);
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
suggestion.setCollateQueryScript(compiledScript);
} else if ("params".equals(fieldName)) {
suggestion.setCollateScriptParams(parser.map());
} else if ("prune".equals(fieldName)) {
if (parser.isBooleanValue()) {
suggestion.setCollatePrune(parser.booleanValue());
} else {
throw new IllegalArgumentException("suggester[phrase][collate] prune must be either 'true' or 'false'");
}
} else {
throw new IllegalArgumentException(
"suggester[phrase][collate] doesn't support field [" + fieldName + "]");
}
}
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support array field [" + fieldName + "]");
}
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
}
}
if (suggestion.getField() == null) {
throw new IllegalArgumentException("The required field option is missing");
}
MappedFieldType fieldType = mapperService.fullName(suggestion.getField());
if (fieldType == null) {
throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]");
} else if (suggestion.getAnalyzer() == null) {
// no analyzer name passed in, so try the field's analyzer, or the default analyzer
if (fieldType.searchAnalyzer() == null) {
suggestion.setAnalyzer(mapperService.searchAnalyzer());
} else {
suggestion.setAnalyzer(fieldType.searchAnalyzer());
}
}
if (suggestion.model() == null) {
suggestion.setModel(StupidBackoffScorer.FACTORY);
}
if (!gramSizeSet || suggestion.generators().isEmpty()) {
final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils.getShingleFilterFactory(suggestion.getAnalyzer());
if (!gramSizeSet) {
// try to detect the shingle size
if (shingleFilterFactory != null) {
suggestion.setGramSize(shingleFilterFactory.getMaxShingleSize());
if (suggestion.getAnalyzer() == null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams()) {
throw new IllegalArgumentException("The default analyzer for field: [" + suggestion.getField() + "] doesn't emit unigrams. If this is intentional try to set the analyzer explicitly");
}
}
}
if (suggestion.generators().isEmpty()) {
if (shingleFilterFactory != null && shingleFilterFactory.getMinShingleSize() > 1 && !shingleFilterFactory.getOutputUnigrams() && suggestion.getRequireUnigram()) {
throw new IllegalArgumentException("The default candidate generator for phrase suggest can't operate on field: [" + suggestion.getField() + "] since it doesn't emit unigrams. If this is intentional try to set the candidate generator field explicitly");
}
// use a default generator on the same field
DirectCandidateGenerator generator = new DirectCandidateGenerator();
generator.setField(suggestion.getField());
suggestion.addGenerator(generator);
}
}
return suggestion;
}
public void parseSmoothingModel(XContentParser parser, PhraseSuggestionContext suggestion, String fieldName) throws IOException {
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
if ("linear".equals(fieldName)) {
ensureNoSmoothing(suggestion);
final double[] lambdas = new double[3];
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue()) {
if ("trigram_lambda".equals(fieldName) || "trigramLambda".equals(fieldName)) {
lambdas[0] = parser.doubleValue();
if (lambdas[0] < 0) {
throw new IllegalArgumentException("trigram_lambda must be positive");
}
} else if ("bigram_lambda".equals(fieldName) || "bigramLambda".equals(fieldName)) {
lambdas[1] = parser.doubleValue();
if (lambdas[1] < 0) {
throw new IllegalArgumentException("bigram_lambda must be positive");
}
} else if ("unigram_lambda".equals(fieldName) || "unigramLambda".equals(fieldName)) {
lambdas[2] = parser.doubleValue();
if (lambdas[2] < 0) {
throw new IllegalArgumentException("unigram_lambda must be positive");
}
} else {
throw new IllegalArgumentException(
"suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
}
}
}
double sum = 0.0d;
for (int i = 0; i < lambdas.length; i++) {
sum += lambdas[i];
}
if (Math.abs(sum - 1.0) > 0.001) {
throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
}
suggestion.setModel(new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
throws IOException {
return new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, lambdas[0], lambdas[1],
lambdas[2]);
}
});
} else if ("laplace".equals(fieldName)) {
ensureNoSmoothing(suggestion);
double theAlpha = Laplace.DEFAULT_LAPLACE_ALPHA;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue() && "alpha".equals(fieldName)) {
theAlpha = parser.doubleValue();
}
}
final double alpha = theAlpha;
suggestion.setModel(new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
throws IOException {
return new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
}
});
} else if ("stupid_backoff".equals(fieldName) || "stupidBackoff".equals(fieldName)) {
ensureNoSmoothing(suggestion);
double theDiscount = StupidBackoff.DEFAULT_BACKOFF_DISCOUNT;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue() && "discount".equals(fieldName)) {
theDiscount = parser.doubleValue();
}
}
final double discount = theDiscount;
suggestion.setModel(new WordScorer.WordScorerFactory() {
@Override
public WordScorer newScorer(IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
throws IOException {
return new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
}
});
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
}
}
}
private void ensureNoSmoothing(PhraseSuggestionContext suggestion) {
if (suggestion.model() != null) {
throw new IllegalArgumentException("only one smoothing model supported");
}
}
private void verifyGenerator(PhraseSuggestionContext.DirectCandidateGenerator suggestion) {
// Verify options and set defaults
if (suggestion.field() == null) {
throw new IllegalArgumentException("The required field option is missing");
}
}
static PhraseSuggestionContext.DirectCandidateGenerator parseCandidateGenerator(XContentParser parser, MapperService mapperService,
ParseFieldMatcher parseFieldMatcher) throws IOException {
XContentParser.Token token;
String fieldName = null;
PhraseSuggestionContext.DirectCandidateGenerator generator = new PhraseSuggestionContext.DirectCandidateGenerator();
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue()) {
if (!SuggestUtils.parseDirectSpellcheckerSettings(parser, fieldName, generator, parseFieldMatcher)) {
if ("field".equals(fieldName)) {
generator.setField(parser.text());
if (mapperService.fullName(generator.field()) == null) {
throw new IllegalArgumentException("No mapping found for field [" + generator.field() + "]");
}
} else if ("size".equals(fieldName)) {
generator.size(parser.intValue());
} else if ("pre_filter".equals(fieldName) || "preFilter".equals(fieldName)) {
String analyzerName = parser.text();
Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
generator.preFilter(analyzer);
} else if ("post_filter".equals(fieldName) || "postFilter".equals(fieldName)) {
String analyzerName = parser.text();
Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
if (analyzer == null) {
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
}
generator.postFilter(analyzer);
} else {
throw new IllegalArgumentException("CandidateGenerator doesn't support [" + fieldName + "]");
}
}
}
}
return generator;
}
}
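For orientation, the shape of a request body this now-removed parser used to consume could be assembled as below. This is a sketch with made-up values; the field names come from the parser above, and the builder calls are the standard XContentBuilder API (which throws IOException, omitted here).
    // Illustrative only: a phrase-suggest source of the shape PhraseSuggestParser parsed.
    XContentBuilder body = XContentFactory.jsonBuilder().startObject()
            .field("field", "title")
            .field("real_word_error_likelihood", 0.95f)
            .field("max_errors", 0.5f)
            .startObject("smoothing")
                .startObject("laplace").field("alpha", 0.7).endObject()
            .endObject()
        .endObject();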

@ -31,18 +31,17 @@ import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.phrase.NoisyChannelSpellChecker.Result;
import java.io.IOException;
@ -53,13 +52,8 @@ import java.util.Map;
public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
private final BytesRef SEPARATOR = new BytesRef(" ");
private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion";
public static final PhraseSuggester PROTOTYPE = new PhraseSuggester();
/*
* More Ideas:
@ -70,8 +64,8 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
* - phonetic filters could be interesting here too for candidate selection
*/
@Override
public Suggestion<? extends Entry<? extends Option>> innerExecute(String name, PhraseSuggestionContext suggestion, IndexSearcher searcher,
CharsRefBuilder spare) throws IOException {
public Suggestion<? extends Entry<? extends Option>> innerExecute(String name, PhraseSuggestionContext suggestion,
IndexSearcher searcher, CharsRefBuilder spare) throws IOException {
double realWordErrorLikelihood = suggestion.realworldErrorLikelyhood();
final PhraseSuggestion response = new PhraseSuggestion(name, suggestion.getSize());
final IndexReader indexReader = searcher.getIndexReader();
@ -90,14 +84,16 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
final String suggestField = suggestion.getField();
final Terms suggestTerms = MultiFields.getTerms(indexReader, suggestField);
if (gens.size() > 0 && suggestTerms != null) {
final NoisyChannelSpellChecker checker = new NoisyChannelSpellChecker(realWordErrorLikelihood, suggestion.getRequireUnigram(),
suggestion.getTokenLimit());
final BytesRef separator = suggestion.separator();
WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood,
separator);
Result checkerResult;
try (TokenStream stream = checker.tokenStream(suggestion.getAnalyzer(), suggestion.getText(), spare, suggestion.getField())) {
checkerResult = checker.getCorrections(stream,
new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), gens.toArray(new CandidateGenerator[gens.size()])),
suggestion.maxErrors(), suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize());
}
PhraseSuggestion.Entry resultEntry = buildResultEntry(suggestion, spare, checkerResult.cutoffScore);
@ -115,10 +111,10 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
// from the index for a correction, collateMatch is updated
final Map<String, Object> vars = suggestion.getCollateScriptParams();
vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
ScriptService scriptService = suggestion.getShardContext().getScriptService();
final ExecutableScript executable = scriptService.executable(collateScript, vars);
final BytesReference querySource = (BytesReference) executable.run();
final ParsedQuery parsedQuery = suggestion.getShardContext().parse(querySource);
collateMatch = Lucene.exists(searcher, parsedQuery.query());
}
if (!collateMatch && !collatePrune) {
@ -142,18 +138,14 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
return response;
}
private PhraseSuggestion.Entry buildResultEntry(SuggestionContext suggestion, CharsRefBuilder spare, double cutoffScore) {
spare.copyUTF8Bytes(suggestion.getText());
return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore);
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return PhraseSuggestionBuilder.PROTOTYPE;
}
}

@ -20,9 +20,9 @@ package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.util.ArrayList;
@ -31,26 +31,31 @@ import java.util.List;
import java.util.Map;
class PhraseSuggestionContext extends SuggestionContext {
private final BytesRef SEPARATOR = new BytesRef(" ");
private float maxErrors = 0.5f;
private BytesRef separator = SEPARATOR;
private float realworldErrorLikelihood = 0.95f;
private List<DirectCandidateGenerator> generators = new ArrayList<>();
private int gramSize = 1;
private float confidence = 1.0f;
static final boolean DEFAULT_COLLATE_PRUNE = false;
static final boolean DEFAULT_REQUIRE_UNIGRAM = true;
static final float DEFAULT_CONFIDENCE = 1.0f;
static final int DEFAULT_GRAM_SIZE = 1;
static final float DEFAULT_RWE_ERRORLIKELIHOOD = 0.95f;
static final float DEFAULT_MAX_ERRORS = 0.5f;
static final String DEFAULT_SEPARATOR = " ";
private float maxErrors = DEFAULT_MAX_ERRORS;
private BytesRef separator = new BytesRef(DEFAULT_SEPARATOR);
private float realworldErrorLikelihood = DEFAULT_RWE_ERRORLIKELIHOOD;
private int gramSize = DEFAULT_GRAM_SIZE;
private float confidence = DEFAULT_CONFIDENCE;
private int tokenLimit = NoisyChannelSpellChecker.DEFAULT_TOKEN_LIMIT;
private boolean requireUnigram = DEFAULT_REQUIRE_UNIGRAM;
private BytesRef preTag;
private BytesRef postTag;
private CompiledScript collateQueryScript;
private boolean prune = DEFAULT_COLLATE_PRUNE;
private List<DirectCandidateGenerator> generators = new ArrayList<>();
private Map<String, Object> collateScriptParams = new HashMap<>(1);
private WordScorer.WordScorerFactory scorer;
public PhraseSuggestionContext(QueryShardContext shardContext) {
super(PhraseSuggester.PROTOTYPE, shardContext);
}
public float maxErrors() {
@ -149,8 +154,6 @@ class PhraseSuggestionContext extends SuggestionContext {
public void postFilter(Analyzer postFilter) {
this.postFilter = postFilter;
}
}
public void setRequireUnigram(boolean requireUnigram) {
@ -198,7 +201,7 @@ class PhraseSuggestionContext extends SuggestionContext {
}
void setCollateScriptParams(Map<String, Object> collateScriptParams) {
this.collateScriptParams = new HashMap<>(collateScriptParams);
}
void setCollatePrune(boolean prune) {
@ -208,5 +211,4 @@ class PhraseSuggestionContext extends SuggestionContext {
boolean collatePrune() {
return prune;
}
}

@ -0,0 +1,105 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
import java.io.IOException;
public abstract class SmoothingModel implements NamedWriteable<SmoothingModel>, ToXContent {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getWriteableName());
innerToXContent(builder, params);
builder.endObject();
return builder;
}
@Override
public final boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SmoothingModel other = (SmoothingModel) obj;
return doEquals(other);
}
@Override
public final int hashCode() {
/*
* Override hashCode here and forward to an abstract method to force
* extensions of this class to override hashCode in the same way that we
* force them to override equals. This also prevents false positives in
* CheckStyle's EqualsHashCode check.
*/
return doHashCode();
}
protected abstract int doHashCode();
public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;
String fieldName = null;
SmoothingModel model = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
model = Laplace.PROTOTYPE.innerFromXContent(parseContext);
} else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext);
} else {
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[smoothing] unknown token [" + token + "] after [" + fieldName + "]");
}
}
return model;
}
public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException;
public abstract WordScorerFactory buildWordScorerFactory();
/**
* subtype specific implementation of "equals".
*/
protected abstract boolean doEquals(SmoothingModel other);
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
}
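A sketch of the serialization round trip this hierarchy enables, assuming BytesStreamOutput and StreamInput from this code base (error handling omitted; illustrative only):
    // Write a model, then read it back through the prototype instead of a
    // constructor, mirroring how NamedWriteable prototypes are used here.
    BytesStreamOutput out = new BytesStreamOutput();
    new Laplace(0.7).writeTo(out);
    SmoothingModel restored = Laplace.PROTOTYPE.readFrom(StreamInput.wrap(out.bytes()));
    assert restored.equals(new Laplace(0.7));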

@ -0,0 +1,129 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
import java.io.IOException;
import java.util.Objects;
/**
* A "stupid-backoff" smoothing model similar to <a
* href="http://en.wikipedia.org/wiki/Katz's_back-off_model"> Katz's
* Backoff</a>. This model is used as the default if no model is configured.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
* Smoothing</a> for details.
* </p>
*/
public final class StupidBackoff extends SmoothingModel {
/**
* Default discount parameter for {@link StupidBackoff} smoothing
*/
public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4;
public static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT);
private double discount = DEFAULT_BACKOFF_DISCOUNT;
private static final String NAME = "stupid_backoff";
private static final ParseField DISCOUNT_FIELD = new ParseField("discount");
static final ParseField PARSE_FIELD = new ParseField(NAME);
/**
* Creates a Stupid-Backoff smoothing model.
*
* @param discount
* the discount applied to lower-order n-grams when the higher-order n-gram does not exist
*/
public StupidBackoff(double discount) {
this.discount = discount;
}
/**
* @return the discount parameter of the model
*/
public double getDiscount() {
return this.discount;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(DISCOUNT_FIELD.getPreferredName(), discount);
return builder;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(discount);
}
@Override
public StupidBackoff readFrom(StreamInput in) throws IOException {
return new StupidBackoff(in.readDouble());
}
@Override
protected boolean doEquals(SmoothingModel other) {
StupidBackoff otherModel = (StupidBackoff) other;
return Objects.equals(discount, otherModel.discount);
}
@Override
protected final int doHashCode() {
return Objects.hash(discount);
}
@Override
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
XContentParser.Token token;
String fieldName = null;
double discount = DEFAULT_BACKOFF_DISCOUNT;
while ((token = parser.nextToken()) != Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
}
if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, DISCOUNT_FIELD)) {
discount = parser.doubleValue();
}
}
return new StupidBackoff(discount);
}
@Override
public WordScorerFactory buildWordScorerFactory() {
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
-> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
}
}
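As a reference point for the model above: stupid backoff does not renormalize; when the higher-order n-gram is unseen it simply falls back to the next lower order, multiplied by the discount. A hypothetical helper showing a single fallback step (the real scoring lives in StupidBackoffScorer):
    // Hypothetical helper illustrating stupid backoff with one fallback step.
    static double backoff(double trigramProb, double bigramProb, double discount) {
        return trigramProb > 0 ? trigramProb : discount * bigramProb;
    }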

@ -77,7 +77,7 @@ public abstract class WordScorer {
}
return candidate.stringDistance;
}
public double score(Candidate[] path, CandidateSet[] candidateSet, int at, int gramSize) throws IOException {
if (at == 0 || gramSize == 1) {
return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreUnigram(path[at]));
@ -87,21 +87,21 @@ public abstract class WordScorer {
return Math.log10(channelScore(path[at], candidateSet[at].originalTerm) * scoreTrigram(path[at], path[at - 1], path[at - 2]));
}
}
protected double scoreUnigram(Candidate word) throws IOException {
return (1.0 + frequency(word.term)) / (vocabluarySize + numTerms);
}
protected double scoreBigram(Candidate word, Candidate w_1) throws IOException {
return scoreUnigram(word);
}
protected double scoreTrigram(Candidate word, Candidate w_1, Candidate w_2) throws IOException {
return scoreBigram(word, w_1);
}
public interface WordScorerFactory {
WordScorer newScorer(IndexReader reader, Terms terms,
String field, double realWordLikelyhood, BytesRef separator) throws IOException;
}
}

@ -1,67 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import java.io.IOException;
public final class TermSuggestParser implements SuggestContextParser {
private TermSuggester suggester;
public TermSuggestParser(TermSuggester suggester) {
this.suggester = suggester;
}
@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexFieldDataService fieldDataService) throws IOException {
XContentParser.Token token;
String fieldName = null;
TermSuggestionContext suggestion = new TermSuggestionContext(suggester);
DirectSpellcheckerSettings settings = suggestion.getDirectSpellCheckerSettings();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
} else if (token.isValue()) {
parseTokenValue(parser, mapperService, fieldName, suggestion, settings, mapperService.getIndexSettings().getParseFieldMatcher());
} else {
throw new IllegalArgumentException("suggester[term] doesn't support field [" + fieldName + "]");
}
}
return suggestion;
}
private void parseTokenValue(XContentParser parser, MapperService mapperService, String fieldName, TermSuggestionContext suggestion,
DirectSpellcheckerSettings settings, ParseFieldMatcher parseFieldMatcher) throws IOException {
if (!(SuggestUtils.parseSuggestContext(parser, mapperService, fieldName, suggestion, parseFieldMatcher) || SuggestUtils.parseDirectSpellcheckerSettings(
parser, fieldName, settings, parseFieldMatcher))) {
throw new IllegalArgumentException("suggester[term] doesn't support [" + fieldName + "]");
}
}
}

@ -28,9 +28,9 @@ import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
@ -39,8 +39,11 @@ import java.util.List;
public final class TermSuggester extends Suggester<TermSuggestionContext> {
public static final TermSuggester PROTOTYPE = new TermSuggester();
@Override
public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare)
throws IOException {
DirectSpellChecker directSpellChecker = SuggestUtils.getDirectSpellChecker(suggestion.getDirectSpellCheckerSettings());
final IndexReader indexReader = searcher.getIndexReader();
TermSuggestion response = new TermSuggestion(
@ -63,12 +66,6 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
return response;
}
private List<Token> queryTerms(SuggestionContext suggestion, CharsRefBuilder spare) throws IOException {
final List<Token> result = new ArrayList<>();
final String field = suggestion.getField();
@ -76,7 +73,7 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
@Override
public void nextToken() {
Term term = new Term(field, BytesRef.deepCopyOf(fillBytesRef(new BytesRefBuilder())));
result.add(new Token(term, offsetAttr.startOffset(), offsetAttr.endOffset()));
}
}, spare);
return result;
@ -96,4 +93,9 @@ public final class TermSuggester extends Suggester<TermSuggestionContext> {
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return TermSuggestionBuilder.PROTOTYPE;
}
}

@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.Suggest.Suggestion;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
@ -37,6 +38,17 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
public static final Comparator<Suggestion.Entry.Option> SCORE = new Score();
public static final Comparator<Suggestion.Entry.Option> FREQUENCY = new Frequency();
public static final int TYPE = 1;
private SortBy sort;
public TermSuggestion() {
}
public TermSuggestion(String name, int size, SortBy sort) {
super(name, size);
this.sort = sort;
}
// Same behaviour as comparators in suggest module, but for SuggestedWord
// Highest score first, then highest freq first, then lowest term first
@ -79,17 +91,6 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
}
@Override
public int getType() {
return TYPE;
@ -110,13 +111,13 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
@Override
protected void innerReadFrom(StreamInput in) throws IOException {
super.innerReadFrom(in);
sort = SortBy.PROTOTYPE.readFrom(in);
}
@Override
public void innerWriteTo(StreamOutput out) throws IOException {
super.innerWriteTo(out);
sort.writeTo(out);
}
@Override

@ -16,11 +16,52 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import org.apache.lucene.search.spell.DirectSpellChecker;
import org.apache.lucene.search.spell.JaroWinklerDistance;
import org.apache.lucene.search.spell.LevensteinDistance;
import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
import org.apache.lucene.search.spell.NGramDistance;
import org.apache.lucene.search.spell.StringDistance;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.SuggestUtils;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_ACCURACY;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_EDITS;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_INSPECTIONS;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_TERM_FREQ;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_DOC_FREQ;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_WORD_LENGTH;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_PREFIX_LENGTH;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.ACCURACY;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MAX_EDITS;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MAX_INSPECTIONS;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MAX_TERM_FREQ;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MIN_DOC_FREQ;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.MIN_WORD_LENGTH;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.PREFIX_LENGTH;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.SORT;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.STRING_DISTANCE;
import static org.elasticsearch.search.suggest.SuggestUtils.Fields.SUGGEST_MODE;
/**
* Defines the actual suggest command. Each command uses the global options
@ -29,23 +70,39 @@ import java.io.IOException;
*/
public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuilder> {
private String suggestMode;
private Float accuracy;
private String sort;
private String stringDistance;
private Integer maxEdits;
private Integer maxInspections;
private Float maxTermFreq;
private Integer prefixLength;
private Integer minWordLength;
private Float minDocFreq;
public static final TermSuggestionBuilder PROTOTYPE = new TermSuggestionBuilder("_na_");
private static final String SUGGESTION_NAME = "term";
private SuggestMode suggestMode = SuggestMode.MISSING;
private float accuracy = DEFAULT_ACCURACY;
private SortBy sort = SortBy.SCORE;
private StringDistanceImpl stringDistance = StringDistanceImpl.INTERNAL;
private int maxEdits = DEFAULT_MAX_EDITS;
private int maxInspections = DEFAULT_MAX_INSPECTIONS;
private float maxTermFreq = DEFAULT_MAX_TERM_FREQ;
private int prefixLength = DEFAULT_PREFIX_LENGTH;
private int minWordLength = DEFAULT_MIN_WORD_LENGTH;
private float minDocFreq = DEFAULT_MIN_DOC_FREQ;
public TermSuggestionBuilder(String field) {
super(field);
}
/**
* @param name
* The name of this suggestion. This is a required parameter.
* internal copy constructor that copies over all class fields except the field name.
*/
public TermSuggestionBuilder(String name) {
super(name, "term");
private TermSuggestionBuilder(String field, TermSuggestionBuilder in) {
super(field, in);
suggestMode = in.suggestMode;
accuracy = in.accuracy;
sort = in.sort;
stringDistance = in.stringDistance;
maxEdits = in.maxEdits;
maxInspections = in.maxInspections;
maxTermFreq = in.maxTermFreq;
prefixLength = in.prefixLength;
minWordLength = in.minWordLength;
minDocFreq = in.minDocFreq;
}
/**
@ -61,11 +118,19 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* tokens in the suggest text.
* </ol>
*/
public TermSuggestionBuilder suggestMode(String suggestMode) {
public TermSuggestionBuilder suggestMode(SuggestMode suggestMode) {
Objects.requireNonNull(suggestMode, "suggestMode must not be null");
this.suggestMode = suggestMode;
return this;
}
/**
* Get the suggest mode setting.
*/
public SuggestMode suggestMode() {
return suggestMode;
}
/**
* Sets how similar the suggested terms at least need to be compared to the
* original suggest text tokens. A value between 0 and 1 can be specified.
@ -74,11 +139,21 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* <p>
* Default is <tt>0.5</tt>
*/
public TermSuggestionBuilder setAccuracy(float accuracy) {
public TermSuggestionBuilder accuracy(float accuracy) {
if (accuracy < 0.0f || accuracy > 1.0f) {
throw new IllegalArgumentException("accuracy must be between 0 and 1");
}
this.accuracy = accuracy;
return this;
}
/**
* Get the accuracy setting.
*/
public float accuracy() {
return accuracy;
}
/**
* Sets how to sort the suggest terms per suggest text token. Two possible
* values:
@ -86,19 +161,27 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* <li><code>score</code> - Sort should first be based on score, then
* document frequency and then the term itself.
* <li><code>frequency</code> - Sort should first be based on document
* frequency, then scotr and then the term itself.
* frequency, then score and then the term itself.
* </ol>
* <p>
* What the score is depends on the suggester being used.
*/
public TermSuggestionBuilder sort(String sort) {
public TermSuggestionBuilder sort(SortBy sort) {
Objects.requireNonNull(sort, "sort must not be null");
this.sort = sort;
return this;
}
/**
* Get the sort setting.
*/
public SortBy sort() {
return sort;
}
/**
* Sets what string distance implementation to use for comparing how similar
* suggested terms are. Four possible values can be specified:
* suggested terms are. Five possible values can be specified:
* <ol>
* <li><code>internal</code> - This is the default and is based on
* <code>damerau_levenshtein</code>, but highly optimized for comparing
@ -113,32 +196,60 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* n-grams.
* </ol>
*/
public TermSuggestionBuilder stringDistance(String stringDistance) {
public TermSuggestionBuilder stringDistance(StringDistanceImpl stringDistance) {
Objects.requireNonNull(stringDistance, "stringDistance must not be null");
this.stringDistance = stringDistance;
return this;
}
/**
* Get the string distance implementation setting.
*/
public StringDistanceImpl stringDistance() {
return stringDistance;
}
/**
* Sets the maximum edit distance candidate suggestions can have in order to
* be considered as a suggestion. Can only be a value between 1 and 2. Any
* other value results in a bad request error being thrown. Defaults to
* <tt>2</tt>.
*/
public TermSuggestionBuilder maxEdits(Integer maxEdits) {
public TermSuggestionBuilder maxEdits(int maxEdits) {
if (maxEdits < 1 || maxEdits > 2) {
throw new IllegalArgumentException("maxEdits must be between 1 and 2");
}
this.maxEdits = maxEdits;
return this;
}
/**
* Get the maximum edit distance setting.
*/
public int maxEdits() {
return maxEdits;
}
/**
* A factor that is used to multiply with the size in order to inspect more
* candidate suggestions. Can improve accuracy at the cost of performance.
* Defaults to <tt>5</tt>.
*/
public TermSuggestionBuilder maxInspections(Integer maxInspections) {
public TermSuggestionBuilder maxInspections(int maxInspections) {
if (maxInspections < 0) {
throw new IllegalArgumentException("maxInspections must be positive");
}
this.maxInspections = maxInspections;
return this;
}
/**
* Get the factor used to inspect more candidate suggestions.
*/
public int maxInspections() {
return maxInspections;
}
/**
* Sets a maximum threshold in number of documents a suggest text token can
* exist in order to be corrected. Can be a relative percentage number (e.g
@ -151,10 +262,23 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* also improves the suggest performance.
*/
public TermSuggestionBuilder maxTermFreq(float maxTermFreq) {
if (maxTermFreq < 0.0f) {
throw new IllegalArgumentException("maxTermFreq must be positive");
}
if (maxTermFreq > 1.0f && maxTermFreq != Math.floor(maxTermFreq)) {
throw new IllegalArgumentException("if maxTermFreq is greater than 1, it must not be a fraction");
}
this.maxTermFreq = maxTermFreq;
return this;
}
/**
* Get the maximum term frequency threshold setting.
*/
public float maxTermFreq() {
return maxTermFreq;
}
/**
* Sets the number of minimal prefix characters that must match in order be
* a candidate suggestion. Defaults to 1. Increasing this number improves
@ -162,19 +286,39 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* terms.
*/
public TermSuggestionBuilder prefixLength(int prefixLength) {
if (prefixLength < 0) {
throw new IllegalArgumentException("prefixLength must be positive");
}
this.prefixLength = prefixLength;
return this;
}
/**
* Get the minimum prefix length that must match.
*/
public int prefixLength() {
return prefixLength;
}
/**
* The minimum length a suggest text term must have in order to be
* corrected. Defaults to <tt>4</tt>.
*/
public TermSuggestionBuilder minWordLength(int minWordLength) {
if (minWordLength < 1) {
throw new IllegalArgumentException("minWordLength must be greater or equal to 1");
}
this.minWordLength = minWordLength;
return this;
}
/**
* Get the minimum length a suggest text term must have in order to be corrected.
*/
public int minWordLength() {
return minWordLength;
}
/**
* Sets a minimal threshold in number of documents a suggested term should
* appear in. This can be specified as an absolute number or as a relative
@ -183,42 +327,294 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
* value higher than 1 is specified then the number cannot be fractional.
*/
public TermSuggestionBuilder minDocFreq(float minDocFreq) {
if (minDocFreq < 0.0f) {
throw new IllegalArgumentException("minDocFreq must be positive");
}
if (minDocFreq > 1.0f && minDocFreq != Math.floor(minDocFreq)) {
throw new IllegalArgumentException("if minDocFreq is greater than 1, it must not be a fraction");
}
this.minDocFreq = minDocFreq;
return this;
}
/**
* Get the minimal threshold for the number of documents a suggested term
* should appear in.
*/
public float minDocFreq() {
return minDocFreq;
}
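With every option now strongly typed and validated on set, a term suggestion can be assembled fluently. A hedged usage sketch; the field name "title" and all option values are illustrative only:

    TermSuggestionBuilder suggestion = new TermSuggestionBuilder("title") // hypothetical field
            .suggestMode(SuggestMode.POPULAR)
            .sort(SortBy.FREQUENCY)
            .stringDistance(StringDistanceImpl.DAMERAU_LEVENSHTEIN)
            .accuracy(0.6f)   // must lie in [0, 1]
            .maxEdits(2)      // must be 1 or 2
            .prefixLength(2)
            .minWordLength(4);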
@Override
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
if (suggestMode != null) {
builder.field("suggest_mode", suggestMode);
}
if (accuracy != null) {
builder.field("accuracy", accuracy);
}
if (sort != null) {
builder.field("sort", sort);
}
if (stringDistance != null) {
builder.field("string_distance", stringDistance);
}
if (maxEdits != null) {
builder.field("max_edits", maxEdits);
}
if (maxInspections != null) {
builder.field("max_inspections", maxInspections);
}
if (maxTermFreq != null) {
builder.field("max_term_freq", maxTermFreq);
}
if (prefixLength != null) {
builder.field("prefix_length", prefixLength);
}
if (minWordLength != null) {
builder.field("min_word_length", minWordLength);
}
if (minDocFreq != null) {
builder.field("min_doc_freq", minDocFreq);
}
builder.field(SUGGEST_MODE.getPreferredName(), suggestMode);
builder.field(ACCURACY.getPreferredName(), accuracy);
builder.field(SORT.getPreferredName(), sort);
builder.field(STRING_DISTANCE.getPreferredName(), stringDistance);
builder.field(MAX_EDITS.getPreferredName(), maxEdits);
builder.field(MAX_INSPECTIONS.getPreferredName(), maxInspections);
builder.field(MAX_TERM_FREQ.getPreferredName(), maxTermFreq);
builder.field(PREFIX_LENGTH.getPreferredName(), prefixLength);
builder.field(MIN_WORD_LENGTH.getPreferredName(), minWordLength);
builder.field(MIN_DOC_FREQ.getPreferredName(), minDocFreq);
return builder;
}
@Override
protected TermSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
TermSuggestionBuilder tmpSuggestion = new TermSuggestionBuilder("_na_");
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;
String currentFieldName = null;
String fieldname = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.ANALYZER_FIELD)) {
tmpSuggestion.analyzer(parser.text());
} else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.FIELDNAME_FIELD)) {
fieldname = parser.text();
} else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SIZE_FIELD)) {
tmpSuggestion.size(parser.intValue());
} else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SHARDSIZE_FIELD)) {
tmpSuggestion.shardSize(parser.intValue());
} else if (parseFieldMatcher.match(currentFieldName, SUGGEST_MODE)) {
tmpSuggestion.suggestMode(SuggestMode.resolve(parser.text()));
} else if (parseFieldMatcher.match(currentFieldName, ACCURACY)) {
tmpSuggestion.accuracy(parser.floatValue());
} else if (parseFieldMatcher.match(currentFieldName, SORT)) {
tmpSuggestion.sort(SortBy.resolve(parser.text()));
} else if (parseFieldMatcher.match(currentFieldName, STRING_DISTANCE)) {
tmpSuggestion.stringDistance(StringDistanceImpl.resolve(parser.text()));
} else if (parseFieldMatcher.match(currentFieldName, MAX_EDITS)) {
tmpSuggestion.maxEdits(parser.intValue());
} else if (parseFieldMatcher.match(currentFieldName, MAX_INSPECTIONS)) {
tmpSuggestion.maxInspections(parser.intValue());
} else if (parseFieldMatcher.match(currentFieldName, MAX_TERM_FREQ)) {
tmpSuggestion.maxTermFreq(parser.floatValue());
} else if (parseFieldMatcher.match(currentFieldName, PREFIX_LENGTH)) {
tmpSuggestion.prefixLength(parser.intValue());
} else if (parseFieldMatcher.match(currentFieldName, MIN_WORD_LENGTH)) {
tmpSuggestion.minWordLength(parser.intValue());
} else if (parseFieldMatcher.match(currentFieldName, MIN_DOC_FREQ)) {
tmpSuggestion.minDocFreq(parser.floatValue());
} else {
throw new ParsingException(parser.getTokenLocation(),
"suggester[term] doesn't support field [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "suggester[term] parsing failed on [" + currentFieldName + "]");
}
}
// now we should have field name, check and copy fields over to the suggestion builder we return
if (fieldname == null) {
throw new ElasticsearchParseException(
"the required field option [" + SuggestUtils.Fields.FIELD.getPreferredName() + "] is missing");
}
return new TermSuggestionBuilder(fieldname, tmpSuggestion);
}
@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
TermSuggestionContext suggestionContext = new TermSuggestionContext(context);
// copy over common settings to the suggestion context
populateCommonFields(context.getMapperService(), suggestionContext);
// Transfers the builder settings to the target TermSuggestionContext
DirectSpellcheckerSettings settings = suggestionContext.getDirectSpellCheckerSettings();
settings.accuracy(accuracy);
settings.maxEdits(maxEdits);
settings.maxInspections(maxInspections);
settings.maxTermFreq(maxTermFreq);
settings.minDocFreq(minDocFreq);
settings.minWordLength(minWordLength);
settings.prefixLength(prefixLength);
settings.sort(sort);
settings.stringDistance(stringDistance.toLucene());
settings.suggestMode(suggestMode.toLucene());
return suggestionContext;
}
@Override
public String getWriteableName() {
return SUGGESTION_NAME;
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
suggestMode.writeTo(out);
out.writeFloat(accuracy);
sort.writeTo(out);
stringDistance.writeTo(out);
out.writeVInt(maxEdits);
out.writeVInt(maxInspections);
out.writeFloat(maxTermFreq);
out.writeVInt(prefixLength);
out.writeVInt(minWordLength);
out.writeFloat(minDocFreq);
}
@Override
public TermSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
TermSuggestionBuilder builder = new TermSuggestionBuilder(field);
builder.suggestMode = SuggestMode.PROTOTYPE.readFrom(in);
builder.accuracy = in.readFloat();
builder.sort = SortBy.PROTOTYPE.readFrom(in);
builder.stringDistance = StringDistanceImpl.PROTOTYPE.readFrom(in);
builder.maxEdits = in.readVInt();
builder.maxInspections = in.readVInt();
builder.maxTermFreq = in.readFloat();
builder.prefixLength = in.readVInt();
builder.minWordLength = in.readVInt();
builder.minDocFreq = in.readFloat();
return builder;
}
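doWriteTo and doReadFrom define the term-specific part of the wire format; the field name is written by the base class, which is why it is passed back in explicitly. A minimal round-trip sketch under that assumption ("body" is an illustrative field name):

    TermSuggestionBuilder original = new TermSuggestionBuilder("body").maxEdits(1);
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        original.doWriteTo(out);
        try (StreamInput in = StreamInput.wrap(out.bytes())) {
            TermSuggestionBuilder copy = TermSuggestionBuilder.PROTOTYPE.doReadFrom(in, "body");
            assert copy.equals(original) && copy.hashCode() == original.hashCode();
        }
    }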
@Override
protected boolean doEquals(TermSuggestionBuilder other) {
return Objects.equals(suggestMode, other.suggestMode) &&
Objects.equals(accuracy, other.accuracy) &&
Objects.equals(sort, other.sort) &&
Objects.equals(stringDistance, other.stringDistance) &&
Objects.equals(maxEdits, other.maxEdits) &&
Objects.equals(maxInspections, other.maxInspections) &&
Objects.equals(maxTermFreq, other.maxTermFreq) &&
Objects.equals(prefixLength, other.prefixLength) &&
Objects.equals(minWordLength, other.minWordLength) &&
Objects.equals(minDocFreq, other.minDocFreq);
}
@Override
protected int doHashCode() {
return Objects.hash(suggestMode, accuracy, sort, stringDistance, maxEdits, maxInspections,
maxTermFreq, prefixLength, minWordLength, minDocFreq);
}
/** An enum representing the valid suggest modes. */
public enum SuggestMode implements Writeable<SuggestMode> {
/** Only suggest terms in the suggest text that aren't in the index. This is the default. */
MISSING {
@Override
public org.apache.lucene.search.spell.SuggestMode toLucene() {
return org.apache.lucene.search.spell.SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
}
},
/** Only suggest terms that occur in more docs than the original suggest text term. */
POPULAR {
@Override
public org.apache.lucene.search.spell.SuggestMode toLucene() {
return org.apache.lucene.search.spell.SuggestMode.SUGGEST_MORE_POPULAR;
}
},
/** Suggest any matching suggest terms based on tokens in the suggest text. */
ALWAYS {
@Override
public org.apache.lucene.search.spell.SuggestMode toLucene() {
return org.apache.lucene.search.spell.SuggestMode.SUGGEST_ALWAYS;
}
};
protected static SuggestMode PROTOTYPE = MISSING;
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
@Override
public SuggestMode readFrom(final StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown SuggestMode ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
public static SuggestMode resolve(final String str) {
Objects.requireNonNull(str, "Input string is null");
return valueOf(str.toUpperCase(Locale.ROOT));
}
public abstract org.apache.lucene.search.spell.SuggestMode toLucene();
}
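resolve() and toLucene() bracket the enum between the REST layer and Lucene; for example:

    SuggestMode mode = SuggestMode.resolve("popular"); // case-insensitive, normalized via Locale.ROOT
    assert mode == SuggestMode.POPULAR;
    assert mode.toLucene() == org.apache.lucene.search.spell.SuggestMode.SUGGEST_MORE_POPULAR;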
/** An enum representing the valid string edit distance algorithms for determining suggestions. */
public enum StringDistanceImpl implements Writeable<StringDistanceImpl> {
/** This is the default and is based on <code>damerau_levenshtein</code>, but highly optimized
* for comparing string distance for terms inside the index. */
INTERNAL {
@Override
public StringDistance toLucene() {
return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
}
},
/** String distance algorithm based on Damerau-Levenshtein algorithm. */
DAMERAU_LEVENSHTEIN {
@Override
public StringDistance toLucene() {
return new LuceneLevenshteinDistance();
}
},
/** String distance algorithm based on the Levenshtein edit distance algorithm. */
LEVENSTEIN {
@Override
public StringDistance toLucene() {
return new LevensteinDistance();
}
},
/** String distance algorithm based on Jaro-Winkler algorithm. */
JAROWINKLER {
@Override
public StringDistance toLucene() {
return new JaroWinklerDistance();
}
},
/** String distance algorithm based on character n-grams. */
NGRAM {
@Override
public StringDistance toLucene() {
return new NGramDistance();
}
};
protected static StringDistanceImpl PROTOTYPE = INTERNAL;
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(ordinal());
}
@Override
public StringDistanceImpl readFrom(final StreamInput in) throws IOException {
int ordinal = in.readVInt();
if (ordinal < 0 || ordinal >= values().length) {
throw new IOException("Unknown StringDistanceImpl ordinal [" + ordinal + "]");
}
return values()[ordinal];
}
public static StringDistanceImpl resolve(final String str) {
Objects.requireNonNull(str, "Input string is null");
final String distanceVal = str.toLowerCase(Locale.ROOT); // fixed locale, consistent with SuggestMode.resolve
switch (distanceVal) {
case "internal":
return INTERNAL;
case "damerau_levenshtein":
case "damerauLevenshtein":
return DAMERAU_LEVENSHTEIN;
case "levenstein":
return LEVENSTEIN;
case "ngram":
return NGRAM;
case "jarowinkler":
return JAROWINKLER;
default: throw new IllegalArgumentException("Illegal distance option " + str);
}
}
public abstract StringDistance toLucene();
}
}
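The distance enum follows the same pattern; an illustrative mapping based on the cases above:

    StringDistance internal = StringDistanceImpl.resolve("internal").toLucene(); // DirectSpellChecker.INTERNAL_LEVENSHTEIN
    StringDistance ngram = StringDistanceImpl.resolve("ngram").toLucene();       // a new NGramDistance instance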

View File

@ -18,20 +18,25 @@
*/
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
final class TermSuggestionContext extends SuggestionContext {
private final DirectSpellcheckerSettings settings = new DirectSpellcheckerSettings();
public TermSuggestionContext(Suggester<? extends TermSuggestionContext> suggester) {
super(suggester);
public TermSuggestionContext(QueryShardContext shardContext) {
super(TermSuggester.PROTOTYPE, shardContext);
}
public DirectSpellcheckerSettings getDirectSpellCheckerSettings() {
return settings;
}
}
@Override
public String toString() {
return "SpellcheckerSettings" + settings + ", BaseSettings" + super.toString();
}
}

View File

@ -45,7 +45,7 @@ public class MultiSearchRequestTests extends ESTestCase {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser()));
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch1.json");
MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null,
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null);
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null, null);
assertThat(request.requests().size(), equalTo(8));
assertThat(request.requests().get(0).indices()[0], equalTo("test"));
assertThat(request.requests().get(0).indicesOptions(), equalTo(IndicesOptions.fromOptions(true, true, true, true, IndicesOptions.strictExpandOpenAndForbidClosed())));
@ -72,7 +72,7 @@ public class MultiSearchRequestTests extends ESTestCase {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser()));
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch2.json");
MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null,
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null);
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null, null);
assertThat(request.requests().size(), equalTo(5));
assertThat(request.requests().get(0).indices()[0], equalTo("test"));
assertThat(request.requests().get(0).types().length, equalTo(0));
@ -91,7 +91,7 @@ public class MultiSearchRequestTests extends ESTestCase {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser()));
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch3.json");
MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null,
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null);
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null, null);
assertThat(request.requests().size(), equalTo(4));
assertThat(request.requests().get(0).indices()[0], equalTo("test0"));
assertThat(request.requests().get(0).indices()[1], equalTo("test1"));
@ -111,7 +111,7 @@ public class MultiSearchRequestTests extends ESTestCase {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser()));
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch4.json");
MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), false, null, null,
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null);
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null, null);
assertThat(request.requests().size(), equalTo(3));
assertThat(request.requests().get(0).indices()[0], equalTo("test0"));
assertThat(request.requests().get(0).indices()[1], equalTo("test1"));
@ -133,7 +133,7 @@ public class MultiSearchRequestTests extends ESTestCase {
IndicesQueriesRegistry registry = new IndicesQueriesRegistry(Settings.EMPTY, singletonMap("match_all", new MatchAllQueryParser()));
byte[] data = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/search/simple-msearch5.json");
MultiSearchRequest request = RestMultiSearchAction.parseRequest(new MultiSearchRequest(), new BytesArray(data), true, null, null,
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null);
null, null, IndicesOptions.strictExpandOpenAndForbidClosed(), true, registry, ParseFieldMatcher.EMPTY, null, null);
assertThat(request.requests().size(), equalTo(3));
assertThat(request.requests().get(0).indices()[0], equalTo("test0"));
assertThat(request.requests().get(0).indices()[1], equalTo("test1"));

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.io.stream;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
/**
* Abstract class offering base functionality for testing {@link Writeable} enums.
*/
public abstract class AbstractWriteableEnumTestCase extends ESTestCase {
/**
* Test that the ordinals for the enum are consistent (i.e. the order hasn't changed)
* because writing an enum to a stream often uses the ordinal value.
*/
public abstract void testValidOrdinals();
/**
* Test that the conversion from a string to enum is correct.
*/
public abstract void testFromString();
/**
* Test that the correct enum value is produced from the serialized value in the {@link StreamInput}.
*/
public abstract void testReadFrom() throws IOException;
/**
* Test that the correct serialized value is produced from the {@link StreamOutput}.
*/
public abstract void testWriteTo() throws IOException;
// a convenience method for testing the write of a writeable enum
protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
writeableEnum.writeTo(out);
try (StreamInput in = StreamInput.wrap(out.bytes())) {
assertThat(in.readVInt(), equalTo(ordinal));
}
}
}
// a convenience method for testing the read of a writeable enum
protected static <T extends Writeable<T>> void assertReadFromStream(final int ordinal, final Writeable<T> expected) throws IOException {
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.writeVInt(ordinal);
try (StreamInput in = StreamInput.wrap(out.bytes())) {
assertThat(expected.readFrom(in), equalTo(expected));
}
}
}
}
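A sketch of the intended concrete subclass, here for SortBy; the ordinal values (SCORE=0, FREQUENCY=1) and the resolve() helper are assumptions modeled on the enum patterns elsewhere in this commit (imports elided):

    public class SortByTests extends AbstractWriteableEnumTestCase {
        @Override
        public void testValidOrdinals() {
            assertThat(SortBy.SCORE.ordinal(), equalTo(0));
            assertThat(SortBy.FREQUENCY.ordinal(), equalTo(1));
        }

        @Override
        public void testFromString() {
            assertThat(SortBy.resolve("score"), equalTo(SortBy.SCORE));
            assertThat(SortBy.resolve("frequency"), equalTo(SortBy.FREQUENCY));
        }

        @Override
        public void testWriteTo() throws IOException {
            assertWriteToStream(SortBy.SCORE, 0);     // ordinal 0 expected on the wire
            assertWriteToStream(SortBy.FREQUENCY, 1); // ordinal 1 expected on the wire
        }

        @Override
        public void testReadFrom() throws IOException {
            assertReadFromStream(0, SortBy.SCORE);
            assertReadFromStream(1, SortBy.FREQUENCY);
        }
    }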

View File

@ -63,8 +63,8 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.engine.MockEngineFactory;

View File

@ -139,9 +139,9 @@ public class SuggestStatsIT extends ESIntegTestCase {
private SuggestRequestBuilder addSuggestions(SuggestRequestBuilder request, int i) {
for (int s = 0; s < randomIntBetween(2, 10); s++) {
if (randomBoolean()) {
request.addSuggestion(new PhraseSuggestionBuilder("s" + s).field("f").text("test" + i + " test" + (i - 1)));
request.addSuggestion("s" + s, new PhraseSuggestionBuilder("f").text("test" + i + " test" + (i - 1)));
} else {
request.addSuggestion(new TermSuggestionBuilder("s" + s).field("f").text("test" + i));
request.addSuggestion("s" + s, new TermSuggestionBuilder("f").text("test" + i));
}
}
return request;
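The suggestion name now binds at the request level instead of inside the builder. An illustrative call with the new signature (index, suggestion name, and field are placeholders):

    client().prepareSuggest("index")
            .addSuggestion("my-suggestion", new TermSuggestionBuilder("f").text("test"))
            .execute().actionGet();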

View File

@ -754,7 +754,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
}
private static SuggestRequestBuilder suggest(String... indices) {
return client().prepareSuggest(indices).addSuggestion(SuggestBuilders.termSuggestion("name").field("a"));
return client().prepareSuggest(indices).addSuggestion("name", SuggestBuilders.termSuggestion("a"));
}
private static GetAliasesRequestBuilder getAliases(String... indices) {

View File

@ -48,7 +48,7 @@ public class SearchModuleTests extends ModuleTestCase {
}
try {
module.registerSuggester("term", PhraseSuggester.class);
module.registerSuggester("term", PhraseSuggester.PROTOTYPE);
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [term]");
}
@ -56,9 +56,9 @@ public class SearchModuleTests extends ModuleTestCase {
public void testRegisterSuggester() {
SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
module.registerSuggester("custom", CustomSuggester.class);
module.registerSuggester("custom", CustomSuggester.PROTOTYPE);
try {
module.registerSuggester("custom", CustomSuggester.class);
module.registerSuggester("custom", CustomSuggester.PROTOTYPE);
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [custom]");
}

View File

@ -77,8 +77,8 @@ import org.elasticsearch.search.searchafter.SearchAfterBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.suggest.SuggestBuilderTests;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
@ -110,6 +110,8 @@ public class SearchSourceBuilderTests extends ESTestCase {
private static AggregatorParsers aggParsers;
private static Suggesters suggesters;
private static String[] currentTypes;
private static ParseFieldMatcher parseFieldMatcher;
@ -167,7 +169,6 @@ public class SearchSourceBuilderTests extends ESTestCase {
new EnvironmentModule(new Environment(settings)), settingsModule,
new ThreadPoolModule(new ThreadPool(settings)),
scriptModule, new IndicesModule() {
@Override
protected void configure() {
bindMapperExtension();
@ -177,13 +178,8 @@ public class SearchSourceBuilderTests extends ESTestCase {
protected void configureSearch() {
// Skip me
}
@Override
protected void configureSuggesters() {
// Skip me
}
},
new IndexSettingsModule(index, settings),
new AbstractModule() {
@Override
protected void configure() {
@ -194,6 +190,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
}
).createInjector();
aggParsers = injector.getInstance(AggregatorParsers.class);
suggesters = injector.getInstance(Suggesters.class);
// create some random type with some default field, those types will
// stick around for all of the subclasses
currentTypes = new String[randomIntBetween(0, 5)];
@ -410,9 +407,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
builder.highlighter(HighlightBuilderTests.randomHighlighterBuilder());
}
if (randomBoolean()) {
// NORELEASE need a random suggest builder method
builder.suggest(new SuggestBuilder().setText(randomAsciiOfLengthBetween(1, 5)).addSuggestion(
SuggestBuilders.termSuggestion(randomAsciiOfLengthBetween(1, 5))));
builder.suggest(SuggestBuilderTests.randomSuggestBuilder());
}
if (randomBoolean()) {
// NORELEASE need a random inner hits builder method
@ -460,7 +455,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
if (randomBoolean()) {
parser.nextToken(); // sometimes we move it on the START_OBJECT to test the embedded case
}
SearchSourceBuilder newBuilder = SearchSourceBuilder.parseSearchSource(parser, parseContext, aggParsers);
SearchSourceBuilder newBuilder = SearchSourceBuilder.parseSearchSource(parser, parseContext, aggParsers, suggesters);
assertNull(parser.nextToken());
assertEquals(testBuilder, newBuilder);
assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
@ -498,14 +493,17 @@ public class SearchSourceBuilderTests extends ESTestCase {
assertTrue("source builder is not equal to self", secondBuilder.equals(secondBuilder));
assertTrue("source builder is not equal to its copy", firstBuilder.equals(secondBuilder));
assertTrue("source builder is not symmetric", secondBuilder.equals(firstBuilder));
assertThat("source builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode()));
assertThat("source builder copy's hashcode is different from original hashcode",
secondBuilder.hashCode(), equalTo(firstBuilder.hashCode()));
SearchSourceBuilder thirdBuilder = copyBuilder(secondBuilder);
assertTrue("source builder is not equal to self", thirdBuilder.equals(thirdBuilder));
assertTrue("source builder is not equal to its copy", secondBuilder.equals(thirdBuilder));
assertThat("source builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
assertThat("source builder copy's hashcode is different from original hashcode",
secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder));
assertThat("source builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
assertThat("source builder copy's hashcode is different from original hashcode",
firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder));
}
@ -525,7 +523,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
String restContent = " { \"_source\": { \"includes\": \"include\", \"excludes\": \"*.field2\"}}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.parseSearchSource(parser, createParseContext(parser),
aggParsers);
aggParsers, suggesters);
assertArrayEquals(new String[]{"*.field2" }, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{"include" }, searchSourceBuilder.fetchSource().includes());
}
@ -534,7 +532,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
String restContent = " { \"_source\": false}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.parseSearchSource(parser, createParseContext(parser),
aggParsers);
aggParsers, suggesters);
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().excludes());
assertArrayEquals(new String[]{}, searchSourceBuilder.fetchSource().includes());
assertFalse(searchSourceBuilder.fetchSource().fetchSource());
@ -547,7 +545,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
String restContent = " { \"sort\": \"foo\"}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.parseSearchSource(parser, createParseContext(parser),
aggParsers);
aggParsers, suggesters);
assertEquals(1, searchSourceBuilder.sorts().size());
assertEquals("{\"foo\":{\"order\":\"asc\"}}", searchSourceBuilder.sorts().get(0).toUtf8());
}
@ -563,7 +561,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
" ]}";
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.parseSearchSource(parser, createParseContext(parser),
aggParsers);
aggParsers, suggesters);
assertEquals(5, searchSourceBuilder.sorts().size());
assertEquals("{\"post_date\":{\"order\":\"asc\"}}", searchSourceBuilder.sorts().get(0).toUtf8());
assertEquals("\"user\"", searchSourceBuilder.sorts().get(1).toUtf8());

View File

@ -109,14 +109,17 @@ public class QueryRescoreBuilderTests extends ESTestCase {
assertTrue("rescore builder is not equal to self", secondBuilder.equals(secondBuilder));
assertTrue("rescore builder is not equal to its copy", firstBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder));
assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode()));
assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(),
equalTo(firstBuilder.hashCode()));
RescoreBuilder<?> thirdBuilder = serializedCopy(secondBuilder);
assertTrue("rescore builder is not equal to self", thirdBuilder.equals(thirdBuilder));
assertTrue("rescore builder is not equal to its copy", secondBuilder.equals(thirdBuilder));
assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
assertThat("rescore builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(),
equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder));
assertThat("rescore builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
assertThat("rescore builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(),
equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder));
}
@ -172,7 +175,8 @@ public class QueryRescoreBuilderTests extends ESTestCase {
QueryRescoreContext rescoreContext = rescoreBuilder.build(mockShardContext);
XContentParser parser = createParser(rescoreBuilder);
QueryRescoreContext parsedRescoreContext = (QueryRescoreContext) new RescoreParseElement().parseSingleRescoreContext(parser, mockShardContext);
QueryRescoreContext parsedRescoreContext = (QueryRescoreContext) new RescoreParseElement().parseSingleRescoreContext(parser,
mockShardContext);
assertNotSame(rescoreContext, parsedRescoreContext);
assertEquals(rescoreContext.window(), parsedRescoreContext.window());
assertEquals(rescoreContext.query(), parsedRescoreContext.query());
@ -315,7 +319,8 @@ public class QueryRescoreBuilderTests extends ESTestCase {
* create random shape that is put under test
*/
public static org.elasticsearch.search.rescore.QueryRescorerBuilder randomRescoreBuilder() {
QueryBuilder<MatchAllQueryBuilder> queryBuilder = new MatchAllQueryBuilder().boost(randomFloat()).queryName(randomAsciiOfLength(20));
QueryBuilder<MatchAllQueryBuilder> queryBuilder = new MatchAllQueryBuilder().boost(randomFloat())
.queryName(randomAsciiOfLength(20));
org.elasticsearch.search.rescore.QueryRescorerBuilder rescorer = new
org.elasticsearch.search.rescore.QueryRescorerBuilder(queryBuilder);
if (randomBoolean()) {

View File

@ -0,0 +1,248 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptServiceTests.TestEngineService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collections;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBuilder<SB>> extends ESTestCase {
private static final int NUMBER_OF_TESTBUILDERS = 20;
protected static NamedWriteableRegistry namedWriteableRegistry;
protected static IndicesQueriesRegistry queriesRegistry;
protected static ParseFieldMatcher parseFieldMatcher;
protected static Suggesters suggesters;
/**
* setup for the whole base test class
*/
@BeforeClass
public static void init() throws IOException {
Path genericConfigFolder = createTempDir();
Settings baseSettings = settingsBuilder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder)
.build();
Environment environment = new Environment(baseSettings);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry
.ScriptEngineRegistration(TestEngineService.class, TestEngineService.TYPES)));
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
suggesters = new Suggesters(Collections.emptyMap());
namedWriteableRegistry = new NamedWriteableRegistry();
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE);
queriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry();
parseFieldMatcher = ParseFieldMatcher.STRICT;
}
@AfterClass
public static void afterClass() throws Exception {
namedWriteableRegistry = null;
suggesters = null;
queriesRegistry = null;
}
/**
* Test serialization and deserialization of the suggestion builder
*/
public void testSerialization() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
SB original = randomTestBuilder();
SB deserialized = serializedCopy(original);
assertEquals(deserialized, original);
assertEquals(deserialized.hashCode(), original.hashCode());
assertNotSame(deserialized, original);
}
}
/**
* returns a random suggestion builder, setting the common options randomly
*/
protected SB randomTestBuilder() {
SB randomSuggestion = randomSuggestionBuilder();
return randomSuggestion;
}
public static void setCommonPropertiesOnRandomBuilder(SuggestionBuilder<?> randomSuggestion) {
randomSuggestion.text(randomAsciiOfLengthBetween(2, 20)); // have to set the text because we don't know if the global text was set
maybeSet(randomSuggestion::prefix, randomAsciiOfLengthBetween(2, 20));
maybeSet(randomSuggestion::regex, randomAsciiOfLengthBetween(2, 20));
maybeSet(randomSuggestion::analyzer, randomAsciiOfLengthBetween(2, 20));
maybeSet(randomSuggestion::size, randomIntBetween(1, 20));
maybeSet(randomSuggestion::shardSize, randomIntBetween(1, 20));
}
/**
* create a randomized {@link SuggestionBuilder} that is used in further tests
*/
protected abstract SB randomSuggestionBuilder();
/**
* Test equality and hashCode properties
*/
public void testEqualsAndHashcode() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
SB firstBuilder = randomTestBuilder();
assertFalse("suggestion builder is equal to null", firstBuilder.equals(null));
assertFalse("suggestion builder is equal to incompatible type", firstBuilder.equals(""));
assertTrue("suggestion builder is not equal to self", firstBuilder.equals(firstBuilder));
assertThat("same suggestion builder's hashcode returns different values if called multiple times", firstBuilder.hashCode(),
equalTo(firstBuilder.hashCode()));
final SB mutate = mutate(firstBuilder);
assertThat("different suggestion builders should not be equal", mutate, not(equalTo(firstBuilder)));
SB secondBuilder = serializedCopy(firstBuilder);
assertTrue("suggestion builder is not equal to self", secondBuilder.equals(secondBuilder));
assertTrue("suggestion builder is not equal to its copy", firstBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder));
assertThat("suggestion builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(),
equalTo(firstBuilder.hashCode()));
SB thirdBuilder = serializedCopy(secondBuilder);
assertTrue("suggestion builder is not equal to self", thirdBuilder.equals(thirdBuilder));
assertTrue("suggestion builder is not equal to its copy", secondBuilder.equals(thirdBuilder));
assertThat("suggestion builder copy's hashcode is different from original hashcode", secondBuilder.hashCode(),
equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder));
assertThat("suggestion builder copy's hashcode is different from original hashcode", firstBuilder.hashCode(),
equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder));
}
}
/**
* creates a random suggestion builder, renders it to xContent and parses it
* back into a new instance that should be equal to the original
*/
public void testFromXContent() throws IOException {
QueryParseContext context = new QueryParseContext(null);
context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
SB suggestionBuilder = randomTestBuilder();
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
xContentBuilder.prettyPrint();
}
xContentBuilder.startObject();
suggestionBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
xContentBuilder.endObject();
XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes());
context.reset(parser);
// we need to skip the start object and the name, those will be parsed by outer SuggestBuilder
parser.nextToken();
SuggestionBuilder<?> secondSuggestionBuilder = SuggestionBuilder.fromXContent(context, suggesters);
assertNotSame(suggestionBuilder, secondSuggestionBuilder);
assertEquals(suggestionBuilder, secondSuggestionBuilder);
assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode());
}
}
private SB mutate(SB firstBuilder) throws IOException {
SB mutation = serializedCopy(firstBuilder);
assertNotSame(mutation, firstBuilder);
// change either one of the shared SuggestionBuilder parameters, or delegate to the subclass's mutateSpecificParameters method
if (randomBoolean()) {
switch (randomIntBetween(0, 5)) {
case 0:
mutation.text(randomValueOtherThan(mutation.text(), () -> randomAsciiOfLengthBetween(2, 20)));
break;
case 1:
mutation.prefix(randomValueOtherThan(mutation.prefix(), () -> randomAsciiOfLengthBetween(2, 20)));
break;
case 2:
mutation.regex(randomValueOtherThan(mutation.regex(), () -> randomAsciiOfLengthBetween(2, 20)));
break;
case 3:
mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAsciiOfLengthBetween(2, 20)));
break;
case 4:
mutation.size(randomValueOtherThan(mutation.size(), () -> randomIntBetween(1, 20)));
break;
case 5:
mutation.shardSize(randomValueOtherThan(mutation.shardSize(), () -> randomIntBetween(1, 20)));
break;
}
} else {
mutateSpecificParameters(mutation); // mutate the copy, leaving the input builder untouched
}
return mutation;
}
/**
* take an input {@link SuggestionBuilder} and return another one that is
* different in one aspect (to test non-equality)
*/
protected abstract void mutateSpecificParameters(SB firstBuilder) throws IOException;
@SuppressWarnings("unchecked")
protected SB serializedCopy(SB original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
output.writeSuggestion(original);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
return (SB) in.readSuggestion();
}
}
}
protected static QueryParseContext newParseContext(final String xcontent) throws IOException {
final QueryParseContext parseContext = new QueryParseContext(queriesRegistry);
parseContext.reset(XContentFactory.xContent(xcontent).createParser(xcontent));
parseContext.parseFieldMatcher(parseFieldMatcher);
return parseContext;
}
}
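A concrete subclass then only supplies the type-specific pieces. A hedged sketch for the term suggester, with option choices and ranges chosen for illustration:

    public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase<TermSuggestionBuilder> {
        @Override
        protected TermSuggestionBuilder randomSuggestionBuilder() {
            TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
            setCommonPropertiesOnRandomBuilder(builder);
            maybeSet(builder::suggestMode, randomFrom(SuggestMode.values()));
            maybeSet(builder::sort, randomFrom(SortBy.values()));
            maybeSet(builder::maxEdits, randomIntBetween(1, 2));
            return builder;
        }

        @Override
        protected void mutateSpecificParameters(TermSuggestionBuilder builder) throws IOException {
            // flip one type-specific option so the mutated copy no longer equals the original
            builder.maxEdits(builder.maxEdits() == 1 ? 2 : 1);
        }
    }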

View File

@ -20,10 +20,10 @@ package org.elasticsearch.search.suggest;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.analysis.TokenStreamToAutomaton;
import org.apache.lucene.search.suggest.document.ContextSuggestField;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
@ -32,7 +32,6 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.settings.Settings;
@ -46,7 +45,7 @@ import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.suggest.completion.CompletionStats;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder.FuzzyOptionsBuilder;
import org.elasticsearch.search.suggest.completion.FuzzyOptions;
import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.GeoContextMapping;
@ -55,6 +54,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
@ -103,7 +103,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
));
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6");
}
@ -124,7 +124,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
));
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).regex("sugg.*es");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es");
assertSuggestions("foo", prefix, "sugg10estion", "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion");
}
@ -145,7 +145,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
));
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg", Fuzziness.ONE);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE);
assertSuggestions("foo", prefix, "sugxgestion10", "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6");
}
@ -171,13 +171,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
for (int i = 0; i < size; i++) {
outputs[i] = "suggestion" + (numDocs - i);
}
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sug").size(size);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sug").size(size);
assertSuggestions("foo", prefix, outputs);
CompletionSuggestionBuilder regex = SuggestBuilders.completionSuggestion("foo").field(FIELD).regex("su[g|s]g").size(size);
CompletionSuggestionBuilder regex = SuggestBuilders.completionSuggestion(FIELD).regex("su[g|s]g").size(size);
assertSuggestions("foo", regex, outputs);
CompletionSuggestionBuilder fuzzyPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg", Fuzziness.ONE).size(size);
CompletionSuggestionBuilder fuzzyPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE).size(size);
assertSuggestions("foo", fuzzyPrefix, outputs);
}
@ -196,8 +196,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").size(numDocs).payload("count");
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet();
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
        .size(numDocs).payload(Collections.singletonList("count"));
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet();
assertNoFailures(suggestResponse);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
@ -208,32 +209,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
}
}
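payload() now takes a List&lt;String&gt; of stored-field names instead of varargs, which is why the call sites above and below switch to Collections.singletonList and Arrays.asList. A short sketch under the same fixture:
// payload fields are now passed as a List<String>
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
        .payload(Arrays.asList("title", "count"));   // or Collections.singletonList("count")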
public void testMalformedRequestPayload() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
SuggestRequest request = new SuggestRequest(INDEX);
XContentBuilder suggest = jsonBuilder().startObject()
.startObject("bad-payload")
.field("prefix", "sug")
.startObject("completion")
.field("field", FIELD)
.startArray("payload")
.startObject()
.field("payload", "field")
.endObject()
.endArray()
.endObject()
.endObject().endObject();
request.suggest(suggest.bytes());
ensureGreen();
SuggestResponse suggestResponse = client().suggest(request).get();
assertThat(suggestResponse.getSuccessfulShards(), equalTo(0));
for (ShardOperationFailedException exception : suggestResponse.getShardFailures()) {
assertThat(exception.reason(), containsString("ParsingException[[completion] failed to parse field [payload]]; nested: IllegalStateException[Can't get text on a START_OBJECT"));
}
}
public void testMissingPayloadField() throws Exception {
final CompletionMappingBuilder mapping = new CompletionMappingBuilder();
createIndexAndMapping(mapping);
@ -242,8 +217,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
client().prepareIndex(INDEX, TYPE, "2").setSource(FIELD, "suggestion")
);
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").payload("test_field");
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet();
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.payload(Collections.singletonList("test_field"));
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet();
assertNoFailures(suggestResponse);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
@ -279,8 +255,9 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "2").setSource(source));
indexRandom(true, indexRequestBuilders);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").payload("title", "count");
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet();
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.payload(Arrays.asList("title", "count"));
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet();
assertNoFailures(suggestResponse);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo");
List<CompletionSuggestion.Entry.Option> options = completionSuggestion.getEntries().get(0).getOptions();
@ -324,13 +301,14 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
int suggestionSize = randomIntBetween(1, numDocs);
int numRequestedPayloadFields = randomIntBetween(2, numPayloadFields);
String[] payloadFields = new String[numRequestedPayloadFields];
List<String> payloadFields = new ArrayList<>(numRequestedPayloadFields);
for (int i = 0; i < numRequestedPayloadFields; i++) {
payloadFields[i] = "test_field" + i;
payloadFields.add("test_field" + i);
}
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg").size(suggestionSize).payload(payloadFields);
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(prefix).execute().actionGet();
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.size(suggestionSize).payload(payloadFields);
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", prefix).execute().actionGet();
assertNoFailures(suggestResponse);
CompletionSuggestion completionSuggestion = suggestResponse.getSuggest().getSuggestion("foo");
CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0);
@ -428,8 +406,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
new CompletionSuggestionBuilder("testSuggestions").field(FIELD).text("test").size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("testSuggestions",
new CompletionSuggestionBuilder(FIELD).text("test").size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, "testSuggestions", "testing");
@ -629,16 +607,16 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
.get();
assertThat(putMappingResponse.isAcknowledged(), is(true));
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("suggs").field(FIELD + ".suggest").text("f").size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("suggs",
SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, "suggs");
client().prepareIndex(INDEX, TYPE, "1").setRefresh(true).setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get();
ensureGreen(INDEX);
SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("suggs").field(FIELD + ".suggest").text("f").size(10)
SuggestResponse afterReindexingResponse = client().prepareSuggest(INDEX).addSuggestion("suggs",
SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)
).execute().actionGet();
assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters");
}
@ -654,13 +632,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirv").size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv").size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "Nirvana");
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirw", Fuzziness.ONE).size(10)
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "Nirvana");
}
@ -677,14 +655,14 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
// edit distance 1
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Norw", Fuzziness.ONE).size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo");
// edit distance 2
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Norw", Fuzziness.TWO).size(10)
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "Nirvana");
}
@ -700,13 +678,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriv", new FuzzyOptionsBuilder().setTranspositions(false)).size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo");
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriv", Fuzziness.ONE).size(10)
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", Fuzziness.ONE).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "Nirvana");
}
@ -722,13 +700,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nriva", new FuzzyOptionsBuilder().setFuzzyMinLength(6)).size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo");
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nrivan", new FuzzyOptionsBuilder().setFuzzyMinLength(6)).size(10)
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "Nirvana");
}
@ -744,13 +722,13 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirw", new FuzzyOptionsBuilder().setFuzzyPrefixLength(4)).size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo");
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("Nirvo", new FuzzyOptionsBuilder().setFuzzyPrefixLength(4)).size(10)
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo",
SuggestBuilders.completionSuggestion(FIELD).prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "Nirvana");
}
@ -768,19 +746,19 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
// suggestion with a character, which needs unicode awareness
org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder completionSuggestionBuilder =
SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", new FuzzyOptionsBuilder().setUnicodeAware(true)).size(10);
SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()).size(10);
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet();
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "ööööö");
// removing unicode awareness leads to no result
completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", new FuzzyOptionsBuilder().setUnicodeAware(false)).size(10);
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet();
completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()).size(10);
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo");
// increasing the edit distance instead of unicode awareness works again, as the query differs by only a single character
completionSuggestionBuilder = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("öööи", new FuzzyOptionsBuilder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO)).size(10);
suggestResponse = client().prepareSuggest(INDEX).addSuggestion(completionSuggestionBuilder).execute().actionGet();
completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()).size(10);
suggestResponse = client().prepareSuggest(INDEX).addSuggestion("foo", completionSuggestionBuilder).execute().actionGet();
assertSuggestions(suggestResponse, false, "foo", "ööööö");
}
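The fuzzy tests above switch from the inner FuzzyOptionsBuilder to an immutable FuzzyOptions value created through FuzzyOptions.builder() and handed to prefix(). A sketch combining the setters these tests exercise:
FuzzyOptions fuzzyOptions = FuzzyOptions.builder()
        .setFuzziness(Fuzziness.TWO)
        .setTranspositions(false)
        .setFuzzyMinLength(6)
        .setFuzzyPrefixLength(4)
        .setUnicodeAware(true)
        .build();
CompletionSuggestionBuilder fuzzy = SuggestBuilders.completionSuggestion(FIELD)
        .prefix("öööи", fuzzyOptions).size(10);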
@ -809,8 +787,8 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
ensureGreen();
// load the fst index into ram
client().prepareSuggest(INDEX).addSuggestion(SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("f")).get();
client().prepareSuggest(INDEX).addSuggestion(SuggestBuilders.completionSuggestion("foo").field(otherField).prefix("f")).get();
client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f")).get();
client().prepareSuggest(INDEX).addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f")).get();
// Get all stats
IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get();
@ -907,22 +885,22 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
}
public void assertSuggestions(String suggestionName, SuggestBuilder.SuggestionBuilder suggestBuilder, String... suggestions) {
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder
public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) {
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, suggestBuilder
).execute().actionGet();
assertSuggestions(suggestResponse, suggestionName, suggestions);
}
public void assertSuggestions(String suggestion, String... suggestions) {
String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10);
CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text(suggestion).size(10);
CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).text(suggestion).size(10);
assertSuggestions(suggestionName, suggestionBuilder, suggestions);
}
public void assertSuggestionsNotInOrder(String suggestString, String... suggestions) {
String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10);
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(
SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text(suggestString).size(10)
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName,
SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10)
).execute().actionGet();
assertSuggestions(suggestResponse, false, suggestionName, suggestions);
View File
@ -19,6 +19,7 @@
package org.elasticsearch.search.suggest;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.action.index.IndexRequestBuilder;
@ -35,12 +36,14 @@ import org.elasticsearch.search.suggest.completion.context.ContextBuilder;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.GeoContextMapping;
import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
@ -89,7 +92,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
}
@ -121,7 +124,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).regex("sugg.*es");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es");
assertSuggestions("foo", prefix, "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion", "sugg5estion");
}
@ -153,7 +156,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg", Fuzziness.ONE);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE);
assertSuggestions("foo", prefix, "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6", "sugxgestion5");
}
@ -178,8 +181,8 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg")
.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build());
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.contexts(Collections.singletonMap("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat0").build())));
assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0");
}
@ -205,10 +208,10 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg")
.categoryContexts("cat",
CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build()
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.contexts(Collections.singletonMap("cat",
Arrays.asList(CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build()))
);
assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2");
}
@ -235,7 +238,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
}
@ -265,25 +268,22 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
ensureYellow(INDEX);
// filter only on context cat
CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
catFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat0").build());
CompletionSuggestionBuilder catFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
catFilterSuggest.contexts(Collections.singletonMap("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat0").build())));
assertSuggestions("foo", catFilterSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0");
// filter only on context type
CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
typeFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build(),
CategoryQueryContext.builder().setCategory("type1").build());
CompletionSuggestionBuilder typeFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
typeFilterSuggest.contexts(Collections.singletonMap("type", Arrays.asList(CategoryQueryContext.builder().setCategory("type2").build(),
CategoryQueryContext.builder().setCategory("type1").build())));
assertSuggestions("foo", typeFilterSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1");
CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
// query context order should never matter
if (randomBoolean()) {
multiContextFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build());
multiContextFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat2").build());
} else {
multiContextFilterSuggest.categoryContexts("cat", CategoryQueryContext.builder().setCategory("cat2").build());
multiContextFilterSuggest.categoryContexts("type", CategoryQueryContext.builder().setCategory("type2").build());
}
Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
contextMap.put("type", Collections.singletonList(CategoryQueryContext.builder().setCategory("type2").build()));
contextMap.put("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat2").build()));
multiContextFilterSuggest.contexts(contextMap);
assertSuggestions("foo", multiContextFilterSuggest, "suggestion6", "suggestion2");
}
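categoryContexts(name, contexts...) is gone; all query contexts now travel in a single contexts(Map&lt;String, List&lt;? extends QueryContext&gt;&gt;) call, which is also why the randomized-order branches above could be dropped: a map carries no meaningful context order. A sketch with the category names used in this test:
CompletionSuggestionBuilder suggestion = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
contextMap.put("cat", Arrays.asList(
        CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
        CategoryQueryContext.builder().setCategory("cat1").build()));
contextMap.put("type", Collections.singletonList(
        CategoryQueryContext.builder().setCategory("type2").setBoost(2).build()));
suggestion.contexts(contextMap);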
@ -313,37 +313,34 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
ensureYellow(INDEX);
// boost only on context cat
CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
catBoostSuggest.categoryContexts("cat",
CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build());
CompletionSuggestionBuilder catBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
catBoostSuggest.contexts(Collections.singletonMap("cat",
Arrays.asList(
CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build())));
assertSuggestions("foo", catBoostSuggest, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2");
// boost only on context type
CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
typeBoostSuggest.categoryContexts("type",
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(),
CategoryQueryContext.builder().setCategory("type1").setBoost(4).build());
CompletionSuggestionBuilder typeBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
typeBoostSuggest.contexts(Collections.singletonMap("type",
Arrays.asList(
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(),
CategoryQueryContext.builder().setCategory("type1").setBoost(4).build())));
assertSuggestions("foo", typeBoostSuggest, "suggestion9", "suggestion5", "suggestion6", "suggestion1", "suggestion2");
// boost on both contexts
CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder multiContextBoostSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
// query context order should never matter
if (randomBoolean()) {
multiContextBoostSuggest.categoryContexts("type",
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(),
CategoryQueryContext.builder().setCategory("type1").setBoost(4).build());
multiContextBoostSuggest.categoryContexts("cat",
CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build());
} else {
multiContextBoostSuggest.categoryContexts("cat",
CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build());
multiContextBoostSuggest.categoryContexts("type",
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(),
CategoryQueryContext.builder().setCategory("type1").setBoost(4).build());
}
Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
contextMap.put("type", Arrays.asList(
CategoryQueryContext.builder().setCategory("type2").setBoost(2).build(),
CategoryQueryContext.builder().setCategory("type1").setBoost(4).build())
);
contextMap.put("cat", Arrays.asList(
CategoryQueryContext.builder().setCategory("cat0").setBoost(3).build(),
CategoryQueryContext.builder().setCategory("cat1").build())
);
multiContextBoostSuggest.contexts(contextMap);
assertSuggestions("foo", multiContextBoostSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1");
}
@ -374,7 +371,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
}
@ -405,7 +402,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion0", "suggestion1", "suggestion2", "suggestion3", "suggestion4");
}
@ -431,7 +428,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
}
@ -458,11 +455,12 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg")
.geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(geoPoints[0])).build());
CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.contexts(Collections.singletonMap("geo", Collections.singletonList(
GeoQueryContext.builder().setGeoPoint(new GeoPoint(geoPoints[0])).build())));
assertSuggestions("foo", geoFilteringPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion2", "suggestion0");
}
@ -490,13 +488,13 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
GeoQueryContext context1 = GeoQueryContext.builder().setGeoPoint(geoPoints[0]).setBoost(2).build();
GeoQueryContext context2 = GeoQueryContext.builder().setGeoPoint(geoPoints[1]).build();
CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg")
.geoContexts("geo", context1, context2);
CompletionSuggestionBuilder geoBoostingPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.contexts(Collections.singletonMap("geo", Arrays.asList(context1, context2)));
assertSuggestions("foo", geoBoostingPrefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion7");
}
@ -526,8 +524,8 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg")
.geoContexts("geo", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build());
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.contexts(Collections.singletonMap("geo", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.2263, 4.543)).build())));
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
}
@ -564,11 +562,11 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}
indexRandom(true, indexRequestBuilders);
ensureYellow(INDEX);
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg");
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg");
assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion("foo").field(FIELD).prefix("sugg")
.geoContexts("geo", GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build());
CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
.contexts(Collections.singletonMap("geo", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build())));
assertSuggestions("foo", geoNeighbourPrefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5");
}
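Geo contexts use the same contexts() map, with GeoQueryContext values; boosting works as for categories. A sketch based on the geo tests above:
GeoQueryContext boosted = GeoQueryContext.builder()
        .setGeoPoint(new GeoPoint(52.2263, 4.543))
        .setBoost(2)
        .build();
CompletionSuggestionBuilder geoPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg")
        .contexts(Collections.singletonMap("geo", Collections.singletonList(boosted)));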
@ -624,16 +622,16 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
refresh();
String suggestionName = randomAsciiOfLength(10);
CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(suggestionName).field(FIELD).text("h").size(10)
.geoContexts("st", GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build());
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(context).get();
CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10)
.contexts(Collections.singletonMap("st", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build())));
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, context).get();
assertEquals(suggestResponse.getSuggest().size(), 1);
assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest().getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string());
}
public void assertSuggestions(String suggestionName, SuggestBuilder.SuggestionBuilder suggestBuilder, String... suggestions) {
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestBuilder
public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) {
SuggestResponse suggestResponse = client().prepareSuggest(INDEX).addSuggestion(suggestionName, suggestBuilder
).execute().actionGet();
CompletionSuggestSearchIT.assertSuggestions(suggestResponse, suggestionName, suggestions);
}
View File
@ -21,6 +21,7 @@ package org.elasticsearch.search.suggest;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Locale;
@ -31,6 +32,7 @@ import java.util.Map;
*/
public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestionsContext> {
public static CustomSuggester PROTOTYPE = new CustomSuggester();
// This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
@Override
@ -52,23 +54,18 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
return response;
}
@Override
public SuggestContextParser getContextParser() {
return (parser, mapperService, fieldData) -> {
Map<String, Object> options = parser.map();
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
suggestionContext.setField((String) options.get("field"));
return suggestionContext;
};
}
public static class CustomSuggestionsContext extends SuggestionSearchContext.SuggestionContext {
public Map<String, Object> options;
public CustomSuggestionsContext(Suggester suggester, Map<String, Object> options) {
super(suggester);
public CustomSuggestionsContext(QueryShardContext context, Map<String, Object> options) {
super(new CustomSuggester(), context);
this.options = options;
}
}
@Override
public SuggestionBuilder<?> getBuilderPrototype() {
return CustomSuggesterSearchIT.CustomSuggestionBuilder.PROTOTYPE;
}
}
View File
@ -37,7 +37,7 @@ public class CustomSuggesterPlugin extends Plugin {
}
public void onModule(SearchModule searchModule) {
searchModule.registerSuggester("custom", CustomSuggester.class);
searchModule.registerSuggester("custom", CustomSuggester.PROTOTYPE);
}
}
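Suggester registration changes from a Class to a prototype instance, in line with the prototype-based NamedWritable deserialization used elsewhere in this commit. A sketch of the plugin hook (method only; the remaining Plugin boilerplate is unchanged):
public void onModule(SearchModule searchModule) {
    // register by instance; the prototype also supplies getBuilderPrototype()
    // so the coordinating node can parse and deserialize the matching builder
    searchModule.registerSuggester("custom", CustomSuggester.PROTOTYPE);
}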
View File
@ -20,17 +20,30 @@ package org.elasticsearch.search.suggest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.hasSize;
@ -59,16 +72,7 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
String randomField = randomAsciiOfLength(10);
String randomSuffix = randomAsciiOfLength(10);
SuggestBuilder suggestBuilder = new SuggestBuilder();
suggestBuilder.addSuggestion(
new SuggestBuilder.SuggestionBuilder<SuggestBuilder.SuggestionBuilder>("someName", "custom") {
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("field", randomField);
builder.field("suffix", randomSuffix);
return builder;
}
}.text(randomText)
);
suggestBuilder.addSuggestion("someName", new CustomSuggestionBuilder(randomField, randomSuffix).text(randomText));
SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test").setTypes("test").setFrom(0).setSize(1)
.suggest(suggestBuilder);
@ -76,11 +80,121 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase {
// TODO: infer type once JI-9019884 is fixed
// TODO: see also JDK-8039214
List<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestions
= CollectionUtils.<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>iterableAsArrayList(searchResponse.getSuggest().getSuggestion("someName"));
List<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestions =
CollectionUtils.<Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>iterableAsArrayList(
searchResponse.getSuggest().getSuggestion("someName"));
assertThat(suggestions, hasSize(2));
assertThat(suggestions.get(0).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix)));
assertThat(suggestions.get(1).getText().string(), is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix)));
assertThat(suggestions.get(0).getText().string(),
is(String.format(Locale.ROOT, "%s-%s-%s-12", randomText, randomField, randomSuffix)));
assertThat(suggestions.get(1).getText().string(),
is(String.format(Locale.ROOT, "%s-%s-%s-123", randomText, randomField, randomSuffix)));
}
public static class CustomSuggestionBuilder extends SuggestionBuilder<CustomSuggestionBuilder> {
public final static CustomSuggestionBuilder PROTOTYPE = new CustomSuggestionBuilder("_na_", "_na_");
protected static final ParseField RANDOM_SUFFIX_FIELD = new ParseField("suffix");
private String randomSuffix;
public CustomSuggestionBuilder(String randomField, String randomSuffix) {
super(randomField);
this.randomSuffix = randomSuffix;
}
@Override
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
return builder;
}
@Override
public String getWriteableName() {
return "custom";
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
out.writeString(randomSuffix);
}
@Override
public CustomSuggestionBuilder doReadFrom(StreamInput in, String field) throws IOException {
return new CustomSuggestionBuilder(field, in.readString());
}
@Override
protected boolean doEquals(CustomSuggestionBuilder other) {
return Objects.equals(randomSuffix, other.randomSuffix);
}
@Override
protected int doHashCode() {
return Objects.hash(randomSuffix);
}
@Override
protected CustomSuggestionBuilder innerFromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
XContentParser.Token token;
String currentFieldName = null;
String fieldname = null;
String suffix = null;
String analyzer = null;
int sizeField = -1;
int shardSize = -1;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.ANALYZER_FIELD)) {
analyzer = parser.text();
} else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.FIELDNAME_FIELD)) {
fieldname = parser.text();
} else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SIZE_FIELD)) {
sizeField = parser.intValue();
} else if (parseFieldMatcher.match(currentFieldName, SuggestionBuilder.SHARDSIZE_FIELD)) {
shardSize = parser.intValue();
} else if (parseFieldMatcher.match(currentFieldName, RANDOM_SUFFIX_FIELD)) {
suffix = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"suggester[custom] doesn't support field [" + currentFieldName + "]");
}
}
// by now we should have the field name; check it and copy the parsed options over to the suggestion builder we return
if (fieldname == null) {
throw new ParsingException(parser.getTokenLocation(), "the required field option is missing");
}
CustomSuggestionBuilder builder = new CustomSuggestionBuilder(fieldname, suffix);
if (analyzer != null) {
builder.analyzer(analyzer);
}
if (sizeField != -1) {
builder.size(sizeField);
}
if (shardSize != -1) {
builder.shardSize(shardSize);
}
return builder;
}
@Override
public SuggestionContext build(QueryShardContext context) throws IOException {
Map<String, Object> options = new HashMap<>();
options.put(FIELDNAME_FIELD.getPreferredName(), field());
options.put(RANDOM_SUFFIX_FIELD.getPreferredName(), randomSuffix);
CustomSuggester.CustomSuggestionsContext customSuggestionsContext =
new CustomSuggester.CustomSuggestionsContext(context, options);
customSuggestionsContext.setField(field());
assert text != null;
customSuggestionsContext.setText(BytesRefs.toBytesRef(text));
return customSuggestionsContext;
}
}
}
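Because SuggestionBuilder now implements NamedWritable, the builder parsed on the coordinating node can be shipped to the shards as an object. A hedged sketch of that round-trip, assuming a registry with the prototype registered and the stream classes from org.elasticsearch.common.io.stream (the field and suffix values are examples):
NamedWriteableRegistry registry = new NamedWriteableRegistry();
registry.registerPrototype(SuggestionBuilder.class, CustomSuggestionBuilder.PROTOTYPE);

CustomSuggestionBuilder original = new CustomSuggestionBuilder("some_field", "some_suffix");
BytesStreamOutput output = new BytesStreamOutput();
output.writeNamedWriteable(original);                       // coordinating node side
StreamInput input = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), registry);
SuggestionBuilder<?> copy = input.readNamedWriteable(SuggestionBuilder.class); // shard side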
View File
@ -0,0 +1,166 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.suggest.completion.CompletionSuggesterBuilderTests;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.search.suggest.completion.WritableTestCase;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilderTests;
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilderTests;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Collections;
import java.util.Map.Entry;
public class SuggestBuilderTests extends WritableTestCase<SuggestBuilder> {
private static NamedWriteableRegistry namedWriteableRegistry;
/**
* Setup for the whole base test class.
*/
@BeforeClass
public static void init() {
NamedWriteableRegistry nwRegistry = new NamedWriteableRegistry();
nwRegistry.registerPrototype(SuggestionBuilder.class, TermSuggestionBuilder.PROTOTYPE);
nwRegistry.registerPrototype(SuggestionBuilder.class, PhraseSuggestionBuilder.PROTOTYPE);
nwRegistry.registerPrototype(SuggestionBuilder.class, CompletionSuggestionBuilder.PROTOTYPE);
nwRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
nwRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
nwRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
namedWriteableRegistry = nwRegistry;
}
@AfterClass
public static void afterClass() {
namedWriteableRegistry = null;
}
@Override
protected NamedWriteableRegistry provideNamedWritableRegistry() {
return namedWriteableRegistry;
}
/**
* Creates a random suggest builder, renders it to xContent and parses it back into a new instance that should be equal to the original.
*/
public void testFromXContent() throws IOException {
Suggesters suggesters = new Suggesters(Collections.emptyMap());
QueryParseContext context = new QueryParseContext(null);
context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
SuggestBuilder suggestBuilder = createTestModel();
XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
xContentBuilder.prettyPrint();
}
suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentHelper.createParser(xContentBuilder.bytes());
context.reset(parser);
SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
assertNotSame(suggestBuilder, secondSuggestBuilder);
assertEquals(suggestBuilder, secondSuggestBuilder);
assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode());
}
}
public void testIllegalSuggestionName() {
try {
new SuggestBuilder().addSuggestion(null, PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder());
fail("exception expected");
} catch (NullPointerException e) {
assertEquals("every suggestion needs a name", e.getMessage());
}
try {
new SuggestBuilder().addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder())
.addSuggestion("my-suggest", PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder());
fail("exception expected");
} catch (IllegalArgumentException e) {
assertEquals("already added another suggestion with name [my-suggest]", e.getMessage());
}
}
@Override
protected SuggestBuilder createTestModel() {
return randomSuggestBuilder();
}
@Override
protected SuggestBuilder createMutation(SuggestBuilder original) throws IOException {
SuggestBuilder mutation = new SuggestBuilder().setGlobalText(original.getGlobalText());
for (Entry<String, SuggestionBuilder<?>> suggestionBuilder : original.getSuggestions().entrySet()) {
mutation.addSuggestion(suggestionBuilder.getKey(), suggestionBuilder.getValue());
}
if (randomBoolean()) {
mutation.setGlobalText(randomAsciiOfLengthBetween(5, 60));
} else {
mutation.addSuggestion(randomAsciiOfLength(10), PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder());
}
return mutation;
}
@Override
protected SuggestBuilder readFrom(StreamInput in) throws IOException {
return SuggestBuilder.PROTOTYPE.readFrom(in);
}
public static SuggestBuilder randomSuggestBuilder() {
SuggestBuilder builder = new SuggestBuilder();
if (randomBoolean()) {
builder.setGlobalText(randomAsciiOfLengthBetween(1, 20));
}
final int numSuggestions = randomIntBetween(1, 5);
for (int i = 0; i < numSuggestions; i++) {
builder.addSuggestion(randomAsciiOfLengthBetween(5, 10), randomSuggestionBuilder());
}
return builder;
}
private static SuggestionBuilder<?> randomSuggestionBuilder() {
switch (randomIntBetween(0, 2)) {
case 0: return TermSuggestionBuilderTests.randomTermSuggestionBuilder();
case 1: return PhraseSuggestionBuilderTests.randomPhraseSuggestionBuilder();
case 2: return CompletionSuggesterBuilderTests.randomCompletionSuggestionBuilder();
default: return TermSuggestionBuilderTests.randomTermSuggestionBuilder();
}
}
}
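For reference, a hedged sketch of the request-body shape that SuggestBuilder.fromXContent parses in the test above, with example names ("my-suggest", "title_suggest"):
XContentBuilder suggest = jsonBuilder().startObject()
        .field("text", "some global text")       // optional global suggest text
        .startObject("my-suggest")               // suggestion name
            .field("prefix", "sugg")
            .startObject("completion")           // suggester type
                .field("field", "title_suggest")
                .field("size", 5)
            .endObject()
        .endObject()
    .endObject();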
View File
@ -190,11 +190,11 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
XContentBuilder builder = jsonBuilder().value("context1");
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1));
assertThat(queryContexts.get(0).context, equalTo("context1"));
assertThat(queryContexts.get(0).boost, equalTo(1));
assertThat(queryContexts.get(0).isPrefix, equalTo(false));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
assertThat(internalQueryContexts.get(0).boost, equalTo(1));
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
}
public void testQueryContextParsingArray() throws Exception {
@ -204,14 +204,14 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
.endArray();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(2));
assertThat(queryContexts.get(0).context, equalTo("context1"));
assertThat(queryContexts.get(0).boost, equalTo(1));
assertThat(queryContexts.get(0).isPrefix, equalTo(false));
assertThat(queryContexts.get(1).context, equalTo("context2"));
assertThat(queryContexts.get(1).boost, equalTo(1));
assertThat(queryContexts.get(1).isPrefix, equalTo(false));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(2));
assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
assertThat(internalQueryContexts.get(0).boost, equalTo(1));
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false));
assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
assertThat(internalQueryContexts.get(1).boost, equalTo(1));
assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
}
public void testQueryContextParsingObject() throws Exception {
@ -222,11 +222,11 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
.endObject();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1));
assertThat(queryContexts.get(0).context, equalTo("context1"));
assertThat(queryContexts.get(0).boost, equalTo(10));
assertThat(queryContexts.get(0).isPrefix, equalTo(true));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1));
assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
assertThat(internalQueryContexts.get(0).boost, equalTo(10));
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
}
@ -245,14 +245,14 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
.endArray();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(2));
assertThat(queryContexts.get(0).context, equalTo("context1"));
assertThat(queryContexts.get(0).boost, equalTo(2));
assertThat(queryContexts.get(0).isPrefix, equalTo(true));
assertThat(queryContexts.get(1).context, equalTo("context2"));
assertThat(queryContexts.get(1).boost, equalTo(3));
assertThat(queryContexts.get(1).isPrefix, equalTo(false));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(2));
assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
assertThat(internalQueryContexts.get(0).boost, equalTo(2));
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
assertThat(internalQueryContexts.get(1).boost, equalTo(3));
assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
}
public void testQueryContextParsingMixed() throws Exception {
@ -266,14 +266,14 @@ public class CategoryContextMappingTests extends ESSingleNodeTestCase {
.endArray();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(2));
assertThat(queryContexts.get(0).context, equalTo("context1"));
assertThat(queryContexts.get(0).boost, equalTo(2));
assertThat(queryContexts.get(0).isPrefix, equalTo(true));
assertThat(queryContexts.get(1).context, equalTo("context2"));
assertThat(queryContexts.get(1).boost, equalTo(1));
assertThat(queryContexts.get(1).isPrefix, equalTo(false));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(2));
assertThat(internalQueryContexts.get(0).context, equalTo("context1"));
assertThat(internalQueryContexts.get(0).boost, equalTo(2));
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true));
assertThat(internalQueryContexts.get(1).context, equalTo("context2"));
assertThat(internalQueryContexts.get(1).boost, equalTo(1));
assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false));
}
public void testParsingContextFromDocument() throws Exception {
View File
@ -0,0 +1,76 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
import java.io.IOException;
public class CategoryQueryContextTests extends QueryContextTestCase<CategoryQueryContext> {
public static CategoryQueryContext randomCategoryQueryContext() {
final CategoryQueryContext.Builder builder = CategoryQueryContext.builder();
builder.setCategory(randomAsciiOfLength(10));
maybeSet(builder::setBoost, randomIntBetween(1, 10));
maybeSet(builder::setPrefix, randomBoolean());
return builder.build();
}
@Override
protected CategoryQueryContext createTestModel() {
return randomCategoryQueryContext();
}
@Override
protected CategoryQueryContext prototype() {
return CategoryQueryContext.PROTOTYPE;
}
public void testNullCategoryIsIllegal() {
final CategoryQueryContext categoryQueryContext = randomCategoryQueryContext();
final CategoryQueryContext.Builder builder = CategoryQueryContext.builder()
.setBoost(categoryQueryContext.getBoost())
.setPrefix(categoryQueryContext.isPrefix());
try {
builder.build();
fail("null category is illegal");
} catch (NullPointerException e) {
assertEquals(e.getMessage(), "category must not be null");
}
}
public void testIllegalArguments() {
final CategoryQueryContext.Builder builder = CategoryQueryContext.builder();
try {
builder.setCategory(null);
fail("category must not be null");
} catch (NullPointerException e) {
assertEquals(e.getMessage(), "category must not be null");
}
try {
builder.setBoost(-randomIntBetween(1, Integer.MAX_VALUE));
fail("boost must be positive");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "boost must be greater than 0");
}
}
}
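For orientation, the contract these tests pin down is: category is required, boost must be positive (and appears to default to 1, per the mixed parsing test above), and prefix defaults to false. A minimal usage sketch, with an illustrative category value:

    CategoryQueryContext context = CategoryQueryContext.builder()
            .setCategory("electronics")   // required; null fails with an NPE
            .setBoost(2)                  // optional; must be greater than 0
            .setPrefix(true)              // optional
            .build();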

View File

@ -0,0 +1,174 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping;
import org.elasticsearch.search.suggest.completion.context.CategoryQueryContext;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
import org.elasticsearch.search.suggest.completion.context.GeoContextMapping;
import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.Matchers.containsString;
public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase<CompletionSuggestionBuilder> {
@Override
protected CompletionSuggestionBuilder randomSuggestionBuilder() {
return randomCompletionSuggestionBuilder();
}
public static CompletionSuggestionBuilder randomCompletionSuggestionBuilder() {
return randomSuggestionBuilderWithContextInfo().builder;
}
private static class BuilderAndInfo {
CompletionSuggestionBuilder builder;
List<String> catContexts = new ArrayList<>();
List<String> geoContexts = new ArrayList<>();
}
private static BuilderAndInfo randomSuggestionBuilderWithContextInfo() {
final BuilderAndInfo builderAndInfo = new BuilderAndInfo();
CompletionSuggestionBuilder testBuilder = new CompletionSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
setCommonPropertiesOnRandomBuilder(testBuilder);
switch (randomIntBetween(0, 3)) {
case 0:
testBuilder.prefix(randomAsciiOfLength(10));
break;
case 1:
testBuilder.prefix(randomAsciiOfLength(10), FuzzyOptionsTests.randomFuzzyOptions());
break;
case 2:
testBuilder.prefix(randomAsciiOfLength(10), randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO));
break;
case 3:
testBuilder.regex(randomAsciiOfLength(10), RegexOptionsTests.randomRegexOptions());
break;
}
List<String> payloads = new ArrayList<>();
Collections.addAll(payloads, generateRandomStringArray(5, 10, false, false));
maybeSet(testBuilder::payload, payloads);
Map<String, List<? extends QueryContext>> contextMap = new HashMap<>();
if (randomBoolean()) {
int numContext = randomIntBetween(1, 5);
List<CategoryQueryContext> contexts = new ArrayList<>(numContext);
for (int i = 0; i < numContext; i++) {
contexts.add(CategoryQueryContextTests.randomCategoryQueryContext());
}
String name = randomAsciiOfLength(10);
contextMap.put(name, contexts);
builderAndInfo.catContexts.add(name);
}
if (randomBoolean()) {
int numContext = randomIntBetween(1, 5);
List<GeoQueryContext> contexts = new ArrayList<>(numContext);
for (int i = 0; i < numContext; i++) {
contexts.add(GeoQueryContextTests.randomGeoQueryContext());
}
String name = randomAsciiOfLength(10);
contextMap.put(name, contexts);
builderAndInfo.geoContexts.add(name);
}
testBuilder.contexts(contextMap);
builderAndInfo.builder = testBuilder;
return builderAndInfo;
}
@Override
protected void mutateSpecificParameters(CompletionSuggestionBuilder builder) throws IOException {
switch (randomIntBetween(0, 5)) {
case 0:
List<String> payloads = new ArrayList<>();
Collections.addAll(payloads, generateRandomStringArray(5, 10, false, false));
builder.payload(payloads);
break;
case 1:
int nCatContext = randomIntBetween(1, 5);
List<CategoryQueryContext> contexts = new ArrayList<>(nCatContext);
for (int i = 0; i < nCatContext; i++) {
contexts.add(CategoryQueryContextTests.randomCategoryQueryContext());
}
builder.contexts(Collections.singletonMap(randomAsciiOfLength(10), contexts));
break;
case 2:
int nGeoContext = randomIntBetween(1, 5);
List<GeoQueryContext> geoContexts = new ArrayList<>(nGeoContext);
for (int i = 0; i < nGeoContext; i++) {
geoContexts.add(GeoQueryContextTests.randomGeoQueryContext());
}
builder.contexts(Collections.singletonMap(randomAsciiOfLength(10), geoContexts));
break;
case 3:
builder.prefix(randomAsciiOfLength(10), FuzzyOptionsTests.randomFuzzyOptions());
break;
case 4:
builder.prefix(randomAsciiOfLength(10), randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO));
break;
case 5:
builder.regex(randomAsciiOfLength(10), RegexOptionsTests.randomRegexOptions());
break;
default:
throw new IllegalStateException("should not be reached");
}
}
/**
* Test that a malformed JSON suggestion request fails.
*/
public void testMalformedJsonRequestPayload() throws Exception {
final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
final String payload = "{\n" +
" \"bad-payload\" : { \n" +
" \"prefix\" : \"sug\",\n" +
" \"completion\" : { \n" +
" \"field\" : \"" + field + "\",\n " +
" \"payload\" : [ {\"payload\":\"field\"} ]\n" +
" }\n" +
" }\n" +
"}\n";
try {
final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(payload), suggesters);
fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder);
} catch (ParsingException e) {
assertThat(e.getMessage(), containsString("failed to parse field [payload]"));
}
}
}
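For contrast with the malformed request above, the payload field is expected to be a plain array of strings (the builder takes a List&lt;String&gt;); a hedged sketch of a well-formed request, with illustrative suggestion and field names:

    {
      "my-suggestion" : {
        "prefix" : "sug",
        "completion" : {
          "field" : "title_suggest",
          "payload" : [ "title", "price" ]
        }
      }
    }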

View File

@ -0,0 +1,131 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.unit.Fuzziness;
import java.io.IOException;
public class FuzzyOptionsTests extends WritableTestCase<FuzzyOptions> {
public static FuzzyOptions randomFuzzyOptions() {
final FuzzyOptions.Builder builder = FuzzyOptions.builder();
if (randomBoolean()) {
maybeSet(builder::setFuzziness, randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO));
} else {
maybeSet(builder::setFuzziness, randomFrom(0, 1, 2));
}
maybeSet(builder::setFuzzyMinLength, randomIntBetween(0, 10));
maybeSet(builder::setFuzzyPrefixLength, randomIntBetween(0, 10));
maybeSet(builder::setMaxDeterminizedStates, randomIntBetween(1, 1000));
maybeSet(builder::setTranspositions, randomBoolean());
maybeSet(builder::setUnicodeAware, randomBoolean());
return builder.build();
}
@Override
protected FuzzyOptions createTestModel() {
return randomFuzzyOptions();
}
@Override
protected FuzzyOptions createMutation(FuzzyOptions original) throws IOException {
final FuzzyOptions.Builder builder = FuzzyOptions.builder();
builder.setFuzziness(original.getEditDistance())
.setFuzzyPrefixLength(original.getFuzzyPrefixLength())
.setFuzzyMinLength(original.getFuzzyMinLength())
.setMaxDeterminizedStates(original.getMaxDeterminizedStates())
.setTranspositions(original.isTranspositions())
.setUnicodeAware(original.isUnicodeAware());
switch (randomIntBetween(0, 5)) {
case 0:
builder.setFuzziness(randomValueOtherThan(original.getEditDistance(), () -> randomFrom(0, 1, 2)));
break;
case 1:
builder.setFuzzyPrefixLength(randomValueOtherThan(original.getFuzzyPrefixLength(), () ->
randomIntBetween(1, 3)));
break;
case 2:
builder.setFuzzyMinLength(randomValueOtherThan(original.getFuzzyMinLength(), () ->
randomIntBetween(1, 3)));
break;
case 3:
builder.setMaxDeterminizedStates(randomValueOtherThan(original.getMaxDeterminizedStates(), () ->
randomIntBetween(1, 10)));
break;
case 4:
builder.setTranspositions(!original.isTranspositions());
break;
case 5:
builder.setUnicodeAware(!original.isUnicodeAware());
break;
}
return builder.build();
}
@Override
protected FuzzyOptions readFrom(StreamInput in) throws IOException {
return FuzzyOptions.readFuzzyOptions(in);
}
public void testIllegalArguments() {
final FuzzyOptions.Builder builder = FuzzyOptions.builder();
try {
builder.setFuzziness(-randomIntBetween(1, Integer.MAX_VALUE));
fail("fuzziness must be > 0");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "fuzziness must be between 0 and 2");
}
try {
builder.setFuzziness(randomIntBetween(3, Integer.MAX_VALUE));
fail("fuzziness must be < 2");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "fuzziness must be between 0 and 2");
}
try {
builder.setFuzziness(null);
fail("fuzziness must not be null");
} catch (NullPointerException e) {
assertEquals(e.getMessage(), "fuzziness must not be null");
}
try {
builder.setFuzzyMinLength(-randomIntBetween(1, Integer.MAX_VALUE));
fail("fuzzyMinLength must be >= 0");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "fuzzyMinLength must not be negative");
}
try {
builder.setFuzzyPrefixLength(-randomIntBetween(1, Integer.MAX_VALUE));
fail("fuzzyPrefixLength must be >= 0");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "fuzzyPrefixLength must not be negative");
}
try {
builder.setMaxDeterminizedStates(-randomIntBetween(1, Integer.MAX_VALUE));
fail("max determinized state must be >= 0");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "maxDeterminizedStates must not be negative");
}
}
}
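Taken together, the negative tests pin the valid ranges: fuzziness between 0 and 2, lengths and maxDeterminizedStates non-negative. A minimal sketch with illustrative values:

    FuzzyOptions options = FuzzyOptions.builder()
            .setFuzziness(Fuzziness.ONE)   // an int in [0, 2] is also accepted
            .setFuzzyMinLength(3)          // must not be negative
            .setFuzzyPrefixLength(1)       // must not be negative
            .setTranspositions(true)
            .setUnicodeAware(false)
            .build();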

View File

@ -202,15 +202,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
XContentBuilder builder = jsonBuilder().value("ezs42e44yx96");
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
GeoContextMapping mapping = ContextBuilder.geo("geo").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1 + 8));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1 + 8));
Collection<String> locations = new ArrayList<>();
locations.add("ezs42e");
addNeighbors("ezs42e", GeoContextMapping.DEFAULT_PRECISION, locations);
for (ContextMapping.QueryContext queryContext : queryContexts) {
assertThat(queryContext.context, isIn(locations));
assertThat(queryContext.boost, equalTo(1));
assertThat(queryContext.isPrefix, equalTo(false));
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
assertThat(internalQueryContext.context, isIn(locations));
assertThat(internalQueryContext.boost, equalTo(1));
assertThat(internalQueryContext.isPrefix, equalTo(false));
}
}
@ -221,15 +221,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
.endObject();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
GeoContextMapping mapping = ContextBuilder.geo("geo").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1 + 8));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1 + 8));
Collection<String> locations = new ArrayList<>();
locations.add("wh0n94");
addNeighbors("wh0n94", GeoContextMapping.DEFAULT_PRECISION, locations);
for (ContextMapping.QueryContext queryContext : queryContexts) {
assertThat(queryContext.context, isIn(locations));
assertThat(queryContext.boost, equalTo(1));
assertThat(queryContext.isPrefix, equalTo(false));
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
assertThat(internalQueryContext.context, isIn(locations));
assertThat(internalQueryContext.boost, equalTo(1));
assertThat(internalQueryContext.isPrefix, equalTo(false));
}
}
@ -244,8 +244,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
.endObject();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
GeoContextMapping mapping = ContextBuilder.geo("geo").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8));
Collection<String> locations = new ArrayList<>();
locations.add("wh0n94");
locations.add("w");
@ -254,10 +254,10 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
addNeighbors("wh", 2, locations);
locations.add("wh0");
addNeighbors("wh0", 3, locations);
for (ContextMapping.QueryContext queryContext : queryContexts) {
assertThat(queryContext.context, isIn(locations));
assertThat(queryContext.boost, equalTo(10));
assertThat(queryContext.isPrefix, equalTo(queryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
assertThat(internalQueryContext.context, isIn(locations));
assertThat(internalQueryContext.boost, equalTo(10));
assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
}
}
@ -282,8 +282,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
.endArray();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
GeoContextMapping mapping = ContextBuilder.geo("geo").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 1 + 8));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8 + 1 + 1 + 8));
Collection<String> firstLocations = new ArrayList<>();
firstLocations.add("wh0n94");
firstLocations.add("w");
@ -296,15 +296,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
secondLocations.add("w5cx04");
secondLocations.add("w5cx0");
addNeighbors("w5cx0", 5, secondLocations);
for (ContextMapping.QueryContext queryContext : queryContexts) {
if (firstLocations.contains(queryContext.context)) {
assertThat(queryContext.boost, equalTo(10));
} else if (secondLocations.contains(queryContext.context)) {
assertThat(queryContext.boost, equalTo(2));
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
if (firstLocations.contains(internalQueryContext.context)) {
assertThat(internalQueryContext.boost, equalTo(10));
} else if (secondLocations.contains(internalQueryContext.context)) {
assertThat(internalQueryContext.boost, equalTo(2));
} else {
fail(queryContext.context + " was not expected");
fail(internalQueryContext.context + " was not expected");
}
assertThat(queryContext.isPrefix, equalTo(queryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
}
}
@ -325,8 +325,8 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
.endArray();
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(builder.bytes());
GeoContextMapping mapping = ContextBuilder.geo("geo").build();
List<ContextMapping.QueryContext> queryContexts = mapping.parseQueryContext(parser);
assertThat(queryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8));
List<ContextMapping.InternalQueryContext> internalQueryContexts = mapping.parseQueryContext(parser);
assertThat(internalQueryContexts.size(), equalTo(1 + 1 + 8 + 1 + 8 + 1 + 8));
Collection<String> firstLocations = new ArrayList<>();
firstLocations.add("wh0n94");
firstLocations.add("w");
@ -336,15 +336,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
Collection<String> secondLocations = new ArrayList<>();
secondLocations.add("w5cx04");
addNeighbors("w5cx04", 6, secondLocations);
for (ContextMapping.QueryContext queryContext : queryContexts) {
if (firstLocations.contains(queryContext.context)) {
assertThat(queryContext.boost, equalTo(10));
} else if (secondLocations.contains(queryContext.context)) {
assertThat(queryContext.boost, equalTo(1));
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
if (firstLocations.contains(internalQueryContext.context)) {
assertThat(internalQueryContext.boost, equalTo(10));
} else if (secondLocations.contains(internalQueryContext.context)) {
assertThat(internalQueryContext.boost, equalTo(1));
} else {
fail(queryContext.context + " was not expected");
fail(internalQueryContext.context + " was not expected");
}
assertThat(queryContext.isPrefix, equalTo(queryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
assertThat(internalQueryContext.isPrefix, equalTo(internalQueryContext.context.length() < GeoContextMapping.DEFAULT_PRECISION));
}
}
}
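The size assertions in this file are geohash arithmetic: a geohash cell has exactly eight neighbours, so each expanded cell accounts for a 1 + 8 group, and the longer sums simply stack one such group per precision level plus any bare cells. The same bookkeeping, spelled out with the test helper:

    Collection<String> locations = new ArrayList<>();
    locations.add("wh0n94");                                                 // the cell itself
    addNeighbors("wh0n94", GeoContextMapping.DEFAULT_PRECISION, locations);  // its eight neighbours
    assertThat(locations.size(), equalTo(1 + 8));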

View File

@ -0,0 +1,107 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.search.suggest.completion.context.GeoQueryContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
public class GeoQueryContextTests extends QueryContextTestCase<GeoQueryContext> {
public static GeoQueryContext randomGeoQueryContext() {
final GeoQueryContext.Builder builder = GeoQueryContext.builder();
builder.setGeoPoint(new GeoPoint(randomDouble(), randomDouble()));
maybeSet(builder::setBoost, randomIntBetween(1, 10));
maybeSet(builder::setPrecision, randomIntBetween(1, 12));
List<Integer> neighbours = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 12); i++) {
neighbours.add(randomIntBetween(1, 12));
}
maybeSet(builder::setNeighbours, neighbours);
return builder.build();
}
@Override
protected GeoQueryContext createTestModel() {
return randomGeoQueryContext();
}
@Override
protected GeoQueryContext prototype() {
return GeoQueryContext.PROTOTYPE;
}
public void testNullGeoPointIsIllegal() {
final GeoQueryContext geoQueryContext = randomGeoQueryContext();
final GeoQueryContext.Builder builder = GeoQueryContext.builder()
.setNeighbours(geoQueryContext.getNeighbours())
.setPrecision(geoQueryContext.getPrecision())
.setBoost(geoQueryContext.getBoost());
try {
builder.build();
fail("null geo point is illegal");
} catch (NullPointerException e) {
assertThat(e.getMessage(), equalTo("geoPoint must not be null"));
}
}
public void testIllegalArguments() {
final GeoQueryContext.Builder builder = GeoQueryContext.builder();
try {
builder.setGeoPoint(null);
fail("geoPoint must not be null");
} catch (NullPointerException e) {
assertEquals(e.getMessage(), "geoPoint must not be null");
}
try {
builder.setBoost(-randomIntBetween(1, Integer.MAX_VALUE));
fail("boost must be positive");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "boost must be greater than 0");
}
int precision = 0;
try {
do {
precision = randomInt();
} while (precision >= 1 && precision <= 12);
builder.setPrecision(precision);
fail("precision must be between 1 and 12");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "precision must be between 1 and 12");
}
try {
List<Integer> neighbours = new ArrayList<>();
neighbours.add(precision);
for (int i = 1; i < randomIntBetween(1, 11); i++) {
neighbours.add(i);
}
Collections.shuffle(neighbours, random());
builder.setNeighbours(neighbours);
fail("neighbour value must be between 1 and 12");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "neighbour value must be between 1 and 12");
}
}
}
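Mirroring the category variant, a minimal GeoQueryContext sketch with illustrative coordinates:

    GeoQueryContext context = GeoQueryContext.builder()
            .setGeoPoint(new GeoPoint(52.52, 13.40))   // required; null fails with an NPE
            .setBoost(2)                               // must be greater than 0
            .setPrecision(6)                           // must be between 1 and 12
            .setNeighbours(Arrays.asList(5, 6))        // each value between 1 and 12
            .build();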

View File

@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.suggest.completion.context.QueryContext;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static junit.framework.TestCase.assertEquals;
public abstract class QueryContextTestCase<QC extends QueryContext> extends ESTestCase {
private static final int NUMBER_OF_RUNS = 20;
/**
* create random model that is put under test
*/
protected abstract QC createTestModel();
/**
* query context prototype to read serialized format
*/
protected abstract QC prototype();
public void testToXContext() throws IOException {
for (int i = 0; i < NUMBER_OF_RUNS; i++) {
QueryContext toXContent = createTestModel();
XContentBuilder builder = XContentFactory.jsonBuilder();
toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS);
BytesReference bytesReference = builder.bytes();
XContentParser parser = XContentFactory.xContent(bytesReference).createParser(bytesReference);
parser.nextToken();
QueryContext fromXContext = prototype().fromXContext(parser);
assertEquals(toXContent, fromXContext);
assertEquals(toXContent.hashCode(), fromXContext.hashCode());
}
}
}
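A concrete subclass only has to supply the two hooks; CategoryQueryContextTests and GeoQueryContextTests above are the real instances, and the shape reduces to this sketch (FooQueryContext and its factory are hypothetical):

    public class FooQueryContextTests extends QueryContextTestCase<FooQueryContext> {
        @Override
        protected FooQueryContext createTestModel() {
            return randomFooQueryContext();   // hypothetical random factory
        }
        @Override
        protected FooQueryContext prototype() {
            return FooQueryContext.PROTOTYPE;
        }
    }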

View File

@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.query.RegexpFlag;
import java.io.IOException;
public class RegexOptionsTests extends WritableTestCase<RegexOptions> {
public static RegexOptions randomRegexOptions() {
final RegexOptions.Builder builder = RegexOptions.builder();
maybeSet(builder::setMaxDeterminizedStates, randomIntBetween(1, 1000));
StringBuilder sb = new StringBuilder();
for (RegexpFlag regexpFlag : RegexpFlag.values()) {
if (randomBoolean()) {
if (sb.length() != 0) {
sb.append("|");
}
sb.append(regexpFlag.name());
}
}
maybeSet(builder::setFlags, sb.toString());
return builder.build();
}
@Override
protected RegexOptions createTestModel() {
return randomRegexOptions();
}
@Override
protected RegexOptions createMutation(RegexOptions original) throws IOException {
final RegexOptions.Builder builder = RegexOptions.builder();
builder.setMaxDeterminizedStates(randomValueOtherThan(original.getMaxDeterminizedStates(), () -> randomIntBetween(1, 10)));
return builder.build();
}
@Override
protected RegexOptions readFrom(StreamInput in) throws IOException {
return RegexOptions.readRegexOptions(in);
}
public void testIllegalArgument() {
final RegexOptions.Builder builder = RegexOptions.builder();
try {
builder.setMaxDeterminizedStates(-randomIntBetween(1, Integer.MAX_VALUE));
fail("max determinized state must be positive");
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(), "maxDeterminizedStates must not be negative");
}
}
}
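For reference, flags are RegexpFlag names joined with '|', exactly as the random generator above assembles them; a minimal sketch with illustrative values:

    RegexOptions.Builder builder = RegexOptions.builder();
    builder.setFlags("ANYSTRING|INTERSECTION");   // RegexpFlag names, '|'-separated
    builder.setMaxDeterminizedStates(500);        // must not be negative
    RegexOptions options = builder.build();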

View File

@ -0,0 +1,115 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
/**
* Base class for testing serialization and equality for
* {@link Writeable} models
*/
public abstract class WritableTestCase<M extends Writeable> extends ESTestCase {
protected static final int NUMBER_OF_RUNS = 20;
/**
* create random model that is put under test
*/
protected abstract M createTestModel();
/**
* mutate the given model so the returned model is different
*/
protected abstract M createMutation(M original) throws IOException;
/**
* model prototype to read serialized format
*/
protected abstract M readFrom(StreamInput in) throws IOException;
/**
* Test serialization and deserialization of the tested model.
*/
public void testSerialization() throws IOException {
for (int i = 0; i < NUMBER_OF_RUNS; i++) {
M testModel = createTestModel();
M deserializedModel = copyModel(testModel);
assertEquals(testModel, deserializedModel);
assertEquals(testModel.hashCode(), deserializedModel.hashCode());
assertNotSame(testModel, deserializedModel);
}
}
/**
* Test equality and hashCode properties
*/
@SuppressWarnings("unchecked")
public void testEqualsAndHashcode() throws IOException {
M firstModel = createTestModel();
String modelName = firstModel.getClass().getSimpleName();
assertFalse(modelName + " is equal to null", firstModel.equals(null));
assertFalse(modelName + " is equal to incompatible type", firstModel.equals(""));
assertTrue(modelName + " is not equal to self", firstModel.equals(firstModel));
assertThat("same "+ modelName + "'s hashcode returns different values if called multiple times", firstModel.hashCode(),
equalTo(firstModel.hashCode()));
assertThat("different " + modelName + " should not be equal", createMutation(firstModel), not(equalTo(firstModel)));
M secondModel = copyModel(firstModel);
assertTrue(modelName + " is not equal to self", secondModel.equals(secondModel));
assertTrue(modelName + " is not equal to its copy", firstModel.equals(secondModel));
assertTrue("equals is not symmetric", secondModel.equals(firstModel));
assertThat(modelName + " copy's hashcode is different from original hashcode", secondModel.hashCode(),
equalTo(firstModel.hashCode()));
M thirdModel = copyModel(secondModel);
assertTrue(modelName + " is not equal to self", thirdModel.equals(thirdModel));
assertTrue(modelName + " is not equal to its copy", secondModel.equals(thirdModel));
assertThat(modelName + " copy's hashcode is different from original hashcode", secondModel.hashCode(),
equalTo(thirdModel.hashCode()));
assertTrue("equals is not transitive", firstModel.equals(thirdModel));
assertThat(modelName + " copy's hashcode is different from original hashcode", firstModel.hashCode(),
equalTo(thirdModel.hashCode()));
assertTrue(modelName + " equals is not symmetric", thirdModel.equals(secondModel));
assertTrue(modelName + " equals is not symmetric", thirdModel.equals(firstModel));
}
private M copyModel(M original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), provideNamedWritableRegistry())) {
return readFrom(in);
}
}
}
protected NamedWriteableRegistry provideNamedWritableRegistry() {
return new NamedWriteableRegistry();
}
}
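Subclasses whose model nests NamedWriteables override provideNamedWritableRegistry() so copyModel() can resolve prototypes during deserialization; a sketch of such an override, reusing the registration pattern that PhraseSuggestionBuilderTests applies below:

    @Override
    protected NamedWriteableRegistry provideNamedWritableRegistry() {
        NamedWriteableRegistry registry = new NamedWriteableRegistry();
        registry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);   // illustrative registration
        return registry;
    }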

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.suggest.phrase;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -31,28 +30,14 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.function.Consumer;
import static org.hamcrest.Matchers.equalTo;
@ -148,73 +133,22 @@ public class DirectCandidateGeneratorTests extends ESTestCase{
}
}
/**
* test that build() outputs a {@link DirectCandidateGenerator} that is similar to the one
* we would get when parsing the xContent the test generator is rendering out
*/
public void testBuild() throws IOException {
long start = System.currentTimeMillis();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), Settings.EMPTY);
AnalysisService mockAnalysisService = new AnalysisService(idxSettings, Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()) {
@Override
public NamedAnalyzer analyzer(String name) {
return new NamedAnalyzer(name, new WhitespaceAnalyzer());
}
};
MapperService mockMapperService = new MapperService(idxSettings, mockAnalysisService , null, new IndicesModule().getMapperRegistry(), null) {
@Override
public MappedFieldType fullName(String fullName) {
return new StringFieldType();
}
};
QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, mockMapperService, null, null, null) {
@Override
public MappedFieldType fieldMapper(String name) {
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name);
return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType();
}
};
mockShardContext.setMapUnmappedFieldAsString(true);
for (int runs = 0; runs < NUMBER_OF_RUNS; runs++) {
DirectCandidateGeneratorBuilder generator = randomCandidateGenerator();
// first, build via DirectCandidateGenerator#build()
DirectCandidateGenerator contextGenerator = generator.build(mockShardContext);
// second, render random test generator to xContent and parse using
// PhraseSuggestParser
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
generator.toXContent(builder, ToXContent.EMPTY_PARAMS);
XContentParser parser = XContentHelper.createParser(builder.bytes());
DirectCandidateGenerator secondGenerator = PhraseSuggestParser.parseCandidateGenerator(parser,
mockShardContext.getMapperService(), mockShardContext.parseFieldMatcher());
// compare their properties
assertNotSame(contextGenerator, secondGenerator);
assertEquals(contextGenerator.field(), secondGenerator.field());
assertEquals(contextGenerator.accuracy(), secondGenerator.accuracy(), Float.MIN_VALUE);
assertEquals(contextGenerator.maxTermFreq(), secondGenerator.maxTermFreq(), Float.MIN_VALUE);
assertEquals(contextGenerator.maxEdits(), secondGenerator.maxEdits());
assertEquals(contextGenerator.maxInspections(), secondGenerator.maxInspections());
assertEquals(contextGenerator.minDocFreq(), secondGenerator.minDocFreq(), Float.MIN_VALUE);
assertEquals(contextGenerator.minWordLength(), secondGenerator.minWordLength());
assertEquals(contextGenerator.postFilter(), secondGenerator.postFilter());
assertEquals(contextGenerator.prefixLength(), secondGenerator.prefixLength());
assertEquals(contextGenerator.preFilter(), secondGenerator.preFilter());
assertEquals(contextGenerator.sort(), secondGenerator.sort());
assertEquals(contextGenerator.size(), secondGenerator.size());
// some instances of StringDistance don't support equals, just checking the class here
assertEquals(contextGenerator.stringDistance().getClass(), secondGenerator.stringDistance().getClass());
assertEquals(contextGenerator.suggestMode(), secondGenerator.suggestMode());
}
public static void assertEqualGenerators(DirectCandidateGenerator first, DirectCandidateGenerator second) {
assertEquals(first.field(), second.field());
assertEquals(first.accuracy(), second.accuracy(), Float.MIN_VALUE);
assertEquals(first.maxTermFreq(), second.maxTermFreq(), Float.MIN_VALUE);
assertEquals(first.maxEdits(), second.maxEdits());
assertEquals(first.maxInspections(), second.maxInspections());
assertEquals(first.minDocFreq(), second.minDocFreq(), Float.MIN_VALUE);
assertEquals(first.minWordLength(), second.minWordLength());
assertEquals(first.postFilter(), second.postFilter());
assertEquals(first.prefixLength(), second.prefixLength());
assertEquals(first.preFilter(), second.preFilter());
assertEquals(first.sort(), second.sort());
assertEquals(first.size(), second.size());
// some instances of StringDistance don't support equals, just checking the class here
assertEquals(first.stringDistance().getClass(), second.stringDistance().getClass());
assertEquals(first.suggestMode(), second.suggestMode());
}
/**
@ -306,12 +240,6 @@ public class DirectCandidateGeneratorTests extends ESTestCase{
return generator;
}
private static <T> void maybeSet(Consumer<T> consumer, T value) {
if (randomBoolean()) {
consumer.accept(value);
}
}
private static DirectCandidateGeneratorBuilder serializedCopy(DirectCandidateGeneratorBuilder original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output);

View File

@ -19,15 +19,17 @@
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
import static org.hamcrest.Matchers.instanceOf;
public class LaplaceModelTests extends SmoothingModelTestCase {
@Override
protected SmoothingModel createTestModel() {
return createRandomModel();
}
static SmoothingModel createRandomModel() {
return new Laplace(randomDoubleBetween(0.0, 10.0, false));
}

View File

@ -19,15 +19,16 @@
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
import static org.hamcrest.Matchers.instanceOf;
public class LinearInterpolationModelTests extends SmoothingModelTestCase {
@Override
protected SmoothingModel createTestModel() {
return createRandomModel();
}
static LinearInterpolation createRandomModel() {
double trigramLambda = randomDoubleBetween(0.0, 10.0, false);
double bigramLambda = randomDoubleBetween(0.0, 10.0, false);
double unigramLambda = randomDoubleBetween(0.0, 10.0, false);

View File

@ -0,0 +1,242 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import static org.hamcrest.Matchers.instanceOf;
public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase<PhraseSuggestionBuilder> {
@BeforeClass
public static void initSmoothingModels() {
namedWriteableRegistry.registerPrototype(SmoothingModel.class, Laplace.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, LinearInterpolation.PROTOTYPE);
namedWriteableRegistry.registerPrototype(SmoothingModel.class, StupidBackoff.PROTOTYPE);
}
@Override
protected PhraseSuggestionBuilder randomSuggestionBuilder() {
return randomPhraseSuggestionBuilder();
}
public static PhraseSuggestionBuilder randomPhraseSuggestionBuilder() {
PhraseSuggestionBuilder testBuilder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
setCommonPropertiesOnRandomBuilder(testBuilder);
maybeSet(testBuilder::maxErrors, randomFloat());
maybeSet(testBuilder::separator, randomAsciiOfLengthBetween(1, 10));
maybeSet(testBuilder::realWordErrorLikelihood, randomFloat());
maybeSet(testBuilder::confidence, randomFloat());
maybeSet(testBuilder::collateQuery, randomAsciiOfLengthBetween(3, 20));
// collate query prune and parameters will only be used when query is set
if (testBuilder.collateQuery() != null) {
maybeSet(testBuilder::collatePrune, randomBoolean());
if (randomBoolean()) {
Map<String, Object> collateParams = new HashMap<>();
int numParams = randomIntBetween(1, 5);
for (int i = 0; i < numParams; i++) {
collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5));
}
testBuilder.collateParams(collateParams);
}
}
if (randomBoolean()) {
// preTag, postTag
testBuilder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20));
}
maybeSet(testBuilder::gramSize, randomIntBetween(1, 5));
maybeSet(testBuilder::forceUnigrams, randomBoolean());
maybeSet(testBuilder::tokenLimit, randomIntBetween(1, 20));
if (randomBoolean()) {
testBuilder.smoothingModel(randomSmoothingModel());
}
if (randomBoolean()) {
int numGenerators = randomIntBetween(1, 5);
for (int i = 0; i < numGenerators; i++) {
testBuilder.addCandidateGenerator(DirectCandidateGeneratorTests.randomCandidateGenerator());
}
}
return testBuilder;
}
private static SmoothingModel randomSmoothingModel() {
SmoothingModel model = null;
switch (randomIntBetween(0,2)) {
case 0:
model = LaplaceModelTests.createRandomModel();
break;
case 1:
model = StupidBackoffModelTests.createRandomModel();
break;
case 2:
model = LinearInterpolationModelTests.createRandomModel();
break;
}
return model;
}
@Override
protected void mutateSpecificParameters(PhraseSuggestionBuilder builder) throws IOException {
switch (randomIntBetween(0, 12)) {
case 0:
builder.maxErrors(randomValueOtherThan(builder.maxErrors(), () -> randomFloat()));
break;
case 1:
builder.realWordErrorLikelihood(randomValueOtherThan(builder.realWordErrorLikelihood(), () -> randomFloat()));
break;
case 2:
builder.confidence(randomValueOtherThan(builder.confidence(), () -> randomFloat()));
break;
case 3:
builder.gramSize(randomValueOtherThan(builder.gramSize(), () -> randomIntBetween(1, 5)));
break;
case 4:
builder.tokenLimit(randomValueOtherThan(builder.tokenLimit(), () -> randomIntBetween(1, 20)));
break;
case 5:
builder.separator(randomValueOtherThan(builder.separator(), () -> randomAsciiOfLengthBetween(1, 10)));
break;
case 6:
Template collateQuery = builder.collateQuery();
if (collateQuery != null) {
builder.collateQuery(randomValueOtherThan(collateQuery.getScript(), () -> randomAsciiOfLengthBetween(3, 20)));
} else {
builder.collateQuery(randomAsciiOfLengthBetween(3, 20));
}
break;
case 7:
builder.collatePrune(builder.collatePrune() == null ? randomBoolean() : !builder.collatePrune());
break;
case 8:
// preTag, postTag
String currentPre = builder.preTag();
if (currentPre != null) {
// simply double both values
builder.highlight(builder.preTag() + builder.preTag(), builder.postTag() + builder.postTag());
} else {
builder.highlight(randomAsciiOfLengthBetween(3, 20), randomAsciiOfLengthBetween(3, 20));
}
break;
case 9:
builder.forceUnigrams(builder.forceUnigrams() == null ? randomBoolean() : !builder.forceUnigrams());
break;
case 10:
Map<String, Object> collateParams = builder.collateParams() == null ? new HashMap<>(1) : builder.collateParams();
collateParams.put(randomAsciiOfLength(5), randomAsciiOfLength(5));
builder.collateParams(collateParams);
break;
case 11:
builder.smoothingModel(randomValueOtherThan(builder.smoothingModel(), PhraseSuggestionBuilderTests::randomSmoothingModel));
break;
case 12:
builder.addCandidateGenerator(DirectCandidateGeneratorTests.randomCandidateGenerator());
break;
}
}
public void testInvalidParameters() throws IOException {
// test missing field name
try {
new PhraseSuggestionBuilder(null);
fail("Should not allow null as field name");
} catch (NullPointerException e) {
assertEquals("suggestion requires a field name", e.getMessage());
}
// test empty field name
try {
new PhraseSuggestionBuilder("");
fail("Should not allow empty string as field name");
} catch (IllegalArgumentException e) {
assertEquals("suggestion field name is empty", e.getMessage());
}
PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
try {
builder.gramSize(0);
fail("Should not allow gramSize < 1");
} catch (IllegalArgumentException e) {
assertEquals("gramSize must be >= 1", e.getMessage());
}
try {
builder.gramSize(-1);
fail("Should not allow gramSize < 1");
} catch (IllegalArgumentException e) {
assertEquals("gramSize must be >= 1", e.getMessage());
}
try {
builder.maxErrors(-1);
fail("Should not allow maxErrors < 0");
} catch (IllegalArgumentException e) {
assertEquals("max_error must be > 0.0", e.getMessage());
}
try {
builder.separator(null);
fail("Should not allow null as separator");
} catch (NullPointerException e) {
assertEquals("separator cannot be set to null", e.getMessage());
}
try {
builder.realWordErrorLikelihood(-1);
fail("Should not allow real world error likelihood < 0");
} catch (IllegalArgumentException e) {
assertEquals("real_word_error_likelihood must be > 0.0", e.getMessage());
}
try {
builder.confidence(-1);
fail("Should not allow confidence < 0");
} catch (IllegalArgumentException e) {
assertEquals("confidence must be >= 0.0", e.getMessage());
}
try {
builder.tokenLimit(0);
fail("token_limit must be >= 1");
} catch (IllegalArgumentException e) {
assertEquals("token_limit must be >= 1", e.getMessage());
}
try {
if (randomBoolean()) {
builder.highlight(null, "</b>");
} else {
builder.highlight("<b>", null);
}
fail("Pre and post tag must both be null or both not be null.");
} catch (IllegalArgumentException e) {
assertEquals("Pre and post tag must both be null or both not be null.", e.getMessage());
}
}
}
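Pulling the valid ranges from testInvalidParameters together, a minimal PhraseSuggestionBuilder sketch ("body" is an illustrative field name):

    PhraseSuggestionBuilder suggestion = new PhraseSuggestionBuilder("body");
    suggestion.maxErrors(0.5f);              // must be > 0.0
    suggestion.confidence(1.0f);             // must be >= 0.0
    suggestion.gramSize(2);                  // must be >= 1
    suggestion.tokenLimit(10);               // must be >= 1
    suggestion.highlight("<em>", "</em>");   // pre and post tag must be set together
    suggestion.smoothingModel(new Laplace(0.7));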

View File

@ -45,10 +45,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -97,7 +93,8 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
* Test that creates new smoothing model from a random test smoothing model and checks both for equality
*/
public void testFromXContent() throws IOException {
QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(Settings.settingsBuilder().build(), Collections.emptyMap()));
QueryParseContext context = new QueryParseContext(
new IndicesQueriesRegistry(Settings.settingsBuilder().build(), Collections.emptyMap()));
context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
SmoothingModel testModel = createTestModel();
@ -113,7 +110,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
parser.nextToken(); // go to start token, real parsing would do that in the outer element parser
SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class,
testModel.getWriteableName());
SmoothingModel parsedModel = prototype.fromXContent(context);
SmoothingModel parsedModel = prototype.innerFromXContent(context);
assertNotSame(testModel, parsedModel);
assertEquals(testModel, parsedModel);
assertEquals(testModel.hashCode(), parsedModel.hashCode());
@ -134,7 +131,8 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
writer.addDocument(doc);
DirectoryReader ir = DirectoryReader.open(writer);
WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir , "field"), "field", 0.9d, BytesRefs.toBytesRef(" "));
WordScorer wordScorer = testModel.buildWordScorerFactory().newScorer(ir, MultiFields.getTerms(ir, "field"), "field", 0.9d,
BytesRefs.toBytesRef(" "));
assertWordScorer(wordScorer, testModel);
}
@ -159,35 +157,39 @@ public abstract class SmoothingModelTestCase extends ESTestCase {
*/
@SuppressWarnings("unchecked")
public void testEqualsAndHashcode() throws IOException {
SmoothingModel firstModel = createTestModel();
assertFalse("smoothing model is equal to null", firstModel.equals(null));
assertFalse("smoothing model is equal to incompatible type", firstModel.equals(""));
assertTrue("smoothing model is not equal to self", firstModel.equals(firstModel));
assertThat("same smoothing model's hashcode returns different values if called multiple times", firstModel.hashCode(),
equalTo(firstModel.hashCode()));
assertThat("different smoothing models should not be equal", createMutation(firstModel), not(equalTo(firstModel)));
SmoothingModel firstModel = createTestModel();
assertFalse("smoothing model is equal to null", firstModel.equals(null));
assertFalse("smoothing model is equal to incompatible type", firstModel.equals(""));
assertTrue("smoothing model is not equal to self", firstModel.equals(firstModel));
assertThat("same smoothing model's hashcode returns different values if called multiple times", firstModel.hashCode(),
equalTo(firstModel.hashCode()));
assertThat("different smoothing models should not be equal", createMutation(firstModel), not(equalTo(firstModel)));
SmoothingModel secondModel = copyModel(firstModel);
assertTrue("smoothing model is not equal to self", secondModel.equals(secondModel));
assertTrue("smoothing model is not equal to its copy", firstModel.equals(secondModel));
assertTrue("equals is not symmetric", secondModel.equals(firstModel));
assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), equalTo(firstModel.hashCode()));
SmoothingModel secondModel = copyModel(firstModel);
assertTrue("smoothing model is not equal to self", secondModel.equals(secondModel));
assertTrue("smoothing model is not equal to its copy", firstModel.equals(secondModel));
assertTrue("equals is not symmetric", secondModel.equals(firstModel));
assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(),
equalTo(firstModel.hashCode()));
SmoothingModel thirdModel = copyModel(secondModel);
assertTrue("smoothing model is not equal to self", thirdModel.equals(thirdModel));
assertTrue("smoothing model is not equal to its copy", secondModel.equals(thirdModel));
assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(), equalTo(thirdModel.hashCode()));
assertTrue("equals is not transitive", firstModel.equals(thirdModel));
assertThat("smoothing model copy's hashcode is different from original hashcode", firstModel.hashCode(), equalTo(thirdModel.hashCode()));
assertTrue("equals is not symmetric", thirdModel.equals(secondModel));
assertTrue("equals is not symmetric", thirdModel.equals(firstModel));
SmoothingModel thirdModel = copyModel(secondModel);
assertTrue("smoothing model is not equal to self", thirdModel.equals(thirdModel));
assertTrue("smoothing model is not equal to its copy", secondModel.equals(thirdModel));
assertThat("smoothing model copy's hashcode is different from original hashcode", secondModel.hashCode(),
equalTo(thirdModel.hashCode()));
assertTrue("equals is not transitive", firstModel.equals(thirdModel));
assertThat("smoothing model copy's hashcode is different from original hashcode", firstModel.hashCode(),
equalTo(thirdModel.hashCode()));
assertTrue("equals is not symmetric", thirdModel.equals(secondModel));
assertTrue("equals is not symmetric", thirdModel.equals(firstModel));
}
static SmoothingModel copyModel(SmoothingModel original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class, original.getWriteableName());
SmoothingModel prototype = (SmoothingModel) namedWriteableRegistry.getPrototype(SmoothingModel.class,
original.getWriteableName());
return prototype.readFrom(in);
}
}

View File

@ -19,15 +19,16 @@
package org.elasticsearch.search.suggest.phrase;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
import static org.hamcrest.Matchers.instanceOf;
public class StupidBackoffModelTests extends SmoothingModelTestCase {
@Override
protected SmoothingModel createTestModel() {
return createRandomModel();
}
static SmoothingModel createRandomModel() {
return new StupidBackoff(randomDoubleBetween(0.0, 10.0, false));
}

View File

@ -0,0 +1,70 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.common.io.stream.AbstractWriteableEnumTestCase;
import org.elasticsearch.search.suggest.SortBy;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
/**
* Test the {@link SortBy} enum.
*/
public class SortByTests extends AbstractWriteableEnumTestCase {
@Override
public void testValidOrdinals() {
assertThat(SortBy.SCORE.ordinal(), equalTo(0));
assertThat(SortBy.FREQUENCY.ordinal(), equalTo(1));
}
@Override
public void testFromString() {
assertThat(SortBy.resolve("score"), equalTo(SortBy.SCORE));
assertThat(SortBy.resolve("frequency"), equalTo(SortBy.FREQUENCY));
final String doesntExist = "doesnt_exist";
try {
SortBy.resolve(doesntExist);
fail("SortBy should not have an element " + doesntExist);
} catch (IllegalArgumentException e) {
// expected: resolve rejects unknown names
}
try {
SortBy.resolve(null);
fail("SortBy.resolve on a null value should throw an exception.");
} catch (NullPointerException e) {
assertThat(e.getMessage(), equalTo("Input string is null"));
}
}
@Override
public void testWriteTo() throws IOException {
assertWriteToStream(SortBy.SCORE, 0);
assertWriteToStream(SortBy.FREQUENCY, 1);
}
@Override
public void testReadFrom() throws IOException {
assertReadFromStream(0, SortBy.SCORE);
assertReadFromStream(1, SortBy.FREQUENCY);
}
}

View File

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.common.io.stream.AbstractWriteableEnumTestCase;
import java.io.IOException;
import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl;
import static org.hamcrest.Matchers.equalTo;
/**
* Test for the {@link StringDistanceImpl} enum.
*/
public class StringDistanceImplTests extends AbstractWriteableEnumTestCase {
@Override
public void testValidOrdinals() {
assertThat(StringDistanceImpl.INTERNAL.ordinal(), equalTo(0));
assertThat(StringDistanceImpl.DAMERAU_LEVENSHTEIN.ordinal(), equalTo(1));
assertThat(StringDistanceImpl.LEVENSTEIN.ordinal(), equalTo(2));
assertThat(StringDistanceImpl.JAROWINKLER.ordinal(), equalTo(3));
assertThat(StringDistanceImpl.NGRAM.ordinal(), equalTo(4));
}
@Override
public void testFromString() {
assertThat(StringDistanceImpl.resolve("internal"), equalTo(StringDistanceImpl.INTERNAL));
assertThat(StringDistanceImpl.resolve("damerau_levenshtein"), equalTo(StringDistanceImpl.DAMERAU_LEVENSHTEIN));
assertThat(StringDistanceImpl.resolve("levenstein"), equalTo(StringDistanceImpl.LEVENSTEIN));
assertThat(StringDistanceImpl.resolve("jarowinkler"), equalTo(StringDistanceImpl.JAROWINKLER));
assertThat(StringDistanceImpl.resolve("ngram"), equalTo(StringDistanceImpl.NGRAM));
final String doesntExist = "doesnt_exist";
try {
StringDistanceImpl.resolve(doesntExist);
fail("StringDistanceImpl should not have an element " + doesntExist);
} catch (IllegalArgumentException e) {
}
try {
StringDistanceImpl.resolve(null);
fail("StringDistanceImpl.resolve on a null value should throw an exception.");
} catch (NullPointerException e) {
assertThat(e.getMessage(), equalTo("Input string is null"));
}
}
@Override
public void testWriteTo() throws IOException {
assertWriteToStream(StringDistanceImpl.INTERNAL, 0);
assertWriteToStream(StringDistanceImpl.DAMERAU_LEVENSHTEIN, 1);
assertWriteToStream(StringDistanceImpl.LEVENSTEIN, 2);
assertWriteToStream(StringDistanceImpl.JAROWINKLER, 3);
assertWriteToStream(StringDistanceImpl.NGRAM, 4);
}
@Override
public void testReadFrom() throws IOException {
assertReadFromStream(0, StringDistanceImpl.INTERNAL);
assertReadFromStream(1, StringDistanceImpl.DAMERAU_LEVENSHTEIN);
assertReadFromStream(2, StringDistanceImpl.LEVENSTEIN);
assertReadFromStream(3, StringDistanceImpl.JAROWINKLER);
assertReadFromStream(4, StringDistanceImpl.NGRAM);
}
}

View File

@ -0,0 +1,74 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import org.elasticsearch.common.io.stream.AbstractWriteableEnumTestCase;
import java.io.IOException;
import static org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
import static org.hamcrest.Matchers.equalTo;
/**
* Test the {@link SuggestMode} enum.
*/
public class SuggestModeTests extends AbstractWriteableEnumTestCase {
@Override
public void testValidOrdinals() {
assertThat(SuggestMode.MISSING.ordinal(), equalTo(0));
assertThat(SuggestMode.POPULAR.ordinal(), equalTo(1));
assertThat(SuggestMode.ALWAYS.ordinal(), equalTo(2));
}
@Override
public void testFromString() {
assertThat(SuggestMode.resolve("missing"), equalTo(SuggestMode.MISSING));
assertThat(SuggestMode.resolve("popular"), equalTo(SuggestMode.POPULAR));
assertThat(SuggestMode.resolve("always"), equalTo(SuggestMode.ALWAYS));
final String doesntExist = "doesnt_exist";
try {
SuggestMode.resolve(doesntExist);
fail("SuggestMode should not have an element " + doesntExist);
} catch (IllegalArgumentException e) {
}
try {
SuggestMode.resolve(null);
fail("SuggestMode.resolve on a null value should throw an exception.");
} catch (NullPointerException e) {
assertThat(e.getMessage(), equalTo("Input string is null"));
}
}
@Override
public void testWriteTo() throws IOException {
assertWriteToStream(SuggestMode.MISSING, 0);
assertWriteToStream(SuggestMode.POPULAR, 1);
assertWriteToStream(SuggestMode.ALWAYS, 2);
}
@Override
public void testReadFrom() throws IOException {
assertReadFromStream(0, SuggestMode.MISSING);
assertReadFromStream(1, SuggestMode.POPULAR);
assertReadFromStream(2, SuggestMode.ALWAYS);
}
}

View File

@ -0,0 +1,325 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.term;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
import java.io.IOException;
import java.util.Locale;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_ACCURACY;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_EDITS;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_INSPECTIONS;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MAX_TERM_FREQ;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_DOC_FREQ;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_MIN_WORD_LENGTH;
import static org.elasticsearch.search.suggest.DirectSpellcheckerSettings.DEFAULT_PREFIX_LENGTH;
import static org.hamcrest.Matchers.containsString;
/**
* Test the {@link TermSuggestionBuilder} class.
*/
public class TermSuggestionBuilderTests extends AbstractSuggestionBuilderTestCase<TermSuggestionBuilder> {
/**
* Creates a random suggestion builder, renders it to xContent and back to a new instance that should be equal to the original.
*/
@Override
protected TermSuggestionBuilder randomSuggestionBuilder() {
return randomTermSuggestionBuilder();
}
/**
* Creates a random TermSuggestionBuilder
*/
public static TermSuggestionBuilder randomTermSuggestionBuilder() {
TermSuggestionBuilder testBuilder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
setCommonPropertiesOnRandomBuilder(testBuilder);
maybeSet(testBuilder::suggestMode, randomSuggestMode());
maybeSet(testBuilder::accuracy, randomFloat());
maybeSet(testBuilder::sort, randomSort());
maybeSet(testBuilder::stringDistance, randomStringDistance());
maybeSet(testBuilder::maxEdits, randomIntBetween(1, 2));
maybeSet(testBuilder::maxInspections, randomInt(Integer.MAX_VALUE));
maybeSet(testBuilder::maxTermFreq, randomFloat());
maybeSet(testBuilder::prefixLength, randomInt(Integer.MAX_VALUE));
maybeSet(testBuilder::minWordLength, randomInt(Integer.MAX_VALUE));
maybeSet(testBuilder::minDocFreq, randomFloat());
return testBuilder;
}
private static SuggestMode randomSuggestMode() {
final int randomVal = randomIntBetween(0, 2);
switch (randomVal) {
case 0: return SuggestMode.MISSING;
case 1: return SuggestMode.POPULAR;
case 2: return SuggestMode.ALWAYS;
default: throw new IllegalArgumentException("No suggest mode with an ordinal of " + randomVal);
}
}
private static SortBy randomSort() {
int randomVal = randomIntBetween(0, 1);
switch (randomVal) {
case 0: return SortBy.SCORE;
case 1: return SortBy.FREQUENCY;
default: throw new IllegalArgumentException("No sort mode with an ordinal of " + randomVal);
}
}
private static StringDistanceImpl randomStringDistance() {
int randomVal = randomIntBetween(0, 4);
switch (randomVal) {
case 0: return StringDistanceImpl.INTERNAL;
case 1: return StringDistanceImpl.DAMERAU_LEVENSHTEIN;
case 2: return StringDistanceImpl.LEVENSTEIN;
case 3: return StringDistanceImpl.JAROWINKLER;
case 4: return StringDistanceImpl.NGRAM;
default: throw new IllegalArgumentException("No string distance algorithm with an ordinal of " + randomVal);
}
}
@Override
protected void mutateSpecificParameters(TermSuggestionBuilder builder) throws IOException {
switch (randomIntBetween(0, 9)) {
case 0:
builder.suggestMode(randomValueOtherThan(builder.suggestMode(), () -> randomSuggestMode()));
break;
case 1:
builder.accuracy(randomValueOtherThan(builder.accuracy(), () -> randomFloat()));
break;
case 2:
builder.sort(randomValueOtherThan(builder.sort(), () -> randomSort()));
break;
case 3:
builder.stringDistance(randomValueOtherThan(builder.stringDistance(), () -> randomStringDistance()));
break;
case 4:
builder.maxEdits(randomValueOtherThan(builder.maxEdits(), () -> randomIntBetween(1, 2)));
break;
case 5:
builder.maxInspections(randomValueOtherThan(builder.maxInspections(), () -> randomInt(Integer.MAX_VALUE)));
break;
case 6:
builder.maxTermFreq(randomValueOtherThan(builder.maxTermFreq(), () -> randomFloat()));
break;
case 7:
builder.prefixLength(randomValueOtherThan(builder.prefixLength(), () -> randomInt(Integer.MAX_VALUE)));
break;
case 8:
builder.minWordLength(randomValueOtherThan(builder.minWordLength(), () -> randomInt(Integer.MAX_VALUE)));
break;
case 9:
builder.minDocFreq(randomValueOtherThan(builder.minDocFreq(), () -> randomFloat()));
break;
default:
break; // do nothing
}
}
public void testInvalidParameters() throws IOException {
// test missing field name
try {
new TermSuggestionBuilder(null);
fail("Should not allow null as field name");
} catch (NullPointerException e) {
assertEquals("suggestion requires a field name", e.getMessage());
}
// test empty field name
try {
new TermSuggestionBuilder("");
fail("Should not allow empty string as field name");
} catch (IllegalArgumentException e) {
assertEquals("suggestion field name is empty", e.getMessage());
}
TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
// test invalid accuracy values
try {
builder.accuracy(-0.5f);
fail("Should not allow accuracy to be set to a negative value.");
} catch (IllegalArgumentException e) {
}
try {
builder.accuracy(1.1f);
fail("Should not allow accuracy to be greater than 1.0.");
} catch (IllegalArgumentException e) {
}
// test invalid max edit distance values
try {
builder.maxEdits(0);
fail("Should not allow maxEdits to be less than 1.");
} catch (IllegalArgumentException e) {
}
try {
builder.maxEdits(-1);
fail("Should not allow maxEdits to be a negative value.");
} catch (IllegalArgumentException e) {
}
try {
builder.maxEdits(3);
fail("Should not allow maxEdits to be greater than 2.");
} catch (IllegalArgumentException e) {
}
// test invalid max inspections values
try {
builder.maxInspections(-1);
fail("Should not allow maxInspections to be a negative value.");
} catch (IllegalArgumentException e) {
}
// test invalid max term freq values
try {
builder.maxTermFreq(-0.5f);
fail("Should not allow max term freq to be a negative value.");
} catch (IllegalArgumentException e) {
}
try {
builder.maxTermFreq(1.5f);
fail("If max term freq is greater than 1, it must be a whole number.");
} catch (IllegalArgumentException e) {
}
try {
builder.maxTermFreq(2.0f); // this should be allowed
} catch (IllegalArgumentException e) {
fail("A max term freq greater than 1 that is a whole number should be allowed.");
}
// test invalid min doc freq values
try {
builder.minDocFreq(-0.5f);
fail("Should not allow min doc freq to be a negative value.");
} catch (IllegalArgumentException e) {
}
try {
builder.minDocFreq(1.5f);
fail("If min doc freq is greater than 1, it must be a whole number.");
} catch (IllegalArgumentException e) {
}
try {
builder.minDocFreq(2.0f); // this should be allowed
} catch (IllegalArgumentException e) {
fail("A min doc freq greater than 1 that is a whole number should be allowed.");
}
// test invalid min word length values
try {
builder.minWordLength(0);
fail("A min word length < 1 should not be allowed.");
} catch (IllegalArgumentException e) {
}
try {
builder.minWordLength(-1);
fail("Should not allow min word length to be a negative value.");
} catch (IllegalArgumentException e) {
}
// test invalid prefix length values
try {
builder.prefixLength(-1);
fail("Should not allow prefix length to be a negative value.");
} catch (IllegalArgumentException e) {
}
// test invalid size values
try {
builder.size(0);
fail("Size must be a positive value.");
} catch (IllegalArgumentException e) {
}
try {
builder.size(-1);
fail("Size must be a positive value.");
} catch (IllegalArgumentException e) {
}
// null values not allowed for enums
try {
builder.sort(null);
fail("Should not allow setting a null sort value.");
} catch (NullPointerException e) {
}
try {
builder.stringDistance(null);
fail("Should not allow setting a null string distance value.");
} catch (NullPointerException e) {
}
try {
builder.suggestMode(null);
fail("Should not allow setting a null suggest mode value.");
} catch (NullPointerException e) {
}
}
public void testDefaultValuesSet() {
TermSuggestionBuilder builder = new TermSuggestionBuilder(randomAsciiOfLengthBetween(2, 20));
assertEquals(DEFAULT_ACCURACY, builder.accuracy(), Float.MIN_VALUE);
assertEquals(DEFAULT_MAX_EDITS, builder.maxEdits());
assertEquals(DEFAULT_MAX_INSPECTIONS, builder.maxInspections());
assertEquals(DEFAULT_MAX_TERM_FREQ, builder.maxTermFreq(), Float.MIN_VALUE);
assertEquals(DEFAULT_MIN_DOC_FREQ, builder.minDocFreq(), Float.MIN_VALUE);
assertEquals(DEFAULT_MIN_WORD_LENGTH, builder.minWordLength());
assertEquals(DEFAULT_PREFIX_LENGTH, builder.prefixLength());
assertEquals(SortBy.SCORE, builder.sort());
assertEquals(StringDistanceImpl.INTERNAL, builder.stringDistance());
assertEquals(SuggestMode.MISSING, builder.suggestMode());
}
public void testMalformedJson() {
final String field = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT);
String suggest = "{\n" +
" \"bad-payload\" : {\n" +
" \"text\" : \"the amsterdma meetpu\",\n" +
" \"term\" : {\n" +
" \"field\" : { \"" + field + "\" : \"bad-object\" }\n" +
" }\n" +
" }\n" +
"}";
try {
final SuggestBuilder suggestBuilder = SuggestBuilder.fromXContent(newParseContext(suggest), suggesters);
fail("Should not have been able to create SuggestBuilder from malformed JSON: " + suggestBuilder);
} catch (Exception e) {
assertThat(e.getMessage(), containsString("parsing failed"));
}
}
private void assertSpellcheckerSettings(DirectSpellcheckerSettings oldSettings, DirectSpellcheckerSettings newSettings) {
final double delta = 0.0d;
// make sure the objects aren't the same
assertNotSame(oldSettings, newSettings);
// make sure the objects aren't null
assertNotNull(oldSettings);
assertNotNull(newSettings);
// and now, make sure they are equal.
assertEquals(oldSettings.accuracy(), newSettings.accuracy(), delta);
assertEquals(oldSettings.maxEdits(), newSettings.maxEdits());
assertEquals(oldSettings.maxInspections(), newSettings.maxInspections());
assertEquals(oldSettings.maxTermFreq(), newSettings.maxTermFreq(), delta);
assertEquals(oldSettings.minDocFreq(), newSettings.minDocFreq(), delta);
assertEquals(oldSettings.minWordLength(), newSettings.minWordLength());
assertEquals(oldSettings.prefixLength(), newSettings.prefixLength());
assertEquals(oldSettings.sort(), newSettings.sort());
assertEquals(oldSettings.stringDistance().getClass(), newSettings.stringDistance().getClass());
assertEquals(oldSettings.suggestMode().getClass(), newSettings.suggestMode().getClass());
}
}

View File

@ -215,3 +215,13 @@ The inner DirectCandidateGenerator class has been moved out to its own class called DirectCandidateGeneratorBuilder.
The `sortMode` setter in `FieldSortBuilder`, `GeoDistanceSortBuilder` and `ScriptSortBuilder` now
accept a `SortMode` enum instead of a String constant. Also the getter returns the same enum type.
===== SuggestBuilder
The `setText` method has been renamed to `setGlobalText` to make the intent clearer, and a `getGlobalText` method has been added.
The `addSuggestion` method now requires the user-specified suggestion name that was previously passed to the constructor of each suggestion.
===== SuggestionBuilder
The `field` setter has been removed. Instead, the field name needs to be specified as a constructor argument.
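For illustration, a minimal sketch of the updated usage, assuming a term suggestion on a hypothetical `body` field:

[source,java]
----
SuggestBuilder suggestBuilder = new SuggestBuilder()
    .setGlobalText("some suggest text") // formerly setText()
    .addSuggestion("my_suggestion", new TermSuggestionBuilder("body")); // name and field are now explicit
----

Both setters return the builder, so the calls can be chained as before.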

View File

@ -93,6 +93,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@ -445,7 +446,7 @@ public class IndicesRequestTests extends ESIntegTestCase {
String suggestAction = SuggestAction.NAME + "[s]";
interceptTransportActions(suggestAction);
SuggestRequest suggestRequest = new SuggestRequest(randomIndicesOrAliases());
SuggestRequest suggestRequest = new SuggestRequest(randomIndicesOrAliases()).suggest(new SuggestBuilder());
internalCluster().clientNodeClient().suggest(suggestRequest).actionGet();
clearInterceptedActions();

View File

@ -26,7 +26,6 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion;
import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;
import static org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.candidateGenerator;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion;
@ -50,6 +49,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import org.elasticsearch.ElasticsearchException;
@ -67,12 +67,17 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.mustache.MustachePlugin;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder;
import org.elasticsearch.search.suggest.phrase.Laplace;
import org.elasticsearch.search.suggest.phrase.LinearInterpolation;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
@ -99,12 +104,11 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test", "type1", "4", "text", "abcc");
refresh();
TermSuggestionBuilder termSuggest = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
.text("abcd")
.field("text");
TermSuggestionBuilder termSuggest = termSuggestion("text")
.suggestMode(TermSuggestionBuilder.SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("abcd");
logger.info("--> run suggestions with one index");
searchSuggest( termSuggest);
searchSuggest("test", termSuggest);
createIndex("test_1");
ensureGreen();
@ -113,13 +117,12 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test_1", "type1", "3", "text", "ab bd");
index("test_1", "type1", "4", "text", "ab cc");
refresh();
termSuggest = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
termSuggest = termSuggestion("text")
.suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("ab cd")
.minWordLength(1)
.field("text");
.minWordLength(1);
logger.info("--> run suggestions with two indices");
searchSuggest( termSuggest);
searchSuggest("test", termSuggest);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
@ -140,14 +143,13 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test_2", "type1", "4", "text", "abcc");
refresh();
termSuggest = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
termSuggest = termSuggestion("text")
.suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("ab cd")
.minWordLength(1)
.field("text");
.minWordLength(1);
logger.info("--> run suggestions with three indices");
try {
searchSuggest( termSuggest);
searchSuggest("test", termSuggest);
fail(" can not suggest across multiple indices with different analysis chains");
} catch (ReduceSearchPhaseException ex) {
assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
@ -160,14 +162,13 @@ public class SuggestSearchTests extends ESIntegTestCase {
}
termSuggest = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
termSuggest = termSuggestion("text")
.suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("ABCD")
.minWordLength(1)
.field("text");
.minWordLength(1);
logger.info("--> run suggestions with four indices");
try {
searchSuggest( termSuggest);
searchSuggest("test", termSuggest);
fail(" can not suggest across multiple indices with different analysis chains");
} catch (ReduceSearchPhaseException ex) {
assertThat(ex.getCause(), instanceOf(IllegalStateException.class));
@ -214,17 +215,27 @@ public class SuggestSearchTests extends ESIntegTestCase {
refresh();
DirectCandidateGeneratorBuilder generator = candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2);
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled")
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled")
.addCandidateGenerator(generator)
.gramSize(3);
Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion);
Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion);
assertSuggestion(searchSuggest, 0, "did_you_mean", "iced tea");
generator.suggestMode(null);
searchSuggest = searchSuggest( "ice tea", phraseSuggestion);
searchSuggest = searchSuggest( "ice tea", "did_you_mean", phraseSuggestion);
assertSuggestionSize(searchSuggest, 0, 0, "did_you_mean");
}
/**
* Creates a new {@link DirectCandidateGeneratorBuilder}
*
* @param field
* the field this candidate generator operates on.
*/
private DirectCandidateGeneratorBuilder candidateGenerator(String field) {
return new DirectCandidateGeneratorBuilder(field);
}
// see #2729
public void testSizeOneShard() throws Exception {
prepareCreate("test").setSettings(
@ -240,16 +251,15 @@ public class SuggestSearchTests extends ESIntegTestCase {
SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get();
assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());
TermSuggestionBuilder termSuggestion = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
TermSuggestionBuilder termSuggestion = termSuggestion("text")
.suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("abcd")
.field("text")
.size(10);
Suggest suggest = searchSuggest( termSuggestion);
Suggest suggest = searchSuggest("test", termSuggestion);
assertSuggestion(suggest, 0, "test", 10, "abc0");
termSuggestion.text("abcd").shardSize(5);
suggest = searchSuggest( termSuggestion);
suggest = searchSuggest("test", termSuggestion);
assertSuggestion(suggest, 0, "test", 5, "abc0");
}
@ -283,21 +293,23 @@ public class SuggestSearchTests extends ESIntegTestCase {
client().prepareIndex("test", "type1").setSource("name", "I like ice cream."));
refresh();
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("did_you_mean").field("name.shingled")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("name.shingled")
.addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
.gramSize(3);
Suggest searchSuggest = searchSuggest( "ice tea", phraseSuggestion);
Suggest searchSuggest = searchSuggest("ice tea", "did_you_mean", phraseSuggestion);
assertSuggestion(searchSuggest, 0, 0, "did_you_mean", "iced tea");
phraseSuggestion.field("nosuchField");
phraseSuggestion = phraseSuggestion("nosuchField")
.addCandidateGenerator(candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2))
.gramSize(3);
{
SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0);
searchBuilder.suggest(new SuggestBuilder().setText("tetsting sugestion").addSuggestion(phraseSuggestion));
searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion));
assertThrows(searchBuilder, SearchPhaseExecutionException.class);
}
{
SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0);
searchBuilder.suggest(new SuggestBuilder().setText("tetsting sugestion").addSuggestion(phraseSuggestion));
searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion));
assertThrows(searchBuilder, SearchPhaseExecutionException.class);
}
}
@ -315,15 +327,14 @@ public class SuggestSearchTests extends ESIntegTestCase {
SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get();
assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue());
TermSuggestionBuilder termSuggest = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
.text("abcd")
.field("text");
Suggest suggest = searchSuggest( termSuggest);
TermSuggestionBuilder termSuggest = termSuggestion("text")
.suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("abcd");
Suggest suggest = searchSuggest("test", termSuggest);
assertSuggestion(suggest, 0, "test", "aacd", "abbd", "abcc");
assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
suggest = searchSuggest( termSuggest);
suggest = searchSuggest("test", termSuggest);
assertSuggestion(suggest, 0, "test", "aacd","abbd", "abcc");
assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
}
@ -332,18 +343,17 @@ public class SuggestSearchTests extends ESIntegTestCase {
createIndex("test");
ensureGreen();
index("test", "type1", "1", "foo", "bar");
index("test", "type1", "1", "text", "bar");
refresh();
TermSuggestionBuilder termSuggest = termSuggestion("test")
.suggestMode("always") // Always, otherwise the results can vary between requests.
.text("abcd")
.field("text");
Suggest suggest = searchSuggest( termSuggest);
TermSuggestionBuilder termSuggest = termSuggestion("text")
.suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary between requests.
.text("abcd");
Suggest suggest = searchSuggest("test", termSuggest);
assertSuggestionSize(suggest, 0, 0, "test");
assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
suggest = searchSuggest( termSuggest);
suggest = searchSuggest("test", termSuggest);
assertSuggestionSize(suggest, 0, 0, "test");
assertThat(suggest.getSuggestion("test").getEntries().get(0).getText().string(), equalTo("abcd"));
}
@ -358,16 +368,17 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test", "typ1", "4", "field1", "prefix_abcc", "field2", "prefix_eggg");
refresh();
Suggest suggest = searchSuggest(
termSuggestion("size1")
.size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0)
.field("field1").suggestMode("always"),
termSuggestion("field2")
.field("field2").text("prefix_eeeh prefix_efgh")
.maxTermFreq(10).minDocFreq(0).suggestMode("always"),
termSuggestion("accuracy")
.field("field2").text("prefix_efgh").setAccuracy(1f)
.maxTermFreq(10).minDocFreq(0).suggestMode("always"));
Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
suggestions.put("size1", termSuggestion("field1")
.size(1).text("prefix_abcd").maxTermFreq(10).prefixLength(1).minDocFreq(0)
.suggestMode(SuggestMode.ALWAYS));
suggestions.put("field2", termSuggestion("field2")
.text("prefix_eeeh prefix_efgh")
.maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
suggestions.put("accuracy", termSuggestion("field2")
.text("prefix_efgh").accuracy(1f)
.maxTermFreq(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
Suggest suggest = searchSuggest(null, 0, suggestions);
assertSuggestion(suggest, 0, "size1", "prefix_aacd");
assertThat(suggest.getSuggestion("field2").getEntries().get(0).getText().string(), equalTo("prefix_eeeh"));
assertSuggestion(suggest, 0, "field2", "prefix_efgh");
@ -401,17 +412,18 @@ public class SuggestSearchTests extends ESIntegTestCase {
}
refresh();
Suggest suggest = searchSuggest( "prefix_abcd",
termSuggestion("size3SortScoreFirst")
.size(3).minDocFreq(0).field("field1").suggestMode("always"),
termSuggestion("size10SortScoreFirst")
.size(10).minDocFreq(0).field("field1").suggestMode("always").shardSize(50),
termSuggestion("size3SortScoreFirstMaxEdits1")
.maxEdits(1)
.size(10).minDocFreq(0).field("field1").suggestMode("always"),
termSuggestion("size10SortFrequencyFirst")
.size(10).sort("frequency").shardSize(1000)
.minDocFreq(0).field("field1").suggestMode("always"));
Map<String, SuggestionBuilder<?>> suggestions = new HashMap<>();
suggestions.put("size3SortScoreFirst", termSuggestion("field1")
.size(3).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
suggestions.put("size10SortScoreFirst", termSuggestion("field1")
.size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS).shardSize(50));
suggestions.put("size3SortScoreFirstMaxEdits1", termSuggestion("field1")
.maxEdits(1)
.size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
suggestions.put("size10SortFrequencyFirst", termSuggestion("field1")
.size(10).sort(SortBy.FREQUENCY).shardSize(1000)
.minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
Suggest suggest = searchSuggest("prefix_abcd", 0, suggestions);
// The commented-out assertions fail sometimes because suggestions are based on shard frequencies instead of index frequencies.
assertSuggestion(suggest, 0, "size3SortScoreFirst", "prefix_aacd", "prefix_abcc", "prefix_accd");
@ -435,9 +447,9 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test", "typ1", "1", "body", "this is a test");
refresh();
Suggest searchSuggest = searchSuggest( "a an the",
phraseSuggestion("simple_phrase").field("body").gramSize(1)
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always"))
Suggest searchSuggest = searchSuggest( "a an the", "simple_phrase",
phraseSuggestion("body").gramSize(1)
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
.size(1));
assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
}
@ -471,15 +483,15 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test", "type1", "3", "body", "hello words");
refresh();
Suggest searchSuggest = searchSuggest( "hello word",
phraseSuggestion("simple_phrase").field("body")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always"))
Suggest searchSuggest = searchSuggest( "hello word", "simple_phrase",
phraseSuggestion("body")
.addCandidateGenerator(candidateGenerator("body").prefixLength(4).minWordLength(1).suggestMode("always"))
.size(1).confidence(1.0f));
assertSuggestion(searchSuggest, 0, "simple_phrase", "hello words");
searchSuggest = searchSuggest( "hello word",
phraseSuggestion("simple_phrase").field("body")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always"))
searchSuggest = searchSuggest( "hello word", "simple_phrase",
phraseSuggestion("body")
.addCandidateGenerator(candidateGenerator("body").prefixLength(2).minWordLength(1).suggestMode("always"))
.size(1).confidence(1.0f));
assertSuggestion(searchSuggest, 0, "simple_phrase", "hello world");
}
@ -526,88 +538,87 @@ public class SuggestSearchTests extends ESIntegTestCase {
}
refresh();
PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("simple_phrase")
.field("bigram").gramSize(2).analyzer("body")
PhraseSuggestionBuilder phraseSuggest = phraseSuggestion("bigram").gramSize(2).analyzer("body")
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
.size(1);
Suggest searchSuggest = searchSuggest( "american ame", phraseSuggest);
Suggest searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace");
assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("american ame"));
phraseSuggest.realWordErrorLikelihood(0.95f);
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
// Check the "text" field this one time.
assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel"));
// Ask for highlighting
phraseSuggest.highlight("<em>", "</em>");
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getOptions().get(0).getHighlighted().string(), equalTo("<em>xorr</em> the <em>god</em> jewel"));
// pass in a correct phrase
phraseSuggest.highlight(null, null).confidence(0f).size(1).maxErrors(0.5f);
searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
// pass in a correct phrase - set confidence to 2
phraseSuggest.confidence(2f);
searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest);
assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
// pass in a correct phrase - set confidence to 0.99
phraseSuggest.confidence(0.99f);
searchSuggest = searchSuggest( "Xorr the God-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "Xorr the God-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
//test reverse suggestions with pre & post filter
phraseSuggest
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
.addCandidateGenerator(candidateGenerator("body_reverse").minWordLength(1).suggestMode("always").preFilter("reverse").postFilter("reverse"));
searchSuggest = searchSuggest( "xor the yod-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "xor the yod-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
// set all mass to trigrams (not indexed)
phraseSuggest.clearCandidateGenerators()
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"))
.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(1,0,0));
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
.smoothingModel(new LinearInterpolation(1,0,0));
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
// set all mass to bigrams
phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0,1,0));
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
phraseSuggest.smoothingModel(new LinearInterpolation(0,1,0));
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
// distribute mass
phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2));
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2));
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
searchSuggest = searchSuggest( "american ame", phraseSuggest);
searchSuggest = searchSuggest( "american ame", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "american ace");
// try all smoothing methods
phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.LinearInterpolation(0.4,0.4,0.2));
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
phraseSuggest.smoothingModel(new LinearInterpolation(0.4,0.4,0.2));
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.Laplace(0.2));
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
phraseSuggest.smoothingModel(new Laplace(0.2));
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
phraseSuggest.smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1));
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
phraseSuggest.smoothingModel(new StupidBackoff(0.1));
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase",phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
// check tokenLimit
phraseSuggest.smoothingModel(null).tokenLimit(4);
searchSuggest = searchSuggest( "Xor the Got-Jewel", phraseSuggest);
searchSuggest = searchSuggest( "Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
phraseSuggest.tokenLimit(15).smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1));
searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", phraseSuggest);
phraseSuggest.tokenLimit(15).smoothingModel(new StupidBackoff(0.1));
searchSuggest = searchSuggest( "Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel", "simple_phrase", phraseSuggest);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel xorr the god jewel xorr the god jewel");
// Check the name this time because we're repeating it, which is funky
assertThat(searchSuggest.getSuggestion("simple_phrase").getEntries().get(0).getText().string(), equalTo("Xor the Got-Jewel Xor the Got-Jewel Xor the Got-Jewel"));
@ -663,22 +674,21 @@ public class SuggestSearchTests extends ESIntegTestCase {
index("test", "type1", "2", "body", line, "body_reverse", line, "bigram", line);
refresh();
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase")
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
.realWordErrorLikelihood(0.95f)
.field("bigram")
.gramSize(2)
.analyzer("body")
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(1).accuracy(0.1f))
.smoothingModel(new PhraseSuggestionBuilder.StupidBackoff(0.1))
.smoothingModel(new StupidBackoff(0.1))
.maxErrors(1.0f)
.size(5);
Suggest searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion);
Suggest searchSuggest = searchSuggest("Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion);
assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
// we allow a size of 2 now on the shard generator level so "god" will be found since it's LD2
phraseSuggestion.clearCandidateGenerators()
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).prefixLength(1).suggestMode("always").size(2).accuracy(0.1f));
searchSuggest = searchSuggest( "Xorr the Gut-Jewel", phraseSuggestion);
searchSuggest = searchSuggest( "Xorr the Gut-Jewel", "simple_phrase", phraseSuggestion);
assertSuggestion(searchSuggest, 0, "simple_phrase", "xorr the god jewel");
}
@ -723,51 +733,56 @@ public class SuggestSearchTests extends ESIntegTestCase {
NumShards numShards = getNumShards("test");
// Let's make sure some things throw exceptions
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("simple_phrase")
.field("bigram")
PhraseSuggestionBuilder phraseSuggestion = phraseSuggestion("bigram")
.analyzer("body")
.addCandidateGenerator(candidateGenerator("does_not_exist").minWordLength(1).suggestMode("always"))
.realWordErrorLikelihood(0.95f)
.maxErrors(0.5f)
.size(1);
Map<String, SuggestionBuilder<?>> suggestion = new HashMap<>();
suggestion.put("simple_phrase", phraseSuggestion);
try {
searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion);
fail("field does not exists");
} catch (SearchPhaseExecutionException e) {}
phraseSuggestion.clearCandidateGenerators().analyzer(null);
try {
searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion);
fail("analyzer does only produce ngrams");
} catch (SearchPhaseExecutionException e) {
}
phraseSuggestion.analyzer("bigram");
try {
searchSuggest( "Xor the Got-Jewel", numShards.numPrimaries, phraseSuggestion);
searchSuggest("Xor the Got-Jewel", numShards.numPrimaries, suggestion);
fail("analyzer does only produce ngrams");
} catch (SearchPhaseExecutionException e) {
}
// Now we'll make sure some things don't
phraseSuggestion.forceUnigrams(false);
searchSuggest( "Xor the Got-Jewel", phraseSuggestion);
searchSuggest( "Xor the Got-Jewel", 0, suggestion);
// Field doesn't produce unigrams but the analyzer does
phraseSuggestion.forceUnigrams(true).field("bigram").analyzer("ngram");
searchSuggest( "Xor the Got-Jewel",
phraseSuggestion);
phraseSuggestion.forceUnigrams(true).analyzer("ngram");
searchSuggest( "Xor the Got-Jewel", 0, suggestion);
phraseSuggestion.field("ngram").analyzer("myDefAnalyzer")
phraseSuggestion = phraseSuggestion("ngram")
.analyzer("myDefAnalyzer")
.forceUnigrams(true)
.realWordErrorLikelihood(0.95f)
.maxErrors(0.5f)
.size(1)
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always"));
Suggest suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion);
Suggest suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion);
// "xorr the god jewel" and and "xorn the god jewel" have identical scores (we are only using unigrams to score), so we tie break by
// earlier term (xorn):
assertSuggestion(suggest, 0, "simple_phrase", "xorn the god jewel");
phraseSuggestion.analyzer(null);
suggest = searchSuggest( "Xor the Got-Jewel", phraseSuggestion);
suggest = searchSuggest( "Xor the Got-Jewel", 0, suggestion);
// In this case xorr has a better score than xorn because we set the field back to the default (my_shingle2) analyzer, so the
// probability that the term is not in the dictionary but is NOT a misspelling is relatively high in this case compared to the
@ -782,9 +797,9 @@ public class SuggestSearchTests extends ESIntegTestCase {
client().prepareIndex("test", "type1", "2").setSource("field1", "foobar2").setRouting("2"),
client().prepareIndex("test", "type1", "3").setSource("field1", "foobar3").setRouting("3"));
Suggest suggest = searchSuggest( "foobar",
termSuggestion("simple")
.size(10).minDocFreq(0).field("field1").suggestMode("always"));
Suggest suggest = searchSuggest( "foobar", "simple",
termSuggestion("field1")
.size(10).minDocFreq(0).suggestMode(SuggestMode.ALWAYS));
ElasticsearchAssertions.assertSuggestionSize(suggest, 0, 3, "simple");
}
@ -823,15 +838,15 @@ public class SuggestSearchTests extends ESIntegTestCase {
// When searching on a shard with a non existing mapping, we should fail
SearchRequestBuilder request = client().prepareSearch().setSize(0)
.suggest(
new SuggestBuilder().setText("tetsting sugestion").addSuggestion(
phraseSuggestion("did_you_mean").field("fielddoesnotexist").maxErrors(5.0f)));
new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
phraseSuggestion("fielddoesnotexist").maxErrors(5.0f)));
assertThrows(request, SearchPhaseExecutionException.class);
// When searching on a shard which does not hold yet any document of an existing type, we should not fail
SearchResponse searchResponse = client().prepareSearch().setSize(0)
.suggest(
new SuggestBuilder().setText("tetsting sugestion").addSuggestion(
phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f)))
new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
phraseSuggestion("name").maxErrors(5.0f)))
.get();
ElasticsearchAssertions.assertNoFailures(searchResponse);
ElasticsearchAssertions.assertSuggestion(searchResponse.getSuggest(), 0, 0, "did_you_mean", "testing suggestions");
@ -869,8 +884,8 @@ public class SuggestSearchTests extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setSize(0)
.suggest(
new SuggestBuilder().setText("tetsting sugestion").addSuggestion(
phraseSuggestion("did_you_mean").field("name").maxErrors(5.0f)))
new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean",
phraseSuggestion("name").maxErrors(5.0f)))
.get();
assertNoFailures(searchResponse);
@ -927,17 +942,15 @@ public class SuggestSearchTests extends ESIntegTestCase {
}
refresh();
Suggest searchSuggest = searchSuggest("nobel prize", phraseSuggestion("simple_phrase")
.field("body")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
Suggest searchSuggest = searchSuggest("nobel prize", "simple_phrase", phraseSuggestion("body")
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
.confidence(2f)
.maxErrors(5f)
.size(1));
assertSuggestionSize(searchSuggest, 0, 0, "simple_phrase");
searchSuggest = searchSuggest("noble prize", phraseSuggestion("simple_phrase")
.field("body")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
searchSuggest = searchSuggest("noble prize", "simple_phrase", phraseSuggestion("body")
.addCandidateGenerator(candidateGenerator("body").minWordLength(1).suggestMode("always").maxTermFreq(.99f))
.confidence(2f)
.maxErrors(5f)
.size(1));
@ -1067,8 +1080,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
indexRandom(true, builders);
PhraseSuggestionBuilder suggest = phraseSuggestion("title")
.field("title")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title")
.addCandidateGenerator(candidateGenerator("title")
.suggestMode("always")
.maxTermFreq(.99f)
.size(1000) // Setting a silly high size helps generate a larger list of candidates for testing.
@ -1078,13 +1090,13 @@ public class SuggestSearchTests extends ESIntegTestCase {
.maxErrors(2f)
.shardSize(30000)
.size(30000);
Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest);
Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options
suggest.size(1);
long start = System.currentTimeMillis();
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
long total = System.currentTimeMillis() - start;
assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006");
// assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging
@ -1132,8 +1144,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
// suggest without collate
PhraseSuggestionBuilder suggest = phraseSuggestion("title")
.field("title")
.addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title")
.addCandidateGenerator(candidateGenerator("title")
.suggestMode("always")
.maxTermFreq(.99f)
.size(10)
@ -1143,7 +1154,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
.maxErrors(2f)
.shardSize(30000)
.size(10);
Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", suggest);
Suggest searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest);
assertSuggestionSize(searchSuggest, 0, 10, "title");
// suggest with collate
@ -1156,11 +1167,11 @@ public class SuggestSearchTests extends ESIntegTestCase {
.string();
PhraseSuggestionBuilder filteredQuerySuggest = suggest.collateQuery(filterString);
filteredQuerySuggest.collateParams(Collections.singletonMap("field", "title"));
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredQuerySuggest);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredQuerySuggest);
assertSuggestionSize(searchSuggest, 0, 2, "title");
// collate suggest with no result (boundary case)
searchSuggest = searchSuggest("Elections of Representatives Parliament", filteredQuerySuggest);
searchSuggest = searchSuggest("Elections of Representatives Parliament", "title", filteredQuerySuggest);
assertSuggestionSize(searchSuggest, 0, 0, "title");
NumShards numShards = getNumShards("test");
@ -1174,8 +1185,10 @@ public class SuggestSearchTests extends ESIntegTestCase {
.endObject()
.string();
PhraseSuggestionBuilder incorrectFilteredSuggest = suggest.collateQuery(incorrectFilterString);
Map<String, SuggestionBuilder<?>> namedSuggestion = new HashMap<>();
namedSuggestion.put("my_title_suggestion", incorrectFilteredSuggest);
try {
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, incorrectFilteredSuggest);
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion);
fail("Post query error has been swallowed");
} catch(ElasticsearchException e) {
// expected
@ -1191,7 +1204,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
.string();
PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(filterStringAsFilter);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", filteredFilterSuggest);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", filteredFilterSuggest);
assertSuggestionSize(searchSuggest, 0, 2, "title");
// collate suggest with bad query
@ -1205,7 +1218,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
PhraseSuggestionBuilder in = suggest.collateQuery(filterStr);
try {
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, in);
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion);
fail("Post filter error has been swallowed");
} catch(ElasticsearchException e) {
//expected
@ -1223,7 +1236,7 @@ public class SuggestSearchTests extends ESIntegTestCase {
PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams);
try {
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, phraseSuggestWithNoParams);
searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion);
fail("Malformed query (lack of additional params) should fail");
} catch (ElasticsearchException e) {
// expected
@ -1235,33 +1248,35 @@ public class SuggestSearchTests extends ESIntegTestCase {
params.put("query_field", "title");
PhraseSuggestionBuilder phraseSuggestWithParams = suggest.collateQuery(collateWithParams).collateParams(params);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParams);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", phraseSuggestWithParams);
assertSuggestionSize(searchSuggest, 0, 2, "title");
// collate query request with prune set to true
PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateQuery(collateWithParams).collateParams(params).collatePrune(true);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", phraseSuggestWithParamsAndReturn);
assertSuggestionSize(searchSuggest, 0, 10, "title");
assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2);
}
protected Suggest searchSuggest(SuggestionBuilder<?>... suggestion) {
return searchSuggest(null, suggestion);
protected Suggest searchSuggest(String name, SuggestionBuilder<?> suggestion) {
return searchSuggest(null, name, suggestion);
}
protected Suggest searchSuggest(String suggestText, SuggestionBuilder<?>... suggestions) {
return searchSuggest(suggestText, 0, suggestions);
protected Suggest searchSuggest(String suggestText, String name, SuggestionBuilder<?> suggestion) {
Map<String, SuggestionBuilder<?>> map = new HashMap<>();
map.put(name, suggestion);
return searchSuggest(suggestText, 0, map);
}
protected Suggest searchSuggest(String suggestText, int expectShardsFailed, SuggestionBuilder<?>... suggestions) {
protected Suggest searchSuggest(String suggestText, int expectShardsFailed, Map<String, SuggestionBuilder<?>> suggestions) {
if (randomBoolean()) {
SearchRequestBuilder builder = client().prepareSearch().setSize(0);
SuggestBuilder suggestBuilder = new SuggestBuilder();
if (suggestText != null) {
suggestBuilder.setText(suggestText);
suggestBuilder.setGlobalText(suggestText);
}
for (SuggestionBuilder<?> suggestion : suggestions) {
suggestBuilder.addSuggestion(suggestion);
for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
suggestBuilder.addSuggestion(suggestion.getKey(), suggestion.getValue());
}
builder.suggest(suggestBuilder);
SearchResponse actionGet = builder.execute().actionGet();
@ -1272,8 +1287,8 @@ public class SuggestSearchTests extends ESIntegTestCase {
if (suggestText != null) {
builder.setSuggestText(suggestText);
}
for (SuggestionBuilder<?> suggestion : suggestions) {
builder.addSuggestion(suggestion);
for (Entry<String, SuggestionBuilder<?>> suggestion : suggestions.entrySet()) {
builder.addSuggestion(suggestion.getKey(), suggestion.getValue());
}
SuggestResponse actionGet = builder.execute().actionGet();
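A minimal sketch (not part of the diff) of the test-side API after this refactoring: a suggestion is now registered under an explicit name via addSuggestion(name, builder), and the shared input moved from setText() to setGlobalText(). The PhraseSuggestionBuilder 'suggest' is assumed to be set up as earlier in the test.

    SuggestBuilder suggestBuilder = new SuggestBuilder();
    suggestBuilder.setGlobalText("united states house of representatives elections in washington 2006");
    // the name ("title" here) is supplied at registration time, no longer stored in the builder
    suggestBuilder.addSuggestion("title", suggest);
    SearchResponse response = client().prepareSearch().setSize(0).suggest(suggestBuilder).get();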


@ -19,6 +19,8 @@
package org.elasticsearch.index.reindex;
import java.io.IOException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.TransportAction;
@ -33,23 +35,25 @@ import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.tasks.LoggingTaskListener;
import org.elasticsearch.tasks.Task;
import java.io.IOException;
public abstract class AbstractBaseReindexRestHandler<Request extends ActionRequest<Request>, Response extends BulkIndexByScrollResponse,
TA extends TransportAction<Request, Response>> extends BaseRestHandler {
protected final IndicesQueriesRegistry indicesQueriesRegistry;
protected final AggregatorParsers aggParsers;
protected final Suggesters suggesters;
private final ClusterService clusterService;
private final TA action;
protected AbstractBaseReindexRestHandler(Settings settings, Client client,
IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, ClusterService clusterService, TA action) {
IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters,
ClusterService clusterService, TA action) {
super(settings, client);
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.aggParsers = aggParsers;
this.suggesters = suggesters;
this.clusterService = clusterService;
this.action = action;
}


@ -19,6 +19,14 @@
package org.elasticsearch.index.reindex;
import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
@ -43,14 +51,7 @@ import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
import org.elasticsearch.search.suggest.Suggesters;
/**
* Expose IndexBySearchRequest over rest.
@ -76,7 +77,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
builder.map(source);
parser = parser.contentType().xContent().createParser(builder.bytes());
context.queryParseContext.reset(parser);
search.source().parseXContent(parser, context.queryParseContext, context.aggParsers);
search.source().parseXContent(parser, context.queryParseContext, context.aggParsers, context.suggesters);
};
ObjectParser<IndexRequest, Void> destParser = new ObjectParser<>("dest");
@ -102,9 +103,9 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
@Inject
public RestReindexAction(Settings settings, RestController controller, Client client,
IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, ClusterService clusterService,
TransportReindexAction action) {
super(settings, client, indicesQueriesRegistry, aggParsers, clusterService, action);
IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters,
ClusterService clusterService, TransportReindexAction action) {
super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action);
controller.registerHandler(POST, "/_reindex", this);
}
@ -118,7 +119,8 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
ReindexRequest internalRequest = new ReindexRequest(new SearchRequest(), new IndexRequest());
try (XContentParser xcontent = XContentFactory.xContent(request.content()).createParser(request.content())) {
PARSER.parse(xcontent, internalRequest, new ReindexParseContext(new QueryParseContext(indicesQueriesRegistry), aggParsers));
PARSER.parse(xcontent, internalRequest, new ReindexParseContext(new QueryParseContext(indicesQueriesRegistry), aggParsers,
suggesters));
} catch (ParsingException e) {
logger.warn("Bad request", e);
badRequest(channel, e.getDetailedMessage());
@ -170,10 +172,13 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
private class ReindexParseContext {
private final QueryParseContext queryParseContext;
private final AggregatorParsers aggParsers;
private final Suggesters suggesters;
public ReindexParseContext(QueryParseContext queryParseContext, AggregatorParsers aggParsers) {
public ReindexParseContext(QueryParseContext queryParseContext, AggregatorParsers aggParsers,
Suggesters suggesters) {
this.queryParseContext = queryParseContext;
this.aggParsers = aggParsers;
this.suggesters = suggesters;
}
}
}
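For orientation, a condensed sketch (not part of the commit; both statements are lifted from the hunks above, with field access simplified) of how the injected Suggesters instance flows into search-source parsing, so the suggest section can be parsed on the coordinating node:

    // the parse context now carries suggesters next to the query registry and agg parsers ...
    ReindexParseContext context = new ReindexParseContext(
            new QueryParseContext(indicesQueriesRegistry), aggParsers, suggesters);
    // ... and SearchSourceBuilder.parseXContent consumes it when parsing the request body
    search.source().parseXContent(parser, context.queryParseContext, context.aggParsers, context.suggesters);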


@ -19,6 +19,12 @@
package org.elasticsearch.index.reindex;
import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES;
import static org.elasticsearch.index.reindex.RestReindexAction.parseCommon;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import java.util.Map;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
@ -38,20 +44,15 @@ import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import java.util.Map;
import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_ALL_MATCHES;
import static org.elasticsearch.index.reindex.RestReindexAction.parseCommon;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import org.elasticsearch.search.suggest.Suggesters;
public class RestUpdateByQueryAction extends
AbstractBaseReindexRestHandler<UpdateByQueryRequest, BulkIndexByScrollResponse, TransportUpdateByQueryAction> {
@Inject
public RestUpdateByQueryAction(Settings settings, RestController controller, Client client,
IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, ClusterService clusterService,
TransportUpdateByQueryAction action) {
super(settings, client, indicesQueriesRegistry, aggParsers, clusterService, action);
IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters,
ClusterService clusterService, TransportUpdateByQueryAction action) {
super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action);
controller.registerHandler(POST, "/{index}/_update_by_query", this);
controller.registerHandler(POST, "/{index}/{type}/_update_by_query", this);
}
@ -96,7 +97,7 @@ public class RestUpdateByQueryAction extends
}
}
RestSearchAction.parseSearchRequest(internalRequest.getSearchRequest(), indicesQueriesRegistry, request,
parseFieldMatcher, aggParsers, bodyContent);
parseFieldMatcher, aggParsers, suggesters, bodyContent);
String conflicts = request.param("conflicts");
if (conflicts != null) {


@ -33,6 +33,7 @@ import org.elasticsearch.search.dfs.DfsPhase;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.query.QueryPhase;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.HashMap;
@ -69,9 +70,9 @@ public class MockSearchService extends SearchService {
public MockSearchService(Settings settings, ClusterSettings clusterSettings, ClusterService clusterService,
IndicesService indicesService, ThreadPool threadPool, ScriptService scriptService, PageCacheRecycler pageCacheRecycler,
BigArrays bigArrays, DfsPhase dfsPhase, QueryPhase queryPhase, FetchPhase fetchPhase,
AggregatorParsers aggParsers) {
AggregatorParsers aggParsers, Suggesters suggesters) {
super(settings, clusterSettings, clusterService, indicesService, threadPool, scriptService, pageCacheRecycler, bigArrays, dfsPhase,
queryPhase, fetchPhase, aggParsers);
queryPhase, fetchPhase, aggParsers, suggesters);
}
@Override


@ -76,6 +76,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
import static org.hamcrest.Matchers.equalTo;
@ -397,6 +399,26 @@ public abstract class ESTestCase extends LuceneTestCase {
return randomTimeValue(1, 1000);
}
/**
* helper that randomly invokes <code>consumer</code> with <code>value</code> (only about half of the time)
*/
public static <T> void maybeSet(Consumer<T> consumer, T value) {
if (randomBoolean()) {
consumer.accept(value);
}
}
/**
* helper to get a random value from <code>randomSupplier</code> that is guaranteed to differ from <code>input</code>
*/
public static <T> T randomValueOtherThan(T input, Supplier<T> randomSupplier) {
T randomValue = null;
do {
randomValue = randomSupplier.get();
} while (randomValue.equals(input));
return randomValue;
}
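A short usage sketch of the two helpers above (hypothetical values, not part of the commit):

    List<String> names = new ArrayList<>();
    // invoked only when randomBoolean() is true, so both the "set" and "default" paths get coverage
    maybeSet(names::add, "sometimes-present");
    // pick a replacement guaranteed to differ from the current value, e.g. when
    // mutating a single field in equals()/hashCode() tests
    int size = 5;
    int differentSize = randomValueOtherThan(size, () -> randomIntBetween(0, 10));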
/**
* Runs the code block for 10 seconds waiting for no assertion to trip.
*/