Merge branch 'master' into deprecation/sort-option-reverse-removal
Conflicts: core/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java core/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java
This commit is contained in:
commit
08d989d9b6
|
@ -39,9 +39,6 @@ class PluginPropertiesExtension {
|
|||
@Input
|
||||
String classname
|
||||
|
||||
@Input
|
||||
boolean isolated = true
|
||||
|
||||
PluginPropertiesExtension(Project project) {
|
||||
name = project.name
|
||||
version = project.version
|
||||
|
|
|
@ -54,12 +54,6 @@ class PluginPropertiesTask extends Copy {
|
|||
if (extension.classname == null) {
|
||||
throw new InvalidUserDataException('classname is a required setting for esplugin')
|
||||
}
|
||||
doFirst {
|
||||
if (extension.isolated == false) {
|
||||
String warning = "WARNING: Disabling plugin isolation in ${project.path} is deprecated and will be removed in the future"
|
||||
logger.warn("${'=' * warning.length()}\n${warning}\n${'=' * warning.length()}")
|
||||
}
|
||||
}
|
||||
// configure property substitution
|
||||
from(templateFile)
|
||||
into(generatedResourcesDir)
|
||||
|
@ -80,7 +74,6 @@ class PluginPropertiesTask extends Copy {
|
|||
'version': stringSnap(extension.version),
|
||||
'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
|
||||
'javaVersion': project.targetCompatibility as String,
|
||||
'isolated': extension.isolated as String,
|
||||
'classname': extension.classname
|
||||
]
|
||||
}
|
||||
|
|
|
@ -660,7 +660,6 @@
|
|||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]internal[/\\]InternalSettingsPreparer.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorQuery.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]DummyPluginInfo.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]InstallPluginCommand.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsService.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]RemovePluginCommand.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoriesModule.java" checks="LineLength" />
|
||||
|
|
|
@ -38,12 +38,3 @@ java.version=${javaVersion}
|
|||
#
|
||||
# 'elasticsearch.version' version of elasticsearch compiled against
|
||||
elasticsearch.version=${elasticsearchVersion}
|
||||
#
|
||||
### deprecated elements for jvm plugins :
|
||||
#
|
||||
# 'isolated': true if the plugin should have its own classloader.
|
||||
# passing false is deprecated, and only intended to support plugins
|
||||
# that have hard dependencies against each other. If this is
|
||||
# not specified, then the plugin is isolated by default.
|
||||
isolated=${isolated}
|
||||
#
|
|
@ -1,4 +1,4 @@
|
|||
elasticsearch = 5.0.0
|
||||
elasticsearch = 5.0.0-alpha1
|
||||
lucene = 6.0.0-snapshot-f0aa4fc
|
||||
|
||||
# optional dependencies
|
||||
|
|
|
@ -64,9 +64,9 @@ public class Version {
|
|||
public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
|
||||
public static final int V_2_3_0_ID = 2030099;
|
||||
public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
|
||||
public static final int V_5_0_0_ID = 5000099;
|
||||
public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
|
||||
public static final Version CURRENT = V_5_0_0;
|
||||
public static final int V_5_0_0_alpha1_ID = 5000001;
|
||||
public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
|
||||
public static final Version CURRENT = V_5_0_0_alpha1;
|
||||
|
||||
static {
|
||||
assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to ["
|
||||
|
@ -79,8 +79,8 @@ public class Version {
|
|||
|
||||
public static Version fromId(int id) {
|
||||
switch (id) {
|
||||
case V_5_0_0_ID:
|
||||
return V_5_0_0;
|
||||
case V_5_0_0_alpha1_ID:
|
||||
return V_5_0_0_alpha1;
|
||||
case V_2_3_0_ID:
|
||||
return V_2_3_0;
|
||||
case V_2_2_1_ID:
|
||||
|
|
|
@ -173,8 +173,6 @@ import org.elasticsearch.action.search.TransportClearScrollAction;
|
|||
import org.elasticsearch.action.search.TransportMultiSearchAction;
|
||||
import org.elasticsearch.action.search.TransportSearchAction;
|
||||
import org.elasticsearch.action.search.TransportSearchScrollAction;
|
||||
import org.elasticsearch.action.suggest.SuggestAction;
|
||||
import org.elasticsearch.action.suggest.TransportSuggestAction;
|
||||
import org.elasticsearch.action.support.ActionFilter;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.AutoCreateIndex;
|
||||
|
@ -320,7 +318,6 @@ public class ActionModule extends AbstractModule {
|
|||
registerAction(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class,
|
||||
TransportShardMultiTermsVectorAction.class);
|
||||
registerAction(DeleteAction.INSTANCE, TransportDeleteAction.class);
|
||||
registerAction(SuggestAction.INSTANCE, TransportSuggestAction.class);
|
||||
registerAction(UpdateAction.INSTANCE, TransportUpdateAction.class);
|
||||
registerAction(MultiGetAction.INSTANCE, TransportMultiGetAction.class,
|
||||
TransportShardMultiGetAction.class);
|
||||
|
|
|
@ -84,7 +84,7 @@ public class TransportCancelTasksAction extends TransportTasksAction<Cancellable
|
|||
}
|
||||
|
||||
protected void processTasks(CancelTasksRequest request, Consumer<CancellableTask> operation) {
|
||||
if (request.getTaskId().isSet() == false) {
|
||||
if (request.getTaskId().isSet()) {
|
||||
// we are only checking one task, we can optimize it
|
||||
CancellableTask task = taskManager.getCancellableTask(request.getTaskId().getId());
|
||||
if (task != null) {
|
||||
|
|
|
@ -178,7 +178,7 @@ public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
|
|||
}
|
||||
builder.dateValueField("start_time_in_millis", "start_time", startTime);
|
||||
builder.timeValueField("running_time_in_nanos", "running_time", runningTimeNanos, TimeUnit.NANOSECONDS);
|
||||
if (parentTaskId.isSet() == false) {
|
||||
if (parentTaskId.isSet()) {
|
||||
builder.field("parent_task_id", parentTaskId.toString());
|
||||
}
|
||||
return builder;
|
||||
|
|
|
@ -84,7 +84,13 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTas
|
|||
long timeoutTime = System.nanoTime() + timeout.nanos();
|
||||
super.processTasks(request, operation.andThen((Task t) -> {
|
||||
while (System.nanoTime() - timeoutTime < 0) {
|
||||
if (taskManager.getTask(t.getId()) == null) {
|
||||
Task task = taskManager.getTask(t.getId());
|
||||
if (task == null) {
|
||||
return;
|
||||
}
|
||||
if (task.getAction().startsWith(ListTasksAction.NAME)) {
|
||||
// It doesn't make sense to wait for List Tasks and it can cause an infinite loop of the task waiting
|
||||
// for itself of one of its child tasks
|
||||
return;
|
||||
}
|
||||
try {
|
||||
|
|
|
@ -42,7 +42,6 @@ import org.elasticsearch.index.search.stats.SearchStats;
|
|||
import org.elasticsearch.index.shard.DocsStats;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.store.StoreStats;
|
||||
import org.elasticsearch.index.suggest.stats.SuggestStats;
|
||||
import org.elasticsearch.index.translog.TranslogStats;
|
||||
import org.elasticsearch.index.warmer.WarmerStats;
|
||||
import org.elasticsearch.indices.IndicesQueryCache;
|
||||
|
@ -109,7 +108,7 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
translog = new TranslogStats();
|
||||
break;
|
||||
case Suggest:
|
||||
suggest = new SuggestStats();
|
||||
// skip
|
||||
break;
|
||||
case RequestCache:
|
||||
requestCache = new RequestCacheStats();
|
||||
|
@ -177,7 +176,7 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
translog = indexShard.translogStats();
|
||||
break;
|
||||
case Suggest:
|
||||
suggest = indexShard.suggestStats();
|
||||
// skip
|
||||
break;
|
||||
case RequestCache:
|
||||
requestCache = indexShard.requestCache().stats();
|
||||
|
@ -236,9 +235,6 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
@Nullable
|
||||
public TranslogStats translog;
|
||||
|
||||
@Nullable
|
||||
public SuggestStats suggest;
|
||||
|
||||
@Nullable
|
||||
public RequestCacheStats requestCache;
|
||||
|
||||
|
@ -367,14 +363,6 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
} else {
|
||||
translog.add(stats.getTranslog());
|
||||
}
|
||||
if (suggest == null) {
|
||||
if (stats.getSuggest() != null) {
|
||||
suggest = new SuggestStats();
|
||||
suggest.add(stats.getSuggest());
|
||||
}
|
||||
} else {
|
||||
suggest.add(stats.getSuggest());
|
||||
}
|
||||
if (requestCache == null) {
|
||||
if (stats.getRequestCache() != null) {
|
||||
requestCache = new RequestCacheStats();
|
||||
|
@ -468,11 +456,6 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
return translog;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public SuggestStats getSuggest() {
|
||||
return suggest;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public RequestCacheStats getRequestCache() {
|
||||
return requestCache;
|
||||
|
@ -555,7 +538,6 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
segments = SegmentsStats.readSegmentsStats(in);
|
||||
}
|
||||
translog = in.readOptionalStreamable(TranslogStats::new);
|
||||
suggest = in.readOptionalStreamable(SuggestStats::new);
|
||||
requestCache = in.readOptionalStreamable(RequestCacheStats::new);
|
||||
recoveryStats = in.readOptionalStreamable(RecoveryStats::new);
|
||||
}
|
||||
|
@ -647,7 +629,6 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
segments.writeTo(out);
|
||||
}
|
||||
out.writeOptionalStreamable(translog);
|
||||
out.writeOptionalStreamable(suggest);
|
||||
out.writeOptionalStreamable(requestCache);
|
||||
out.writeOptionalStreamable(recoveryStats);
|
||||
}
|
||||
|
@ -700,9 +681,6 @@ public class CommonStats implements Streamable, ToXContent {
|
|||
if (translog != null) {
|
||||
translog.toXContent(builder, params);
|
||||
}
|
||||
if (suggest != null) {
|
||||
suggest.toXContent(builder, params);
|
||||
}
|
||||
if (requestCache != null) {
|
||||
requestCache.toXContent(builder, params);
|
||||
}
|
||||
|
|
|
@ -190,7 +190,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
|
|||
out.writeStringArrayNullable(groups);
|
||||
out.writeStringArrayNullable(fieldDataFields);
|
||||
out.writeStringArrayNullable(completionDataFields);
|
||||
if (out.getVersion().onOrAfter(Version.V_5_0_0)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
out.writeBoolean(includeSegmentFileSizes);
|
||||
}
|
||||
}
|
||||
|
@ -208,7 +208,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
|
|||
groups = in.readStringArray();
|
||||
fieldDataFields = in.readStringArray();
|
||||
completionDataFields = in.readStringArray();
|
||||
if (in.getVersion().onOrAfter(Version.V_5_0_0)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
includeSegmentFileSizes = in.readBoolean();
|
||||
} else {
|
||||
includeSegmentFileSizes = false;
|
||||
|
@ -244,7 +244,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
|
|||
Completion("completion"),
|
||||
Segments("segments"),
|
||||
Translog("translog"),
|
||||
Suggest("suggest"),
|
||||
Suggest("suggest"), // unused
|
||||
RequestCache("request_cache"),
|
||||
Recovery("recovery");
|
||||
|
||||
|
|
|
@ -152,11 +152,6 @@ public class IndicesStatsRequestBuilder extends BroadcastOperationRequestBuilder
|
|||
return this;
|
||||
}
|
||||
|
||||
public IndicesStatsRequestBuilder setSuggest(boolean suggest) {
|
||||
request.suggest(suggest);
|
||||
return this;
|
||||
}
|
||||
|
||||
public IndicesStatsRequestBuilder setRequestCache(boolean requestCache) {
|
||||
request.requestCache(requestCache);
|
||||
return this;
|
||||
|
|
|
@ -21,7 +21,6 @@ package org.elasticsearch.action.percolate;
|
|||
import org.elasticsearch.action.ActionRequestBuilder;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
@ -127,7 +126,7 @@ public class PercolateRequestBuilder extends ActionRequestBuilder<PercolateReque
|
|||
/**
|
||||
* Delegates to {@link PercolateSourceBuilder#addSort(SortBuilder)}
|
||||
*/
|
||||
public PercolateRequestBuilder addSort(SortBuilder sort) {
|
||||
public PercolateRequestBuilder addSort(SortBuilder<?> sort) {
|
||||
sourceBuilder().addSort(sort);
|
||||
return this;
|
||||
}
|
||||
|
|
|
@ -48,13 +48,13 @@ import java.util.Map;
|
|||
public class PercolateSourceBuilder extends ToXContentToBytes {
|
||||
|
||||
private DocBuilder docBuilder;
|
||||
private QueryBuilder queryBuilder;
|
||||
private QueryBuilder<?> queryBuilder;
|
||||
private Integer size;
|
||||
private List<SortBuilder> sorts;
|
||||
private List<SortBuilder<?>> sorts;
|
||||
private Boolean trackScores;
|
||||
private HighlightBuilder highlightBuilder;
|
||||
private List<AggregatorBuilder<?>> aggregationBuilders;
|
||||
private List<PipelineAggregatorBuilder> pipelineAggregationBuilders;
|
||||
private List<PipelineAggregatorBuilder<?>> pipelineAggregationBuilders;
|
||||
|
||||
/**
|
||||
* Sets the document to run the percolate queries against.
|
||||
|
@ -68,7 +68,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
|
|||
* Sets a query to reduce the number of percolate queries to be evaluated and score the queries that match based
|
||||
* on this query.
|
||||
*/
|
||||
public PercolateSourceBuilder setQueryBuilder(QueryBuilder queryBuilder) {
|
||||
public PercolateSourceBuilder setQueryBuilder(QueryBuilder<?> queryBuilder) {
|
||||
this.queryBuilder = queryBuilder;
|
||||
return this;
|
||||
}
|
||||
|
@ -98,7 +98,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
|
|||
*
|
||||
* By default the matching percolator queries are returned in an undefined order.
|
||||
*/
|
||||
public PercolateSourceBuilder addSort(SortBuilder sort) {
|
||||
public PercolateSourceBuilder addSort(SortBuilder<?> sort) {
|
||||
if (sorts == null) {
|
||||
sorts = new ArrayList<>();
|
||||
}
|
||||
|
@ -137,7 +137,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
|
|||
/**
|
||||
* Add an aggregation definition.
|
||||
*/
|
||||
public PercolateSourceBuilder addAggregation(PipelineAggregatorBuilder aggregationBuilder) {
|
||||
public PercolateSourceBuilder addAggregation(PipelineAggregatorBuilder<?> aggregationBuilder) {
|
||||
if (pipelineAggregationBuilders == null) {
|
||||
pipelineAggregationBuilders = new ArrayList<>();
|
||||
}
|
||||
|
@ -160,10 +160,8 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
|
|||
}
|
||||
if (sorts != null) {
|
||||
builder.startArray("sort");
|
||||
for (SortBuilder sort : sorts) {
|
||||
builder.startObject();
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
sort.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
|
@ -182,7 +180,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
|
|||
}
|
||||
}
|
||||
if (pipelineAggregationBuilders != null) {
|
||||
for (PipelineAggregatorBuilder aggregation : pipelineAggregationBuilders) {
|
||||
for (PipelineAggregatorBuilder<?> aggregation : pipelineAggregationBuilders) {
|
||||
aggregation.toXContent(builder, params);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -295,6 +295,13 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
|
|||
return this.requestCache;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if the request only has suggest
|
||||
*/
|
||||
public boolean isSuggestOnly() {
|
||||
return source != null && source.isSuggestOnly();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
|
|
|
@ -38,6 +38,7 @@ import java.util.Map;
|
|||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.action.search.SearchType.QUERY_AND_FETCH;
|
||||
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -72,6 +73,17 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
|
|||
// if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard
|
||||
searchRequest.searchType(QUERY_AND_FETCH);
|
||||
}
|
||||
if (searchRequest.isSuggestOnly()) {
|
||||
// disable request cache if we have only suggest
|
||||
searchRequest.requestCache(false);
|
||||
switch (searchRequest.searchType()) {
|
||||
case DFS_QUERY_AND_FETCH:
|
||||
case DFS_QUERY_THEN_FETCH:
|
||||
// convert to Q_T_F if we have only suggest
|
||||
searchRequest.searchType(QUERY_THEN_FETCH);
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (IndexNotFoundException | IndexClosedException e) {
|
||||
// ignore these failures, we will notify the search response if its really the case from the actual action
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -1,60 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastShardRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Internal suggest request executed directly against a specific index shard.
|
||||
*/
|
||||
public final class ShardSuggestRequest extends BroadcastShardRequest {
|
||||
|
||||
private SuggestBuilder suggest;
|
||||
|
||||
public ShardSuggestRequest() {
|
||||
}
|
||||
|
||||
ShardSuggestRequest(ShardId shardId, SuggestRequest request) {
|
||||
super(shardId, request);
|
||||
this.suggest = request.suggest();
|
||||
}
|
||||
|
||||
public SuggestBuilder suggest() {
|
||||
return suggest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
suggest = SuggestBuilder.PROTOTYPE.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
suggest.writeTo(out);
|
||||
}
|
||||
}
|
|
@ -1,61 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Internal suggest response of a shard suggest request executed directly against a specific shard.
|
||||
*/
|
||||
class ShardSuggestResponse extends BroadcastShardResponse {
|
||||
|
||||
private final Suggest suggest;
|
||||
|
||||
ShardSuggestResponse() {
|
||||
this.suggest = new Suggest();
|
||||
}
|
||||
|
||||
ShardSuggestResponse(ShardId shardId, Suggest suggest) {
|
||||
super(shardId);
|
||||
this.suggest = suggest;
|
||||
}
|
||||
|
||||
public Suggest getSuggest() {
|
||||
return this.suggest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
suggest.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
suggest.writeTo(out);
|
||||
}
|
||||
}
|
|
@ -1,46 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class SuggestAction extends Action<SuggestRequest, SuggestResponse, SuggestRequestBuilder> {
|
||||
|
||||
public static final SuggestAction INSTANCE = new SuggestAction();
|
||||
public static final String NAME = "indices:data/read/suggest";
|
||||
|
||||
private SuggestAction() {
|
||||
super(NAME);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestResponse newResponse() {
|
||||
return new SuggestResponse(new Suggest());
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestRequestBuilder newRequestBuilder(ElasticsearchClient client) {
|
||||
return new SuggestRequestBuilder(client, this);
|
||||
}
|
||||
}
|
|
@ -1,154 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastRequest;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A request to get suggestions for corrections of phrases. Best created with
|
||||
* {@link org.elasticsearch.client.Requests#suggestRequest(String...)}.
|
||||
* <p>
|
||||
* The request requires the suggest query source to be set using
|
||||
* {@link #suggest(org.elasticsearch.search.suggest.SuggestBuilder)}
|
||||
*
|
||||
* @see SuggestResponse
|
||||
* @see org.elasticsearch.client.Client#suggest(SuggestRequest)
|
||||
* @see org.elasticsearch.client.Requests#suggestRequest(String...)
|
||||
* @see org.elasticsearch.search.suggest.SuggestBuilders
|
||||
*/
|
||||
public final class SuggestRequest extends BroadcastRequest<SuggestRequest> {
|
||||
|
||||
@Nullable
|
||||
private String routing;
|
||||
|
||||
@Nullable
|
||||
private String preference;
|
||||
|
||||
private SuggestBuilder suggest;
|
||||
|
||||
public SuggestRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new suggest request against the provided indices. No indices provided means it will
|
||||
* run against all indices.
|
||||
*/
|
||||
public SuggestRequest(String... indices) {
|
||||
super(indices);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = super.validate();
|
||||
return validationException;
|
||||
}
|
||||
|
||||
/**
|
||||
* The suggestion query to get correction suggestions for
|
||||
*/
|
||||
public SuggestBuilder suggest() {
|
||||
return suggest;
|
||||
}
|
||||
|
||||
/**
|
||||
* set a new source for the suggest query
|
||||
*/
|
||||
public SuggestRequest suggest(SuggestBuilder suggest) {
|
||||
Objects.requireNonNull(suggest, "suggest must not be null");
|
||||
this.suggest = suggest;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* A comma separated list of routing values to control the shards the search will be executed on.
|
||||
*/
|
||||
public String routing() {
|
||||
return this.routing;
|
||||
}
|
||||
|
||||
/**
|
||||
* A comma separated list of routing values to control the shards the search will be executed on.
|
||||
*/
|
||||
public SuggestRequest routing(String routing) {
|
||||
this.routing = routing;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The routing values to control the shards that the search will be executed on.
|
||||
*/
|
||||
public SuggestRequest routing(String... routings) {
|
||||
this.routing = Strings.arrayToCommaDelimitedString(routings);
|
||||
return this;
|
||||
}
|
||||
|
||||
public SuggestRequest preference(String preference) {
|
||||
this.preference = preference;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String preference() {
|
||||
return this.preference;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
routing = in.readOptionalString();
|
||||
preference = in.readOptionalString();
|
||||
suggest = SuggestBuilder.PROTOTYPE.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
Objects.requireNonNull(suggest, "suggest must not be null");
|
||||
super.writeTo(out);
|
||||
out.writeOptionalString(routing);
|
||||
out.writeOptionalString(preference);
|
||||
suggest.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
Objects.requireNonNull(suggest, "suggest must not be null");
|
||||
String sSource = "_na_";
|
||||
try {
|
||||
XContentBuilder builder = JsonXContent.contentBuilder();
|
||||
builder = suggest.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
sSource = builder.string();
|
||||
} catch (Exception e) {
|
||||
// ignore
|
||||
}
|
||||
return "[" + Arrays.toString(indices) + "]" + ", suggest[" + sSource + "]";
|
||||
}
|
||||
}
|
|
@ -1,85 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
|
||||
/**
|
||||
* A suggest action request builder.
|
||||
*/
|
||||
public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder<SuggestRequest, SuggestResponse, SuggestRequestBuilder> {
|
||||
|
||||
final SuggestBuilder suggest = new SuggestBuilder();
|
||||
|
||||
public SuggestRequestBuilder(ElasticsearchClient client, SuggestAction action) {
|
||||
super(client, action, new SuggestRequest());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a definition for suggestions to the request
|
||||
* @param name the name for the suggestion that will also be used in the response
|
||||
* @param suggestion the suggestion configuration
|
||||
*/
|
||||
public SuggestRequestBuilder addSuggestion(String name, SuggestionBuilder<?> suggestion) {
|
||||
suggest.addSuggestion(name, suggestion);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* A comma separated list of routing values to control the shards the search will be executed on.
|
||||
*/
|
||||
public SuggestRequestBuilder setRouting(String routing) {
|
||||
request.routing(routing);
|
||||
return this;
|
||||
}
|
||||
|
||||
public SuggestRequestBuilder setSuggestText(String globalText) {
|
||||
this.suggest.setGlobalText(globalText);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
|
||||
* <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards,
|
||||
* _shards:x,y to operate on shards x & y, or a custom value, which guarantees that the same order
|
||||
* will be used across different requests.
|
||||
*/
|
||||
public SuggestRequestBuilder setPreference(String preference) {
|
||||
request.preference(preference);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The routing values to control the shards that the search will be executed on.
|
||||
*/
|
||||
public SuggestRequestBuilder setRouting(String... routing) {
|
||||
request.routing(routing);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected SuggestRequest beforeExecute(SuggestRequest request) {
|
||||
request.suggest(suggest);
|
||||
return request;
|
||||
}
|
||||
}
|
|
@ -1,82 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
|
||||
|
||||
/**
|
||||
* The response of the suggest action.
|
||||
*/
|
||||
public final class SuggestResponse extends BroadcastResponse {
|
||||
|
||||
private final Suggest suggest;
|
||||
|
||||
SuggestResponse(Suggest suggest) {
|
||||
this.suggest = suggest;
|
||||
}
|
||||
|
||||
SuggestResponse(Suggest suggest, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
|
||||
super(totalShards, successfulShards, failedShards, shardFailures);
|
||||
this.suggest = suggest;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Suggestions of the phrase.
|
||||
*/
|
||||
public Suggest getSuggest() {
|
||||
return suggest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
this.suggest.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
this.suggest.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
|
||||
builder.startObject();
|
||||
suggest.toXContent(builder, EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
return builder.string();
|
||||
} catch (IOException e) {
|
||||
return "{ \"error\" : \"" + e.getMessage() + "\"}";
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,152 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.suggest;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
|
||||
import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockLevel;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.routing.GroupShardsIterator;
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.suggest.stats.ShardSuggestMetric;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestPhase;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
/**
|
||||
* Defines the transport of a suggestion request across the cluster
|
||||
*/
|
||||
/**
 * Defines the transport of a suggestion request across the cluster: broadcasts the request
 * to one copy of every targeted shard, runs the suggest phase on each shard, and reduces
 * the per-shard {@link Suggest} results into a single response.
 */
public class TransportSuggestAction
        extends TransportBroadcastAction<SuggestRequest, SuggestResponse, ShardSuggestRequest, ShardSuggestResponse> {

    private final IndicesService indicesService;
    private final SuggestPhase suggestPhase;

    @Inject
    public TransportSuggestAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                  TransportService transportService, IndicesService indicesService, SuggestPhase suggestPhase,
                                  ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
        // shard-level operations run on the dedicated SUGGEST thread pool
        super(settings, SuggestAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
                SuggestRequest::new, ShardSuggestRequest::new, ThreadPool.Names.SUGGEST);
        this.indicesService = indicesService;
        this.suggestPhase = suggestPhase;
    }

    @Override
    protected ShardSuggestRequest newShardRequest(int numShards, ShardRouting shard, SuggestRequest request) {
        return new ShardSuggestRequest(shard.shardId(), request);
    }

    @Override
    protected ShardSuggestResponse newShardResponse() {
        // empty instance used as a deserialization target by the broadcast machinery
        return new ShardSuggestResponse();
    }

    @Override
    protected GroupShardsIterator shards(ClusterState clusterState, SuggestRequest request, String[] concreteIndices) {
        // resolve routing values per index, then pick shard copies honoring the request preference
        Map<String, Set<String>> routingMap =
                indexNameExpressionResolver.resolveSearchRouting(clusterState, request.routing(), request.indices());
        return clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference());
    }

    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, SuggestRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, SuggestRequest countRequest, String[] concreteIndices) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices);
    }

    @Override
    protected SuggestResponse newResponse(SuggestRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
        // reduce phase: merge per-shard suggestions and tally successes/failures
        int successfulShards = 0;
        int failedShards = 0;

        final Map<String, List<Suggest.Suggestion>> groupedSuggestions = new HashMap<>();

        // allocated lazily; most requests have no shard failures
        List<ShardOperationFailedException> shardFailures = null;
        for (int i = 0; i < shardsResponses.length(); i++) {
            Object shardResponse = shardsResponses.get(i);
            if (shardResponse == null) {
                // simply ignore non active shards
            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                failedShards++;
                if (shardFailures == null) {
                    shardFailures = new ArrayList<>();
                }
                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
            } else {
                // group suggestions by name across shards so they can be reduced together
                Suggest suggest = ((ShardSuggestResponse) shardResponse).getSuggest();
                Suggest.group(groupedSuggestions, suggest);
                successfulShards++;
            }
        }

        return new SuggestResponse(new Suggest(Suggest.reduce(groupedSuggestions)), shardsResponses.length(),
                successfulShards, failedShards, shardFailures);
    }

    @Override
    protected ShardSuggestResponse shardOperation(ShardSuggestRequest request) {
        // per-shard execution: run the suggest phase against an acquired searcher, recording metrics
        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
        IndexShard indexShard = indexService.getShard(request.shardId().id());
        ShardSuggestMetric suggestMetric = indexShard.getSuggestMetric();
        suggestMetric.preSuggest();
        long startTime = System.nanoTime();
        try (Engine.Searcher searcher = indexShard.acquireSearcher("suggest")) {
            SuggestBuilder suggest = request.suggest();
            if (suggest != null) {
                final SuggestionSearchContext context = suggest.build(indexService.newQueryShardContext());
                final Suggest result = suggestPhase.execute(context, searcher.searcher());
                return new ShardSuggestResponse(request.shardId(), result);
            }
            // request carried no suggestions: answer with an empty result
            return new ShardSuggestResponse(request.shardId(), new Suggest());
        } catch (Throwable ex) {
            throw new ElasticsearchException("failed to execute suggest", ex);
        } finally {
            // runs even on failure so the suggest timing metric stays consistent
            suggestMetric.postSuggest(System.nanoTime() - startTime);
        }
    }
}
|
|
@ -1,23 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Suggest action.
|
||||
*/
|
||||
package org.elasticsearch.action.suggest;
|
|
@ -60,7 +60,7 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
|||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = null;
|
||||
if (taskId.isSet() == false && nodesIds.length > 0) {
|
||||
if (taskId.isSet() && nodesIds.length > 0) {
|
||||
validationException = addValidationError("task id cannot be used together with node ids",
|
||||
validationException);
|
||||
}
|
||||
|
@ -165,12 +165,12 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
|||
if (getActions() != null && getActions().length > 0 && Regex.simpleMatch(getActions(), task.getAction()) == false) {
|
||||
return false;
|
||||
}
|
||||
if (getTaskId().isSet() == false) {
|
||||
if (getTaskId().isSet()) {
|
||||
if(getTaskId().getId() != task.getId()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (parentTaskId.isSet() == false) {
|
||||
if (parentTaskId.isSet()) {
|
||||
if (parentTaskId.equals(task.getParentTaskId()) == false) {
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -125,14 +125,14 @@ public abstract class TransportTasksAction<
|
|||
|
||||
protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) {
|
||||
if (request.getTaskId().isSet()) {
|
||||
return clusterState.nodes().resolveNodesIds(request.getNodesIds());
|
||||
} else {
|
||||
return new String[]{request.getTaskId().getNodeId()};
|
||||
} else {
|
||||
return clusterState.nodes().resolveNodesIds(request.getNodesIds());
|
||||
}
|
||||
}
|
||||
|
||||
protected void processTasks(TasksRequest request, Consumer<OperationTask> operation) {
|
||||
if (request.getTaskId().isSet() == false) {
|
||||
if (request.getTaskId().isSet()) {
|
||||
// we are only checking one task, we can optimize it
|
||||
Task task = taskManager.getTask(request.getTaskId().getId());
|
||||
if (task != null) {
|
||||
|
|
|
@ -123,7 +123,9 @@ final class BootstrapCheck {
|
|||
if (Constants.LINUX) {
|
||||
checks.add(new MaxNumberOfThreadsCheck());
|
||||
}
|
||||
checks.add(new MaxSizeVirtualMemoryCheck());
|
||||
if (Constants.LINUX || Constants.MAC_OS_X) {
|
||||
checks.add(new MaxSizeVirtualMemoryCheck());
|
||||
}
|
||||
return Collections.unmodifiableList(checks);
|
||||
}
|
||||
|
||||
|
|
|
@ -76,6 +76,9 @@ class Elasticsearch extends Command {
|
|||
|
||||
@Override
|
||||
protected void execute(Terminal terminal, OptionSet options) throws Exception {
|
||||
if (options.nonOptionArguments().isEmpty() == false) {
|
||||
throw new UserError(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments());
|
||||
}
|
||||
if (options.has(versionOption)) {
|
||||
if (options.has(daemonizeOption) || options.has(pidfileOption)) {
|
||||
throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option");
|
||||
|
|
|
@ -68,9 +68,6 @@ import org.elasticsearch.action.search.SearchRequestBuilder;
|
|||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchScrollRequest;
|
||||
import org.elasticsearch.action.search.SearchScrollRequestBuilder;
|
||||
import org.elasticsearch.action.suggest.SuggestRequest;
|
||||
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
|
||||
import org.elasticsearch.action.suggest.SuggestResponse;
|
||||
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
|
||||
import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder;
|
||||
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
|
||||
|
@ -367,29 +364,6 @@ public interface Client extends ElasticsearchClient, Releasable {
|
|||
*/
|
||||
MultiGetRequestBuilder prepareMultiGet();
|
||||
|
||||
/**
|
||||
* Suggestion matching a specific phrase.
|
||||
*
|
||||
* @param request The suggest request
|
||||
* @return The result future
|
||||
* @see Requests#suggestRequest(String...)
|
||||
*/
|
||||
ActionFuture<SuggestResponse> suggest(SuggestRequest request);
|
||||
|
||||
/**
|
||||
* Suggestions matching a specific phrase.
|
||||
*
|
||||
* @param request The suggest request
|
||||
* @param listener A listener to be notified of the result
|
||||
* @see Requests#suggestRequest(String...)
|
||||
*/
|
||||
void suggest(SuggestRequest request, ActionListener<SuggestResponse> listener);
|
||||
|
||||
/**
|
||||
* Suggestions matching a specific phrase.
|
||||
*/
|
||||
SuggestRequestBuilder prepareSuggest(String... indices);
|
||||
|
||||
/**
|
||||
* Search across one or more indices and one or more types with a query.
|
||||
*
|
||||
|
|
|
@ -60,9 +60,7 @@ import org.elasticsearch.action.get.GetRequest;
|
|||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchScrollRequest;
|
||||
import org.elasticsearch.action.suggest.SuggestRequest;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
|
||||
/**
|
||||
* A handy one stop shop for creating requests (make sure to import static this class).
|
||||
|
@ -126,16 +124,6 @@ public class Requests {
|
|||
return new GetRequest(index);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a suggest request for getting suggestions from provided <code>indices</code>.
|
||||
* The suggest query has to be set using {@link org.elasticsearch.action.suggest.SuggestRequest#suggest(SuggestBuilder)}.
|
||||
* @param indices The indices to suggest from. Use <tt>null</tt> or <tt>_all</tt> to execute against all indices
|
||||
* @see org.elasticsearch.client.Client#suggest(org.elasticsearch.action.suggest.SuggestRequest)
|
||||
*/
|
||||
public static SuggestRequest suggestRequest(String... indices) {
|
||||
return new SuggestRequest(indices);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a search request against one or more indices. Note, the search source must be set either using the
|
||||
* actual JSON search source, or the {@link org.elasticsearch.search.builder.SearchSourceBuilder}.
|
||||
|
|
|
@ -314,10 +314,6 @@ import org.elasticsearch.action.search.SearchResponse;
|
|||
import org.elasticsearch.action.search.SearchScrollAction;
|
||||
import org.elasticsearch.action.search.SearchScrollRequest;
|
||||
import org.elasticsearch.action.search.SearchScrollRequestBuilder;
|
||||
import org.elasticsearch.action.suggest.SuggestAction;
|
||||
import org.elasticsearch.action.suggest.SuggestRequest;
|
||||
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
|
||||
import org.elasticsearch.action.suggest.SuggestResponse;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.action.support.ThreadedActionListener;
|
||||
import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
|
||||
|
@ -660,21 +656,6 @@ public abstract class AbstractClient extends AbstractComponent implements Client
|
|||
return new MultiSearchRequestBuilder(this, MultiSearchAction.INSTANCE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionFuture<SuggestResponse> suggest(final SuggestRequest request) {
|
||||
return execute(SuggestAction.INSTANCE, request);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void suggest(final SuggestRequest request, final ActionListener<SuggestResponse> listener) {
|
||||
execute(SuggestAction.INSTANCE, request, listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestRequestBuilder prepareSuggest(String... indices) {
|
||||
return new SuggestRequestBuilder(this, SuggestAction.INSTANCE).setIndices(indices);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionFuture<TermVectorsResponse> termVectors(final TermVectorsRequest request) {
|
||||
return execute(TermVectorsAction.INSTANCE, request);
|
||||
|
|
|
@ -266,7 +266,7 @@ public final class ShardRouting implements Streamable, ToXContent {
|
|||
return false;
|
||||
}
|
||||
|
||||
if (indexMetaData.activeAllocationIds(id()).isEmpty() && indexMetaData.getCreationVersion().onOrAfter(Version.V_5_0_0)) {
|
||||
if (indexMetaData.activeAllocationIds(id()).isEmpty() && indexMetaData.getCreationVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
// when no shards with this id have ever been active for this index
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -445,5 +445,10 @@ public class PagedBytesReference implements BytesReference {
|
|||
// do nothing
|
||||
}
|
||||
|
||||
@Override
|
||||
public int available() throws IOException {
|
||||
return length - pos;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,6 +59,11 @@ public abstract class FilterStreamInput extends StreamInput {
|
|||
delegate.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int available() throws IOException {
|
||||
return delegate.available();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Version getVersion() {
|
||||
return delegate.getVersion();
|
||||
|
|
|
@ -74,6 +74,11 @@ public class InputStreamStreamInput extends StreamInput {
|
|||
is.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int available() throws IOException {
|
||||
return is.available();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
return is.read();
|
||||
|
|
|
@ -37,14 +37,13 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
|||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.ingest.IngestStats;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.completion.context.QueryContext;
|
||||
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
||||
|
@ -68,7 +67,6 @@ import java.util.HashMap;
|
|||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import static org.elasticsearch.ElasticsearchException.readException;
|
||||
|
@ -375,6 +373,9 @@ public abstract class StreamInput extends InputStream {
|
|||
@Override
|
||||
public abstract void close() throws IOException;
|
||||
|
||||
@Override
|
||||
public abstract int available() throws IOException;
|
||||
|
||||
public String[] readStringArray() throws IOException {
|
||||
int size = readVInt();
|
||||
if (size == 0) {
|
||||
|
@ -687,21 +688,21 @@ public abstract class StreamInput extends InputStream {
|
|||
/**
|
||||
* Reads a {@link AggregatorBuilder} from the current stream
|
||||
*/
|
||||
public AggregatorBuilder readAggregatorFactory() throws IOException {
|
||||
public AggregatorBuilder<?> readAggregatorFactory() throws IOException {
|
||||
return readNamedWriteable(AggregatorBuilder.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a {@link PipelineAggregatorBuilder} from the current stream
|
||||
*/
|
||||
public PipelineAggregatorBuilder readPipelineAggregatorFactory() throws IOException {
|
||||
public PipelineAggregatorBuilder<?> readPipelineAggregatorFactory() throws IOException {
|
||||
return readNamedWriteable(PipelineAggregatorBuilder.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a {@link QueryBuilder} from the current stream
|
||||
*/
|
||||
public QueryBuilder readQuery() throws IOException {
|
||||
public QueryBuilder<?> readQuery() throws IOException {
|
||||
return readNamedWriteable(QueryBuilder.class);
|
||||
}
|
||||
|
||||
|
@ -726,6 +727,13 @@ public abstract class StreamInput extends InputStream {
|
|||
return readNamedWriteable(SuggestionBuilder.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a {@link SortBuilder} from the current stream
|
||||
*/
|
||||
public SortBuilder<?> readSortBuilder() throws IOException {
|
||||
return readNamedWriteable(SortBuilder.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a {@link org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder} from the current stream
|
||||
*/
|
||||
|
|
|
@ -36,13 +36,13 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
|||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.completion.context.QueryContext;
|
||||
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.joda.time.ReadableInstant;
|
||||
|
||||
import java.io.EOFException;
|
||||
|
@ -532,7 +532,7 @@ public abstract class StreamOutput extends OutputStream {
|
|||
}
|
||||
}
|
||||
|
||||
public void writeOptionalWriteable(@Nullable Writeable writeable) throws IOException {
|
||||
public void writeOptionalWriteable(@Nullable Writeable<?> writeable) throws IOException {
|
||||
if (writeable != null) {
|
||||
writeBoolean(true);
|
||||
writeable.writeTo(this);
|
||||
|
@ -663,7 +663,7 @@ public abstract class StreamOutput extends OutputStream {
|
|||
/**
|
||||
* Writes a {@link NamedWriteable} to the current stream, by first writing its name and then the object itself
|
||||
*/
|
||||
void writeNamedWriteable(NamedWriteable namedWriteable) throws IOException {
|
||||
void writeNamedWriteable(NamedWriteable<?> namedWriteable) throws IOException {
|
||||
writeString(namedWriteable.getWriteableName());
|
||||
namedWriteable.writeTo(this);
|
||||
}
|
||||
|
@ -685,7 +685,7 @@ public abstract class StreamOutput extends OutputStream {
|
|||
/**
|
||||
* Writes a {@link QueryBuilder} to the current stream
|
||||
*/
|
||||
public void writeQuery(QueryBuilder queryBuilder) throws IOException {
|
||||
public void writeQuery(QueryBuilder<?> queryBuilder) throws IOException {
|
||||
writeNamedWriteable(queryBuilder);
|
||||
}
|
||||
|
||||
|
@ -745,8 +745,15 @@ public abstract class StreamOutput extends OutputStream {
|
|||
/**
|
||||
* Writes a {@link SuggestionBuilder} to the current stream
|
||||
*/
|
||||
public void writeSuggestion(SuggestionBuilder suggestion) throws IOException {
|
||||
public void writeSuggestion(SuggestionBuilder<?> suggestion) throws IOException {
|
||||
writeNamedWriteable(suggestion);
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes a {@link SortBuilder} to the current stream
|
||||
*/
|
||||
public void writeSortBuilder(SortBuilder<?> sort) throws IOException {
|
||||
writeNamedWriteable(sort);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -76,8 +76,9 @@ public class PropertyPlaceholder {
|
|||
* @param placeholderResolver the <code>PlaceholderResolver</code> to use for replacement.
|
||||
* @return the supplied value with placeholders replaced inline.
|
||||
*/
|
||||
public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) {
|
||||
Objects.requireNonNull(value, "Argument 'value' must not be null.");
|
||||
public String replacePlaceholders(String key, String value, PlaceholderResolver placeholderResolver) {
|
||||
Objects.requireNonNull(key);
|
||||
Objects.requireNonNull(value, "value can not be null for [" + key + "]");
|
||||
return parseStringValue(value, placeholderResolver, new HashSet<String>());
|
||||
}
|
||||
|
||||
|
|
|
@ -523,6 +523,28 @@ public class Setting<T> extends ToXContentToBytes {
|
|||
return new Setting<>(key, defaultValue, (s) -> ByteSizeValue.parseBytesSizeValue(s, key), properties);
|
||||
}
|
||||
|
||||
public static Setting<ByteSizeValue> byteSizeSetting(String key, ByteSizeValue value, ByteSizeValue minValue, ByteSizeValue maxValue,
|
||||
Property... properties) {
|
||||
return byteSizeSetting(key, (s) -> value.toString(), minValue, maxValue, properties);
|
||||
}
|
||||
|
||||
public static Setting<ByteSizeValue> byteSizeSetting(String key, Function<Settings, String> defaultValue,
|
||||
ByteSizeValue minValue, ByteSizeValue maxValue,
|
||||
Property... properties) {
|
||||
return new Setting<>(key, defaultValue, (s) -> parseByteSize(s, minValue, maxValue, key), properties);
|
||||
}
|
||||
|
||||
public static ByteSizeValue parseByteSize(String s, ByteSizeValue minValue, ByteSizeValue maxValue, String key) {
|
||||
ByteSizeValue value = ByteSizeValue.parseBytesSizeValue(s, key);
|
||||
if (value.bytes() < minValue.bytes()) {
|
||||
throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
|
||||
}
|
||||
if (value.bytes() > maxValue.bytes()) {
|
||||
throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
public static Setting<TimeValue> positiveTimeSetting(String key, TimeValue defaultValue, Property... properties) {
|
||||
return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), properties);
|
||||
}
|
||||
|
|
|
@ -1221,7 +1221,7 @@ public final class Settings implements ToXContent {
|
|||
}
|
||||
};
|
||||
for (Map.Entry<String, String> entry : new HashMap<>(map).entrySet()) {
|
||||
String value = propertyPlaceholder.replacePlaceholders(entry.getValue(), placeholderResolver);
|
||||
String value = propertyPlaceholder.replacePlaceholders(entry.getKey(), entry.getValue(), placeholderResolver);
|
||||
// if the values exists and has length, we should maintain it in the map
|
||||
// otherwise, the replace process resolved into removing it
|
||||
if (Strings.hasLength(value)) {
|
||||
|
|
|
@ -118,7 +118,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
|
|||
final boolean enoughAllocationsFound;
|
||||
|
||||
if (lastActiveAllocationIds.isEmpty()) {
|
||||
assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_5_0_0) : "trying to allocated a primary with an empty allocation id set, but index is new";
|
||||
assert Version.indexCreated(indexMetaData.getSettings()).before(Version.V_5_0_0_alpha1) : "trying to allocated a primary with an empty allocation id set, but index is new";
|
||||
// when we load an old index (after upgrading cluster) or restore a snapshot of an old index
|
||||
// fall back to old version-based allocation mode
|
||||
// Note that once the shard has been active, lastActiveAllocationIds will be non-empty
|
||||
|
@ -128,7 +128,7 @@ public abstract class PrimaryShardAllocator extends AbstractComponent {
|
|||
} else {
|
||||
enoughAllocationsFound = isEnoughVersionBasedAllocationsFound(indexMetaData, nodeShardsResult);
|
||||
}
|
||||
logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}", shard.index(), shard.id(), Version.V_5_0_0, nodeShardsResult.allocationsFound, shard);
|
||||
logger.debug("[{}][{}]: version-based allocation for pre-{} index found {} allocations of {}", shard.index(), shard.id(), Version.V_5_0_0_alpha1, nodeShardsResult.allocationsFound, shard);
|
||||
} else {
|
||||
assert lastActiveAllocationIds.isEmpty() == false;
|
||||
// use allocation ids to select nodes
|
||||
|
|
|
@ -127,7 +127,7 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
|
|||
}
|
||||
if (analyzers.containsKey("default_index")) {
|
||||
final Version createdVersion = indexSettings.getIndexVersionCreated();
|
||||
if (createdVersion.onOrAfter(Version.V_5_0_0)) {
|
||||
if (createdVersion.onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index().getName() + "]");
|
||||
} else {
|
||||
deprecationLogger.deprecated("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] instead for index [{}]", index().getName());
|
||||
|
|
|
@ -348,7 +348,7 @@ public class SegmentsStats implements Streamable, ToXContent {
|
|||
indexWriterMaxMemoryInBytes = in.readLong();
|
||||
bitsetMemoryInBytes = in.readLong();
|
||||
|
||||
if (in.getVersion().onOrAfter(Version.V_5_0_0)) {
|
||||
if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
int size = in.readVInt();
|
||||
ImmutableOpenMap.Builder<String, Long> map = ImmutableOpenMap.builder(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
|
@ -376,7 +376,7 @@ public class SegmentsStats implements Streamable, ToXContent {
|
|||
out.writeLong(indexWriterMaxMemoryInBytes);
|
||||
out.writeLong(bitsetMemoryInBytes);
|
||||
|
||||
if (out.getVersion().onOrAfter(Version.V_5_0_0)) {
|
||||
if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
out.writeVInt(fileSizes.size());
|
||||
for (Iterator<ObjectObjectCursor<String, Long>> it = fileSizes.iterator(); it.hasNext();) {
|
||||
ObjectObjectCursor<String, Long> entry = it.next();
|
||||
|
|
|
@ -219,7 +219,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
}
|
||||
|
||||
protected boolean defaultDocValues(Version indexCreated) {
|
||||
if (indexCreated.onOrAfter(Version.V_5_0_0)) {
|
||||
if (indexCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
// add doc values by default to keyword (boolean, numerics, etc.) fields
|
||||
return fieldType.tokenized() == false;
|
||||
} else {
|
||||
|
@ -229,7 +229,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
|
||||
protected void setupFieldType(BuilderContext context) {
|
||||
fieldType.setName(buildFullName(context));
|
||||
if (context.indexCreatedVersion().before(Version.V_5_0_0)) {
|
||||
if (context.indexCreatedVersion().before(Version.V_5_0_0_alpha1)) {
|
||||
fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f);
|
||||
}
|
||||
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
|
||||
|
@ -289,7 +289,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
|
|||
if (!customBoost()
|
||||
// don't set boosts eg. on dv fields
|
||||
&& field.fieldType().indexOptions() != IndexOptions.NONE
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(fieldType().boost());
|
||||
}
|
||||
context.doc().add(field);
|
||||
|
|
|
@ -341,7 +341,7 @@ public abstract class MappedFieldType extends FieldType {
|
|||
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
||||
TermQuery query = new TermQuery(createTerm(value));
|
||||
if (boost == 1f ||
|
||||
(context != null && context.indexVersionCreated().before(Version.V_5_0_0))) {
|
||||
(context != null && context.indexVersionCreated().before(Version.V_5_0_0_alpha1))) {
|
||||
return query;
|
||||
}
|
||||
return new BoostQuery(query, boost);
|
||||
|
|
|
@ -110,8 +110,6 @@ public class BooleanFieldMapper extends FieldMapper {
|
|||
}
|
||||
builder.nullValue(lenientNodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
|
|
@ -261,7 +261,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
|
|||
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Byte objValue = fieldType().nullValue();
|
||||
|
@ -294,7 +294,7 @@ public class ByteFieldMapper extends NumberFieldMapper {
|
|||
}
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomByteNumericField field = new CustomByteNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -488,7 +488,7 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
} else if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
dateAsString = parser.text();
|
||||
} else if (token == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
|
@ -523,7 +523,7 @@ public class DateFieldMapper extends NumberFieldMapper {
|
|||
if (value != null) {
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -253,7 +253,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
|
|||
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Double objValue = fieldType().nullValue();
|
||||
|
@ -287,7 +287,7 @@ public class DoubleFieldMapper extends NumberFieldMapper {
|
|||
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomDoubleNumericField field = new CustomDoubleNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -265,7 +265,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
|
|||
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Float objValue = fieldType().nullValue();
|
||||
|
@ -299,7 +299,7 @@ public class FloatFieldMapper extends NumberFieldMapper {
|
|||
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomFloatNumericField field = new CustomFloatNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -270,7 +270,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
|
|||
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Integer objValue = fieldType().nullValue();
|
||||
|
@ -307,7 +307,7 @@ public class IntegerFieldMapper extends NumberFieldMapper {
|
|||
protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomIntegerNumericField field = new CustomIntegerNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -134,8 +134,6 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
|
|||
} else if (propName.equals("eager_global_ordinals")) {
|
||||
builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
|
|
@ -258,7 +258,7 @@ public class LongFieldMapper extends NumberFieldMapper {
|
|||
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Long objValue = fieldType().nullValue();
|
||||
|
@ -291,7 +291,7 @@ public class LongFieldMapper extends NumberFieldMapper {
|
|||
}
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -266,7 +266,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
|||
context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost);
|
||||
}
|
||||
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
Short objValue = fieldType().nullValue();
|
||||
|
@ -299,7 +299,7 @@ public class ShortFieldMapper extends NumberFieldMapper {
|
|||
}
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomShortNumericField field = new CustomShortNumericField(value, fieldType());
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(boost);
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -193,7 +193,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
|
||||
@Override
|
||||
public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0)) {
|
||||
if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
// Automatically upgrade simple mappings for ease of upgrade, otherwise fail
|
||||
if (SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE.containsAll(node.keySet())) {
|
||||
deprecationLogger.deprecated("The [string] field is deprecated, please use [text] or [keyword] instead on [{}]",
|
||||
|
@ -334,8 +334,6 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
|
||||
DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated());
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
@ -490,7 +488,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
int positionIncrementGap, int ignoreAbove,
|
||||
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
|
||||
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
|
||||
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0)) {
|
||||
if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
|
||||
+ "or [keyword] field instead for field [" + fieldType.name() + "]");
|
||||
}
|
||||
|
@ -573,7 +571,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType());
|
||||
if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(valueAndBoost.boost());
|
||||
}
|
||||
fields.add(field);
|
||||
|
@ -600,7 +598,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
|||
return new ValueAndBoost(nullValue, defaultBoost);
|
||||
}
|
||||
if (parser.currentToken() == XContentParser.Token.START_OBJECT
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
String value = nullValue;
|
||||
|
|
|
@ -163,8 +163,6 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
|
|||
builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
|
||||
DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated());
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
|
|
|
@ -250,7 +250,7 @@ public class TypeParsers {
|
|||
} else if (propName.equals("boost")) {
|
||||
builder.boost(nodeFloatValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (parserContext.indexVersionCreated().before(Version.V_5_0_0)
|
||||
} else if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)
|
||||
&& parseNorms(builder, propName, propNode, parserContext)) {
|
||||
iterator.remove();
|
||||
} else if (propName.equals("index_options")) {
|
||||
|
@ -265,9 +265,11 @@ public class TypeParsers {
|
|||
iterator.remove();
|
||||
} else if (propName.equals("fielddata")
|
||||
&& propNode instanceof Map
|
||||
&& parserContext.indexVersionCreated().before(Version.V_5_0_0)) {
|
||||
&& parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) {
|
||||
// ignore for bw compat
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
} else if (propName.equals("copy_to")) {
|
||||
if (parserContext.isWithinMultiField()) {
|
||||
if (indexVersionCreated.after(Version.V_2_1_0) ||
|
||||
|
@ -434,7 +436,7 @@ public class TypeParsers {
|
|||
}
|
||||
|
||||
private static SimilarityProvider resolveSimilarity(Mapper.TypeParser.ParserContext parserContext, String name, String value) {
|
||||
if (parserContext.indexVersionCreated().before(Version.V_5_0_0) && "default".equals(value)) {
|
||||
if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1) && "default".equals(value)) {
|
||||
// "default" similarity has been renamed into "classic" in 3.x.
|
||||
value = SimilarityService.DEFAULT_SIMILARITY;
|
||||
}
|
||||
|
|
|
@ -220,8 +220,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
|
|||
} else if (propName.equals(Names.IGNORE_MALFORMED)) {
|
||||
builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -46,7 +46,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
|||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -454,7 +453,7 @@ public class GeoShapeFieldMapper extends FieldMapper {
|
|||
}
|
||||
for (Field field : fields) {
|
||||
if (!customBoost() &&
|
||||
fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(fieldType().boost());
|
||||
}
|
||||
context.doc().add(field);
|
||||
|
|
|
@ -124,7 +124,7 @@ public class SourceFieldMapper extends MetadataFieldMapper {
|
|||
if (fieldName.equals("enabled")) {
|
||||
builder.enabled(lenientNodeBooleanValue(fieldNode));
|
||||
iterator.remove();
|
||||
} else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_5_0_0)) {
|
||||
} else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) {
|
||||
// ignore on old indices, reject on and after 5.0
|
||||
iterator.remove();
|
||||
} else if (fieldName.equals("includes")) {
|
||||
|
|
|
@ -322,7 +322,7 @@ public class IpFieldMapper extends NumberFieldMapper {
|
|||
final long value = ipToLong(ipAsString);
|
||||
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
|
||||
CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
|
||||
if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
|
||||
if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) {
|
||||
field.setBoost(fieldType().boost());
|
||||
}
|
||||
fields.add(field);
|
||||
|
|
|
@ -41,12 +41,20 @@ public class DynamicTemplate implements ToXContent {
|
|||
|
||||
public static enum MatchType {
|
||||
SIMPLE {
|
||||
@Override
|
||||
public boolean matches(String pattern, String value) {
|
||||
return Regex.simpleMatch(pattern, value);
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "simple";
|
||||
}
|
||||
},
|
||||
REGEX {
|
||||
@Override
|
||||
public boolean matches(String pattern, String value) {
|
||||
return value.matches(pattern);
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "regex";
|
||||
|
@ -61,6 +69,9 @@ public class DynamicTemplate implements ToXContent {
|
|||
}
|
||||
throw new IllegalArgumentException("No matching pattern matched on [" + value + "]");
|
||||
}
|
||||
|
||||
/** Whether {@code value} matches {@code regex}. */
|
||||
public abstract boolean matches(String regex, String value);
|
||||
}
|
||||
|
||||
public static DynamicTemplate parse(String name, Map<String, Object> conf,
|
||||
|
@ -89,7 +100,7 @@ public class DynamicTemplate implements ToXContent {
|
|||
matchPattern = entry.getValue().toString();
|
||||
} else if ("mapping".equals(propName)) {
|
||||
mapping = (Map<String, Object>) entry.getValue();
|
||||
} else if (indexVersionCreated.onOrAfter(Version.V_5_0_0)) {
|
||||
} else if (indexVersionCreated.onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
// unknown parameters were ignored before but still carried through serialization
|
||||
// so we need to ignore them at parsing time for old indices
|
||||
throw new IllegalArgumentException("Illegal dynamic template parameter: [" + propName + "]");
|
||||
|
@ -137,23 +148,23 @@ public class DynamicTemplate implements ToXContent {
|
|||
}
|
||||
|
||||
public boolean match(ContentPath path, String name, String dynamicType) {
|
||||
if (pathMatch != null && !patternMatch(pathMatch, path.pathAsText(name))) {
|
||||
if (pathMatch != null && !matchType.matches(pathMatch, path.pathAsText(name))) {
|
||||
return false;
|
||||
}
|
||||
if (match != null && !patternMatch(match, name)) {
|
||||
if (match != null && !matchType.matches(match, name)) {
|
||||
return false;
|
||||
}
|
||||
if (pathUnmatch != null && patternMatch(pathUnmatch, path.pathAsText(name))) {
|
||||
if (pathUnmatch != null && matchType.matches(pathUnmatch, path.pathAsText(name))) {
|
||||
return false;
|
||||
}
|
||||
if (unmatch != null && patternMatch(unmatch, name)) {
|
||||
if (unmatch != null && matchType.matches(unmatch, name)) {
|
||||
return false;
|
||||
}
|
||||
if (matchMappingType != null) {
|
||||
if (dynamicType == null) {
|
||||
return false;
|
||||
}
|
||||
if (!patternMatch(matchMappingType, dynamicType)) {
|
||||
if (!matchType.matches(matchMappingType, dynamicType)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -186,13 +197,6 @@ public class DynamicTemplate implements ToXContent {
|
|||
return type;
|
||||
}
|
||||
|
||||
private boolean patternMatch(String pattern, String str) {
|
||||
if (matchType == MatchType.SIMPLE) {
|
||||
return Regex.simpleMatch(pattern, str);
|
||||
}
|
||||
return str.matches(pattern);
|
||||
}
|
||||
|
||||
public Map<String, Object> mappingForName(String name, String dynamicType) {
|
||||
return processMap(mapping, name, dynamicType);
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.document.FieldType;
|
|||
import org.apache.lucene.index.Fields;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.MultiFields;
|
||||
import org.apache.lucene.index.PrefixCodedTerms;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
|
@ -92,10 +93,17 @@ public final class ExtractQueryTermsService {
|
|||
* an UnsupportedQueryException is thrown.
|
||||
*/
|
||||
static Set<Term> extractQueryTerms(Query query) {
|
||||
// TODO: add support for the TermsQuery when it has methods to access the actual terms it encapsulates
|
||||
// TODO: add support for span queries
|
||||
if (query instanceof TermQuery) {
|
||||
return Collections.singleton(((TermQuery) query).getTerm());
|
||||
} else if (query instanceof TermsQuery) {
|
||||
Set<Term> terms = new HashSet<>();
|
||||
TermsQuery termsQuery = (TermsQuery) query;
|
||||
PrefixCodedTerms.TermIterator iterator = termsQuery.getTermData().iterator();
|
||||
for (BytesRef term = iterator.next(); term != null; term = iterator.next()) {
|
||||
terms.add(new Term(iterator.field(), term));
|
||||
}
|
||||
return terms;
|
||||
} else if (query instanceof PhraseQuery) {
|
||||
Term[] terms = ((PhraseQuery) query).getTerms();
|
||||
if (terms.length == 0) {
|
||||
|
|
|
@ -29,7 +29,6 @@ import org.apache.lucene.index.StoredFieldVisitor;
|
|||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.Version;
|
||||
|
@ -37,11 +36,9 @@ import org.elasticsearch.common.bytes.BytesArray;
|
|||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.cache.Cache;
|
||||
import org.elasticsearch.common.cache.CacheBuilder;
|
||||
import org.elasticsearch.common.lucene.index.ElasticsearchLeafReader;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContent;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
@ -143,7 +140,7 @@ public final class PercolatorQueryCache extends AbstractIndexComponent
|
|||
}
|
||||
|
||||
IntObjectHashMap<Query> queries = new IntObjectHashMap<>();
|
||||
boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0);
|
||||
boolean legacyLoading = indexVersionCreated.before(Version.V_5_0_0_alpha1);
|
||||
PostingsEnum postings = leafReader.postings(new Term(TypeFieldMapper.NAME, PercolatorFieldMapper.TYPE_NAME), PostingsEnum.NONE);
|
||||
if (postings != null) {
|
||||
if (legacyLoading) {
|
||||
|
|
|
@ -301,7 +301,7 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
|
|||
documentType, registry, document, docSearcher, percolateTypeQuery
|
||||
);
|
||||
Settings indexSettings = registry.getIndexSettings().getSettings();
|
||||
if (indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0)) {
|
||||
if (indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null).onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
builder.extractQueryTermsQuery(
|
||||
PercolatorFieldMapper.EXTRACTED_TERMS_FULL_FIELD_NAME, PercolatorFieldMapper.UNKNOWN_QUERY_FULL_FIELD_NAME
|
||||
);
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.search.fetch.source.FetchSourceParseElement;
|
|||
import org.elasticsearch.search.highlight.HighlighterParseElement;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
import org.elasticsearch.search.sort.SortParseElement;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -35,7 +35,6 @@ public class InnerHitsQueryParserHelper {
|
|||
|
||||
public static final InnerHitsQueryParserHelper INSTANCE = new InnerHitsQueryParserHelper();
|
||||
|
||||
private static final SortParseElement sortParseElement = new SortParseElement();
|
||||
private static final FetchSourceParseElement sourceParseElement = new FetchSourceParseElement();
|
||||
private static final HighlighterParseElement highlighterParseElement = new HighlighterParseElement();
|
||||
private static final ScriptFieldsParseElement scriptFieldsParseElement = new ScriptFieldsParseElement();
|
||||
|
@ -54,10 +53,10 @@ public class InnerHitsQueryParserHelper {
|
|||
if ("name".equals(fieldName)) {
|
||||
innerHitName = parser.textOrNull();
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sortParseElement, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sortParseElement, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
|
@ -67,10 +66,10 @@ public class InnerHitsQueryParserHelper {
|
|||
}
|
||||
|
||||
public static void parseCommonInnerHitOptions(XContentParser parser, XContentParser.Token token, String fieldName, SubSearchContext subSearchContext,
|
||||
SortParseElement sortParseElement, FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement,
|
||||
FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement,
|
||||
ScriptFieldsParseElement scriptFieldsParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement) throws Exception {
|
||||
if ("sort".equals(fieldName)) {
|
||||
sortParseElement.parse(parser, subSearchContext);
|
||||
SortBuilder.parseSort(parser, subSearchContext);
|
||||
} else if ("_source".equals(fieldName)) {
|
||||
sourceParseElement.parse(parser, subSearchContext);
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
|
|
|
@ -1,134 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.QueryShardException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A helper that helps with parsing inner queries of the nested query.
|
||||
* 1) Takes into account that type nested path can appear before or after the inner query
|
||||
* 2) Updates the {@link NestedScope} when parsing the inner query.
|
||||
*/
|
||||
public class NestedInnerQueryParseSupport {
|
||||
|
||||
protected final QueryShardContext shardContext;
|
||||
protected final QueryParseContext parseContext;
|
||||
|
||||
private BytesReference source;
|
||||
private Query innerFilter;
|
||||
protected String path;
|
||||
|
||||
private boolean filterParsed = false;
|
||||
protected boolean filterFound = false;
|
||||
|
||||
protected ObjectMapper nestedObjectMapper;
|
||||
|
||||
public NestedInnerQueryParseSupport(XContentParser parser, QueryShardContext context) {
|
||||
shardContext = context;
|
||||
parseContext = shardContext.parseContext();
|
||||
shardContext.reset(parser);
|
||||
}
|
||||
|
||||
public void filter() throws IOException {
|
||||
if (path != null) {
|
||||
setPathLevel();
|
||||
try {
|
||||
innerFilter = QueryBuilder.rewriteQuery(parseContext.parseInnerQueryBuilder(),
|
||||
this.shardContext).toFilter(this.shardContext);
|
||||
} finally {
|
||||
resetPathLevel();
|
||||
}
|
||||
filterParsed = true;
|
||||
} else {
|
||||
source = XContentFactory.smileBuilder().copyCurrentStructure(parseContext.parser()).bytes();
|
||||
}
|
||||
filterFound = true;
|
||||
}
|
||||
|
||||
public Query getInnerFilter() throws IOException {
|
||||
if (filterParsed) {
|
||||
return innerFilter;
|
||||
} else {
|
||||
if (path == null) {
|
||||
throw new QueryShardException(shardContext, "[nested] requires 'path' field");
|
||||
}
|
||||
if (!filterFound) {
|
||||
throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field");
|
||||
}
|
||||
|
||||
setPathLevel();
|
||||
XContentParser old = parseContext.parser();
|
||||
try {
|
||||
XContentParser innerParser = XContentHelper.createParser(source);
|
||||
parseContext.parser(innerParser);
|
||||
innerFilter = QueryBuilder.rewriteQuery(parseContext.parseInnerQueryBuilder(),
|
||||
this.shardContext).toFilter(this.shardContext);
|
||||
filterParsed = true;
|
||||
return innerFilter;
|
||||
} finally {
|
||||
resetPathLevel();
|
||||
parseContext.parser(old);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void setPath(String path) {
|
||||
this.path = path;
|
||||
nestedObjectMapper = shardContext.getObjectMapper(path);
|
||||
if (nestedObjectMapper == null) {
|
||||
throw new QueryShardException(shardContext, "[nested] failed to find nested object under path [" + path + "]");
|
||||
}
|
||||
if (!nestedObjectMapper.nested().isNested()) {
|
||||
throw new QueryShardException(shardContext, "[nested] nested object under path [" + path + "] is not of nested type");
|
||||
}
|
||||
}
|
||||
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public ObjectMapper getNestedObjectMapper() {
|
||||
return nestedObjectMapper;
|
||||
}
|
||||
|
||||
public boolean filterFound() {
|
||||
return filterFound;
|
||||
}
|
||||
|
||||
private void setPathLevel() {
|
||||
shardContext.nestedScope().nextLevel(nestedObjectMapper);
|
||||
}
|
||||
|
||||
private void resetPathLevel() {
|
||||
shardContext.nestedScope().previousLevel();
|
||||
}
|
||||
|
||||
}
|
|
@ -51,6 +51,10 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
private long scrollTimeInMillis;
|
||||
private long scrollCurrent;
|
||||
|
||||
private long suggestCount;
|
||||
private long suggestTimeInMillis;
|
||||
private long suggestCurrent;
|
||||
|
||||
Stats() {
|
||||
|
||||
}
|
||||
|
@ -58,7 +62,8 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
public Stats(
|
||||
long queryCount, long queryTimeInMillis, long queryCurrent,
|
||||
long fetchCount, long fetchTimeInMillis, long fetchCurrent,
|
||||
long scrollCount, long scrollTimeInMillis, long scrollCurrent
|
||||
long scrollCount, long scrollTimeInMillis, long scrollCurrent,
|
||||
long suggestCount, long suggestTimeInMillis, long suggestCurrent
|
||||
) {
|
||||
this.queryCount = queryCount;
|
||||
this.queryTimeInMillis = queryTimeInMillis;
|
||||
|
@ -71,13 +76,19 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
this.scrollCount = scrollCount;
|
||||
this.scrollTimeInMillis = scrollTimeInMillis;
|
||||
this.scrollCurrent = scrollCurrent;
|
||||
|
||||
this.suggestCount = suggestCount;
|
||||
this.suggestTimeInMillis = suggestTimeInMillis;
|
||||
this.suggestCurrent = suggestCurrent;
|
||||
|
||||
}
|
||||
|
||||
public Stats(Stats stats) {
|
||||
this(
|
||||
stats.queryCount, stats.queryTimeInMillis, stats.queryCurrent,
|
||||
stats.fetchCount, stats.fetchTimeInMillis, stats.fetchCurrent,
|
||||
stats.scrollCount, stats.scrollTimeInMillis, stats.scrollCurrent
|
||||
stats.scrollCount, stats.scrollTimeInMillis, stats.scrollCurrent,
|
||||
stats.suggestCount, stats.suggestTimeInMillis, stats.suggestCurrent
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -93,6 +104,10 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
scrollCount += stats.scrollCount;
|
||||
scrollTimeInMillis += stats.scrollTimeInMillis;
|
||||
scrollCurrent += stats.scrollCurrent;
|
||||
|
||||
suggestCount += stats.suggestCount;
|
||||
suggestTimeInMillis += stats.suggestTimeInMillis;
|
||||
suggestCurrent += stats.suggestCurrent;
|
||||
}
|
||||
|
||||
public long getQueryCount() {
|
||||
|
@ -143,6 +158,22 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
return scrollCurrent;
|
||||
}
|
||||
|
||||
public long getSuggestCount() {
|
||||
return suggestCount;
|
||||
}
|
||||
|
||||
public long getSuggestTimeInMillis() {
|
||||
return suggestTimeInMillis;
|
||||
}
|
||||
|
||||
public TimeValue getSuggestTime() {
|
||||
return new TimeValue(suggestTimeInMillis);
|
||||
}
|
||||
|
||||
public long getSuggestCurrent() {
|
||||
return suggestCurrent;
|
||||
}
|
||||
|
||||
public static Stats readStats(StreamInput in) throws IOException {
|
||||
Stats stats = new Stats();
|
||||
stats.readFrom(in);
|
||||
|
@ -162,6 +193,10 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
scrollCount = in.readVLong();
|
||||
scrollTimeInMillis = in.readVLong();
|
||||
scrollCurrent = in.readVLong();
|
||||
|
||||
suggestCount = in.readVLong();
|
||||
suggestTimeInMillis = in.readVLong();
|
||||
suggestCurrent = in.readVLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -177,6 +212,10 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
out.writeVLong(scrollCount);
|
||||
out.writeVLong(scrollTimeInMillis);
|
||||
out.writeVLong(scrollCurrent);
|
||||
|
||||
out.writeVLong(suggestCount);
|
||||
out.writeVLong(suggestTimeInMillis);
|
||||
out.writeVLong(suggestCurrent);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -193,6 +232,10 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
builder.timeValueField(Fields.SCROLL_TIME_IN_MILLIS, Fields.SCROLL_TIME, scrollTimeInMillis);
|
||||
builder.field(Fields.SCROLL_CURRENT, scrollCurrent);
|
||||
|
||||
builder.field(Fields.SUGGEST_TOTAL, suggestCount);
|
||||
builder.timeValueField(Fields.SUGGEST_TIME_IN_MILLIS, Fields.SUGGEST_TIME, suggestTimeInMillis);
|
||||
builder.field(Fields.SUGGEST_CURRENT, suggestCurrent);
|
||||
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
@ -292,6 +335,10 @@ public class SearchStats implements Streamable, ToXContent {
|
|||
static final XContentBuilderString SCROLL_TIME = new XContentBuilderString("scroll_time");
|
||||
static final XContentBuilderString SCROLL_TIME_IN_MILLIS = new XContentBuilderString("scroll_time_in_millis");
|
||||
static final XContentBuilderString SCROLL_CURRENT = new XContentBuilderString("scroll_current");
|
||||
static final XContentBuilderString SUGGEST_TOTAL = new XContentBuilderString("suggest_total");
|
||||
static final XContentBuilderString SUGGEST_TIME = new XContentBuilderString("suggest_time");
|
||||
static final XContentBuilderString SUGGEST_TIME_IN_MILLIS = new XContentBuilderString("suggest_time_in_millis");
|
||||
static final XContentBuilderString SUGGEST_CURRENT = new XContentBuilderString("suggest_current");
|
||||
}
|
||||
|
||||
public static SearchStats readSearchStats(StreamInput in) throws IOException {
|
||||
|
|
|
@ -23,13 +23,13 @@ import org.elasticsearch.common.collect.MapBuilder;
|
|||
import org.elasticsearch.common.metrics.CounterMetric;
|
||||
import org.elasticsearch.common.metrics.MeanMetric;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.SearchSlowLog;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
|
||||
|
@ -72,64 +72,51 @@ public final class ShardSearchStats {
|
|||
}
|
||||
|
||||
public void onPreQueryPhase(SearchContext searchContext) {
|
||||
totalStats.queryCurrent.inc();
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
groupStats(searchContext.groupStats().get(i)).queryCurrent.inc();
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
statsHolder.suggestCurrent.inc();
|
||||
} else {
|
||||
statsHolder.queryCurrent.inc();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void onFailedQueryPhase(SearchContext searchContext) {
|
||||
totalStats.queryCurrent.dec();
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
groupStats(searchContext.groupStats().get(i)).queryCurrent.dec();
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
statsHolder.suggestCurrent.dec();
|
||||
} else {
|
||||
statsHolder.queryCurrent.dec();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
|
||||
totalStats.queryMetric.inc(tookInNanos);
|
||||
totalStats.queryCurrent.dec();
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
StatsHolder statsHolder = groupStats(searchContext.groupStats().get(i));
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
statsHolder.suggestMetric.inc(tookInNanos);
|
||||
statsHolder.suggestCurrent.dec();
|
||||
} else {
|
||||
statsHolder.queryMetric.inc(tookInNanos);
|
||||
statsHolder.queryCurrent.dec();
|
||||
}
|
||||
}
|
||||
});
|
||||
slowLogSearchService.onQueryPhase(searchContext, tookInNanos);
|
||||
}
|
||||
|
||||
public void onPreFetchPhase(SearchContext searchContext) {
|
||||
totalStats.fetchCurrent.inc();
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
groupStats(searchContext.groupStats().get(i)).fetchCurrent.inc();
|
||||
}
|
||||
}
|
||||
computeStats(searchContext, statsHolder -> statsHolder.fetchCurrent.inc());
|
||||
}
|
||||
|
||||
public void onFailedFetchPhase(SearchContext searchContext) {
|
||||
totalStats.fetchCurrent.dec();
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
groupStats(searchContext.groupStats().get(i)).fetchCurrent.dec();
|
||||
}
|
||||
}
|
||||
computeStats(searchContext, statsHolder -> statsHolder.fetchCurrent.dec());
|
||||
}
|
||||
|
||||
public void onFetchPhase(SearchContext searchContext, long tookInNanos) {
|
||||
totalStats.fetchMetric.inc(tookInNanos);
|
||||
totalStats.fetchCurrent.dec();
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
StatsHolder statsHolder = groupStats(searchContext.groupStats().get(i));
|
||||
statsHolder.fetchMetric.inc(tookInNanos);
|
||||
statsHolder.fetchCurrent.dec();
|
||||
}
|
||||
}
|
||||
computeStats(searchContext, statsHolder -> {
|
||||
statsHolder.fetchMetric.inc(tookInNanos);
|
||||
statsHolder.fetchCurrent.dec();
|
||||
});
|
||||
slowLogSearchService.onFetchPhase(searchContext, tookInNanos);
|
||||
}
|
||||
|
||||
|
@ -149,6 +136,15 @@ public final class ShardSearchStats {
|
|||
}
|
||||
}
|
||||
|
||||
private void computeStats(SearchContext searchContext, Consumer<StatsHolder> consumer) {
|
||||
consumer.accept(totalStats);
|
||||
if (searchContext.groupStats() != null) {
|
||||
for (int i = 0; i < searchContext.groupStats().size(); i++) {
|
||||
consumer.accept(groupStats(searchContext.groupStats().get(i)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private StatsHolder groupStats(String group) {
|
||||
StatsHolder stats = groupsStats.get(group);
|
||||
if (stats == null) {
|
||||
|
@ -184,26 +180,30 @@ public final class ShardSearchStats {
|
|||
public final MeanMetric queryMetric = new MeanMetric();
|
||||
public final MeanMetric fetchMetric = new MeanMetric();
|
||||
public final MeanMetric scrollMetric = new MeanMetric();
|
||||
public final MeanMetric suggestMetric = new MeanMetric();
|
||||
public final CounterMetric queryCurrent = new CounterMetric();
|
||||
public final CounterMetric fetchCurrent = new CounterMetric();
|
||||
public final CounterMetric scrollCurrent = new CounterMetric();
|
||||
public final CounterMetric suggestCurrent = new CounterMetric();
|
||||
|
||||
public SearchStats.Stats stats() {
|
||||
return new SearchStats.Stats(
|
||||
queryMetric.count(), TimeUnit.NANOSECONDS.toMillis(queryMetric.sum()), queryCurrent.count(),
|
||||
fetchMetric.count(), TimeUnit.NANOSECONDS.toMillis(fetchMetric.sum()), fetchCurrent.count(),
|
||||
scrollMetric.count(), TimeUnit.NANOSECONDS.toMillis(scrollMetric.sum()), scrollCurrent.count()
|
||||
scrollMetric.count(), TimeUnit.NANOSECONDS.toMillis(scrollMetric.sum()), scrollCurrent.count(),
|
||||
suggestMetric.count(), TimeUnit.NANOSECONDS.toMillis(suggestMetric.sum()), suggestCurrent.count()
|
||||
);
|
||||
}
|
||||
|
||||
public long totalCurrent() {
|
||||
return queryCurrent.count() + fetchCurrent.count() + scrollCurrent.count();
|
||||
return queryCurrent.count() + fetchCurrent.count() + scrollCurrent.count() + suggestCurrent.count();
|
||||
}
|
||||
|
||||
public void clear() {
|
||||
queryMetric.clear();
|
||||
fetchMetric.clear();
|
||||
scrollMetric.clear();
|
||||
suggestMetric.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -92,8 +92,6 @@ import org.elasticsearch.index.store.Store;
|
|||
import org.elasticsearch.index.store.Store.MetadataSnapshot;
|
||||
import org.elasticsearch.index.store.StoreFileMetaData;
|
||||
import org.elasticsearch.index.store.StoreStats;
|
||||
import org.elasticsearch.index.suggest.stats.ShardSuggestMetric;
|
||||
import org.elasticsearch.index.suggest.stats.SuggestStats;
|
||||
import org.elasticsearch.index.translog.Translog;
|
||||
import org.elasticsearch.index.translog.TranslogConfig;
|
||||
import org.elasticsearch.index.translog.TranslogStats;
|
||||
|
@ -135,7 +133,6 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
private final ShardRequestCache shardQueryCache;
|
||||
private final ShardFieldData shardFieldData;
|
||||
private final IndexFieldDataService indexFieldDataService;
|
||||
private final ShardSuggestMetric shardSuggestMetric = new ShardSuggestMetric();
|
||||
private final ShardBitsetFilterCache shardBitsetFilterCache;
|
||||
private final Object mutex = new Object();
|
||||
private final String checkIndexOnStartup;
|
||||
|
@ -254,10 +251,6 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
return this.getService;
|
||||
}
|
||||
|
||||
public ShardSuggestMetric getSuggestMetric() {
|
||||
return shardSuggestMetric;
|
||||
}
|
||||
|
||||
public ShardBitsetFilterCache shardBitsetFilterCache() {
|
||||
return shardBitsetFilterCache;
|
||||
}
|
||||
|
@ -631,10 +624,6 @@ public class IndexShard extends AbstractIndexShardComponent {
|
|||
return getEngine().getTranslog().stats();
|
||||
}
|
||||
|
||||
public SuggestStats suggestStats() {
|
||||
return shardSuggestMetric.stats();
|
||||
}
|
||||
|
||||
public CompletionStats completionStats(String... fields) {
|
||||
CompletionStats completionStats = new CompletionStats();
|
||||
try (final Engine.Searcher currentSearcher = acquireSearcher("completion_stats")) {
|
||||
|
|
|
@ -65,7 +65,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
|||
for (Map.Entry<String, Settings> entry : similaritySettings.entrySet()) {
|
||||
String name = entry.getKey();
|
||||
// Starting with v5.0 indices, it should no longer be possible to redefine built-in similarities
|
||||
if(BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0)) {
|
||||
if(BUILT_IN.containsKey(name) && indexSettings.getIndexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) {
|
||||
throw new IllegalArgumentException("Cannot redefine built-in Similarity [" + name + "]");
|
||||
}
|
||||
Settings settings = entry.getValue();
|
||||
|
@ -83,7 +83,7 @@ public final class SimilarityService extends AbstractIndexComponent {
|
|||
}
|
||||
for (Map.Entry<String, SimilarityProvider> entry : addSimilarities(similaritySettings, DEFAULTS).entrySet()) {
|
||||
// Avoid overwriting custom providers for indices older that v5.0
|
||||
if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_5_0_0)) {
|
||||
if (providers.containsKey(entry.getKey()) && indexSettings.getIndexVersionCreated().before(Version.V_5_0_0_alpha1)) {
|
||||
continue;
|
||||
}
|
||||
providers.put(entry.getKey(), entry.getValue());
|
||||
|
|
|
@ -1,56 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.suggest.stats;
|
||||
|
||||
import org.elasticsearch.common.metrics.CounterMetric;
|
||||
import org.elasticsearch.common.metrics.MeanMetric;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public final class ShardSuggestMetric {
|
||||
private final MeanMetric suggestMetric = new MeanMetric();
|
||||
private final CounterMetric currentMetric = new CounterMetric();
|
||||
|
||||
/**
|
||||
* Called before suggest
|
||||
*/
|
||||
public void preSuggest() {
|
||||
currentMetric.inc();
|
||||
}
|
||||
|
||||
/**
|
||||
* Called after suggest
|
||||
* @param tookInNanos time of suggest used in nanos
|
||||
*/
|
||||
public void postSuggest(long tookInNanos) {
|
||||
currentMetric.dec();
|
||||
suggestMetric.inc(tookInNanos);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The current stats
|
||||
*/
|
||||
public SuggestStats stats() {
|
||||
return new SuggestStats(suggestMetric.count(), TimeUnit.NANOSECONDS.toMillis(suggestMetric.sum()), currentMetric.count());
|
||||
}
|
||||
}
|
|
@ -1,124 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.suggest.stats;
|
||||
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Exposes suggest related statistics.
|
||||
*/
|
||||
public class SuggestStats implements Streamable, ToXContent {
|
||||
|
||||
private long suggestCount;
|
||||
private long suggestTimeInMillis;
|
||||
private long current;
|
||||
|
||||
public SuggestStats() {
|
||||
}
|
||||
|
||||
SuggestStats(long suggestCount, long suggestTimeInMillis, long current) {
|
||||
this.suggestCount = suggestCount;
|
||||
this.suggestTimeInMillis = suggestTimeInMillis;
|
||||
this.current = current;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The number of times the suggest api has been invoked.
|
||||
*/
|
||||
public long getCount() {
|
||||
return suggestCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The total amount of time spend in the suggest api
|
||||
*/
|
||||
public long getTimeInMillis() {
|
||||
return suggestTimeInMillis;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The total amount of time spend in the suggest api
|
||||
*/
|
||||
public TimeValue getTime() {
|
||||
return new TimeValue(getTimeInMillis());
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The total amount of active suggest api invocations.
|
||||
*/
|
||||
public long getCurrent() {
|
||||
return current;
|
||||
}
|
||||
|
||||
public void add(SuggestStats suggestStats) {
|
||||
if (suggestStats != null) {
|
||||
suggestCount += suggestStats.getCount();
|
||||
suggestTimeInMillis += suggestStats.getTimeInMillis();
|
||||
current += suggestStats.getCurrent();
|
||||
}
|
||||
}
|
||||
|
||||
public static SuggestStats readSuggestStats(StreamInput in) throws IOException {
|
||||
SuggestStats stats = new SuggestStats();
|
||||
stats.readFrom(in);
|
||||
return stats;
|
||||
}
|
||||
|
||||
static final class Fields {
|
||||
static final XContentBuilderString SUGGEST = new XContentBuilderString("suggest");
|
||||
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
|
||||
static final XContentBuilderString TIME = new XContentBuilderString("time");
|
||||
static final XContentBuilderString TIME_IN_MILLIS = new XContentBuilderString("time_in_millis");
|
||||
static final XContentBuilderString CURRENT = new XContentBuilderString("current");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
suggestCount = in.readVLong();
|
||||
suggestTimeInMillis = in.readVLong();
|
||||
current = in.readVLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVLong(suggestCount);
|
||||
out.writeVLong(suggestTimeInMillis);
|
||||
out.writeVLong(current);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(Fields.SUGGEST);
|
||||
builder.field(Fields.TOTAL, suggestCount);
|
||||
builder.timeValueField(Fields.TIME_IN_MILLIS, Fields.TIME, suggestTimeInMillis);
|
||||
builder.field(Fields.CURRENT, current);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
}
|
|
@ -44,7 +44,6 @@ import org.elasticsearch.index.refresh.RefreshStats;
|
|||
import org.elasticsearch.index.search.stats.SearchStats;
|
||||
import org.elasticsearch.index.shard.DocsStats;
|
||||
import org.elasticsearch.index.store.StoreStats;
|
||||
import org.elasticsearch.index.suggest.stats.SuggestStats;
|
||||
import org.elasticsearch.search.suggest.completion.CompletionStats;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -149,11 +148,6 @@ public class NodeIndicesStats implements Streamable, ToXContent {
|
|||
return stats.getSegments();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public SuggestStats getSuggest() {
|
||||
return stats.getSuggest();
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public RecoveryStats getRecoveryStats() {
|
||||
return stats.getRecoveryStats();
|
||||
|
|
|
@ -20,9 +20,9 @@ package org.elasticsearch.plugins;
|
|||
|
||||
public class DummyPluginInfo extends PluginInfo {
|
||||
|
||||
private DummyPluginInfo(String name, String description, String version, String classname, boolean isolated) {
|
||||
super(name, description, version, classname, isolated);
|
||||
private DummyPluginInfo(String name, String description, String version, String classname) {
|
||||
super(name, description, version, classname);
|
||||
}
|
||||
|
||||
public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName", true);
|
||||
public static final DummyPluginInfo INSTANCE = new DummyPluginInfo("dummy_plugin_name", "dummy plugin description", "dummy_plugin_version", "DummyPluginName");
|
||||
}
|
||||
|
|
|
@ -21,7 +21,6 @@ package org.elasticsearch.plugins;
|
|||
|
||||
import joptsimple.OptionSet;
|
||||
import joptsimple.OptionSpec;
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.Build;
|
||||
import org.elasticsearch.Version;
|
||||
|
@ -56,6 +55,7 @@ import java.util.HashSet;
|
|||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
@ -183,11 +183,18 @@ class InstallPluginCommand extends Command {
|
|||
final String version = Version.CURRENT.toString();
|
||||
final String url;
|
||||
if (System.getProperty(PROPERTY_SUPPORT_STAGING_URLS, "false").equals("true")) {
|
||||
url = String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/staging/%1$s-%2$s/org/elasticsearch/plugin/%3$s/%1$s/%3$s-%1$s.zip",
|
||||
version, Build.CURRENT.shortHash(), pluginId);
|
||||
url = String.format(
|
||||
Locale.ROOT,
|
||||
"https://download.elastic.co/elasticsearch/staging/%1$s-%2$s/org/elasticsearch/plugin/%3$s/%1$s/%3$s-%1$s.zip",
|
||||
version,
|
||||
Build.CURRENT.shortHash(),
|
||||
pluginId);
|
||||
} else {
|
||||
url = String.format(Locale.ROOT, "https://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%1$s/%2$s/%1$s-%2$s.zip",
|
||||
pluginId, version);
|
||||
url = String.format(
|
||||
Locale.ROOT,
|
||||
"https://download.elastic.co/elasticsearch/release/org/elasticsearch/plugin/%1$s/%2$s/%1$s-%2$s.zip",
|
||||
pluginId,
|
||||
version);
|
||||
}
|
||||
terminal.println("-> Downloading " + pluginId + " from elastic");
|
||||
return downloadZipAndChecksum(url, tmpDir);
|
||||
|
@ -243,21 +250,8 @@ class InstallPluginCommand extends Command {
|
|||
|
||||
private Path unzip(Path zip, Path pluginsDir) throws IOException, UserError {
|
||||
// unzip plugin to a staging temp dir
|
||||
final Path target;
|
||||
if (Constants.WINDOWS) {
|
||||
target = Files.createTempDirectory(pluginsDir, ".installing-");
|
||||
} else {
|
||||
Set<PosixFilePermission> perms = new HashSet<>();
|
||||
perms.add(PosixFilePermission.OWNER_EXECUTE);
|
||||
perms.add(PosixFilePermission.OWNER_READ);
|
||||
perms.add(PosixFilePermission.OWNER_WRITE);
|
||||
perms.add(PosixFilePermission.GROUP_READ);
|
||||
perms.add(PosixFilePermission.GROUP_EXECUTE);
|
||||
perms.add(PosixFilePermission.OTHERS_READ);
|
||||
perms.add(PosixFilePermission.OTHERS_EXECUTE);
|
||||
target = Files.createTempDirectory(pluginsDir, ".installing-", PosixFilePermissions.asFileAttribute(perms));
|
||||
}
|
||||
Files.createDirectories(target);
|
||||
|
||||
final Path target = stagingDirectory(pluginsDir);
|
||||
|
||||
boolean hasEsDir = false;
|
||||
// TODO: we should wrap this in a try/catch and try deleting the target dir on failure?
|
||||
|
@ -302,6 +296,39 @@ class InstallPluginCommand extends Command {
|
|||
return target;
|
||||
}
|
||||
|
||||
private Path stagingDirectory(Path pluginsDir) throws IOException {
|
||||
try {
|
||||
Set<PosixFilePermission> perms = new HashSet<>();
|
||||
perms.add(PosixFilePermission.OWNER_EXECUTE);
|
||||
perms.add(PosixFilePermission.OWNER_READ);
|
||||
perms.add(PosixFilePermission.OWNER_WRITE);
|
||||
perms.add(PosixFilePermission.GROUP_READ);
|
||||
perms.add(PosixFilePermission.GROUP_EXECUTE);
|
||||
perms.add(PosixFilePermission.OTHERS_READ);
|
||||
perms.add(PosixFilePermission.OTHERS_EXECUTE);
|
||||
return Files.createTempDirectory(pluginsDir, ".installing-", PosixFilePermissions.asFileAttribute(perms));
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Jimfs throws an IAE where it should throw an UOE
|
||||
// remove when google/jimfs#30 is integrated into Jimfs
|
||||
// and the Jimfs test dependency is upgraded to include
|
||||
// this pull request
|
||||
final StackTraceElement[] elements = e.getStackTrace();
|
||||
if (elements.length >= 1 &&
|
||||
elements[0].getClassName().equals("com.google.common.jimfs.AttributeService") &&
|
||||
elements[0].getMethodName().equals("setAttributeInternal")) {
|
||||
return stagingDirectoryWithoutPosixPermissions(pluginsDir);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
} catch (UnsupportedOperationException e) {
|
||||
return stagingDirectoryWithoutPosixPermissions(pluginsDir);
|
||||
}
|
||||
}
|
||||
|
||||
private Path stagingDirectoryWithoutPosixPermissions(Path pluginsDir) throws IOException {
|
||||
return Files.createTempDirectory(pluginsDir, ".installing-");
|
||||
}
|
||||
|
||||
/** Load information about the plugin, and verify it can be installed with no errors. */
|
||||
private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception {
|
||||
// read and validate the plugin descriptor
|
||||
|
@ -315,7 +342,7 @@ class InstallPluginCommand extends Command {
|
|||
}
|
||||
|
||||
// check for jar hell before any copying
|
||||
jarHellCheck(pluginRoot, env.pluginsFile(), info.isIsolated());
|
||||
jarHellCheck(pluginRoot, env.pluginsFile());
|
||||
|
||||
// read optional security policy (extra permissions)
|
||||
// if it exists, confirm or warn the user
|
||||
|
@ -328,19 +355,13 @@ class InstallPluginCommand extends Command {
|
|||
}
|
||||
|
||||
/** check a candidate plugin for jar hell before installing it */
|
||||
private void jarHellCheck(Path candidate, Path pluginsDir, boolean isolated) throws Exception {
|
||||
void jarHellCheck(Path candidate, Path pluginsDir) throws Exception {
|
||||
// create list of current jars in classpath
|
||||
final List<URL> jars = new ArrayList<>();
|
||||
jars.addAll(Arrays.asList(JarHell.parseClassPath()));
|
||||
|
||||
// read existing bundles. this does some checks on the installation too.
|
||||
List<PluginsService.Bundle> bundles = PluginsService.getPluginBundles(pluginsDir);
|
||||
|
||||
// if we aren't isolated, we need to jarhellcheck against any other non-isolated plugins
|
||||
// that's always the first bundle
|
||||
if (isolated == false) {
|
||||
jars.addAll(bundles.get(0).urls);
|
||||
}
|
||||
PluginsService.getPluginBundles(pluginsDir);
|
||||
|
||||
// add plugin jars to the list
|
||||
Path pluginJars[] = FileSystemUtils.files(candidate, "*.jar");
|
||||
|
@ -367,7 +388,10 @@ class InstallPluginCommand extends Command {
|
|||
|
||||
final Path destination = env.pluginsFile().resolve(info.getName());
|
||||
if (Files.exists(destination)) {
|
||||
throw new UserError(ExitCodes.USAGE, "plugin directory " + destination.toAbsolutePath() + " already exists. To update the plugin, uninstall it first using 'remove " + info.getName() + "' command");
|
||||
throw new UserError(
|
||||
ExitCodes.USAGE,
|
||||
"plugin directory " + destination.toAbsolutePath() +
|
||||
" already exists. To update the plugin, uninstall it first using 'remove " + info.getName() + "' command");
|
||||
}
|
||||
|
||||
Path tmpBinDir = tmpRoot.resolve("bin");
|
||||
|
@ -404,30 +428,30 @@ class InstallPluginCommand extends Command {
|
|||
}
|
||||
Files.createDirectory(destBinDir);
|
||||
|
||||
Set<PosixFilePermission> perms = new HashSet<>();
|
||||
if (Constants.WINDOWS == false) {
|
||||
// setup file attributes for the installed files to those of the parent dir
|
||||
PosixFileAttributeView binAttrs = Files.getFileAttributeView(destBinDir.getParent(), PosixFileAttributeView.class);
|
||||
if (binAttrs != null) {
|
||||
perms = new HashSet<>(binAttrs.readAttributes().permissions());
|
||||
// setting execute bits, since this just means "the file is executable", and actual execution requires read
|
||||
perms.add(PosixFilePermission.OWNER_EXECUTE);
|
||||
perms.add(PosixFilePermission.GROUP_EXECUTE);
|
||||
perms.add(PosixFilePermission.OTHERS_EXECUTE);
|
||||
}
|
||||
// setup file attributes for the installed files to those of the parent dir
|
||||
final Set<PosixFilePermission> perms = new HashSet<>();
|
||||
final PosixFileAttributeView binAttributeView = Files.getFileAttributeView(destBinDir.getParent(), PosixFileAttributeView.class);
|
||||
if (binAttributeView != null) {
|
||||
perms.addAll(binAttributeView.readAttributes().permissions());
|
||||
// setting execute bits, since this just means "the file is executable", and actual execution requires read
|
||||
perms.add(PosixFilePermission.OWNER_EXECUTE);
|
||||
perms.add(PosixFilePermission.GROUP_EXECUTE);
|
||||
perms.add(PosixFilePermission.OTHERS_EXECUTE);
|
||||
}
|
||||
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(tmpBinDir)) {
|
||||
for (Path srcFile : stream) {
|
||||
if (Files.isDirectory(srcFile)) {
|
||||
throw new UserError(ExitCodes.DATA_ERROR, "Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName());
|
||||
throw new UserError(
|
||||
ExitCodes.DATA_ERROR,
|
||||
"Directories not allowed in bin dir for plugin " + info.getName() + ", found " + srcFile.getFileName());
|
||||
}
|
||||
|
||||
Path destFile = destBinDir.resolve(tmpBinDir.relativize(srcFile));
|
||||
Files.copy(srcFile, destFile);
|
||||
|
||||
if (perms.isEmpty() == false) {
|
||||
PosixFileAttributeView view = Files.getFileAttributeView(destFile, PosixFileAttributeView.class);
|
||||
final PosixFileAttributeView view = Files.getFileAttributeView(destFile, PosixFileAttributeView.class);
|
||||
if (view != null) {
|
||||
view.setPermissions(perms);
|
||||
}
|
||||
}
|
||||
|
@ -446,15 +470,12 @@ class InstallPluginCommand extends Command {
|
|||
|
||||
// create the plugin's config dir "if necessary"
|
||||
Files.createDirectories(destConfigDir);
|
||||
|
||||
final PosixFileAttributes destConfigDirAttributes;
|
||||
if (Constants.WINDOWS) {
|
||||
destConfigDirAttributes = null;
|
||||
} else {
|
||||
destConfigDirAttributes =
|
||||
Files.getFileAttributeView(destConfigDir.getParent(), PosixFileAttributeView.class).readAttributes();
|
||||
final PosixFileAttributeView destConfigDirAttributesView =
|
||||
Files.getFileAttributeView(destConfigDir.getParent(), PosixFileAttributeView.class);
|
||||
final PosixFileAttributes destConfigDirAttributes =
|
||||
destConfigDirAttributesView != null ? destConfigDirAttributesView.readAttributes() : null;
|
||||
if (destConfigDirAttributes != null) {
|
||||
setOwnerGroup(destConfigDir, destConfigDirAttributes);
|
||||
|
||||
}
|
||||
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(tmpConfigDir)) {
|
||||
|
@ -466,7 +487,7 @@ class InstallPluginCommand extends Command {
|
|||
Path destFile = destConfigDir.resolve(tmpConfigDir.relativize(srcFile));
|
||||
if (Files.exists(destFile) == false) {
|
||||
Files.copy(srcFile, destFile);
|
||||
if (Constants.WINDOWS == false) {
|
||||
if (destConfigDirAttributes != null) {
|
||||
setOwnerGroup(destFile, destConfigDirAttributes);
|
||||
}
|
||||
}
|
||||
|
@ -475,8 +496,10 @@ class InstallPluginCommand extends Command {
|
|||
IOUtils.rm(tmpConfigDir); // clean up what we just copied
|
||||
}
|
||||
|
||||
private static void setOwnerGroup(Path path, PosixFileAttributes attributes) throws IOException {
|
||||
private static void setOwnerGroup(final Path path, final PosixFileAttributes attributes) throws IOException {
|
||||
Objects.requireNonNull(attributes);
|
||||
PosixFileAttributeView fileAttributeView = Files.getFileAttributeView(path, PosixFileAttributeView.class);
|
||||
assert fileAttributeView != null;
|
||||
fileAttributeView.setOwner(attributes.owner());
|
||||
fileAttributeView.setGroup(attributes.group());
|
||||
}
|
||||
|
|
|
@ -44,14 +44,12 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
static final XContentBuilderString URL = new XContentBuilderString("url");
|
||||
static final XContentBuilderString VERSION = new XContentBuilderString("version");
|
||||
static final XContentBuilderString CLASSNAME = new XContentBuilderString("classname");
|
||||
static final XContentBuilderString ISOLATED = new XContentBuilderString("isolated");
|
||||
}
|
||||
|
||||
private String name;
|
||||
private String description;
|
||||
private String version;
|
||||
private String classname;
|
||||
private boolean isolated;
|
||||
|
||||
public PluginInfo() {
|
||||
}
|
||||
|
@ -63,12 +61,11 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
* @param description Its description
|
||||
* @param version Version number
|
||||
*/
|
||||
PluginInfo(String name, String description, String version, String classname, boolean isolated) {
|
||||
PluginInfo(String name, String description, String version, String classname) {
|
||||
this.name = name;
|
||||
this.description = description;
|
||||
this.version = version;
|
||||
this.classname = classname;
|
||||
this.isolated = isolated;
|
||||
}
|
||||
|
||||
/** reads (and validates) plugin metadata descriptor file */
|
||||
|
@ -106,13 +103,12 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
}
|
||||
JarHell.checkVersionFormat(javaVersionString);
|
||||
JarHell.checkJavaVersion(name, javaVersionString);
|
||||
boolean isolated = Boolean.parseBoolean(props.getProperty("isolated", "true"));
|
||||
String classname = props.getProperty("classname");
|
||||
if (classname == null) {
|
||||
throw new IllegalArgumentException("Property [classname] is missing for plugin [" + name + "]");
|
||||
}
|
||||
|
||||
return new PluginInfo(name, description, version, classname, isolated);
|
||||
return new PluginInfo(name, description, version, classname);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -129,13 +125,6 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
return description;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if plugin has isolated classloader
|
||||
*/
|
||||
public boolean isIsolated() {
|
||||
return isolated;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return plugin's classname
|
||||
*/
|
||||
|
@ -162,7 +151,6 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
this.description = in.readString();
|
||||
this.version = in.readString();
|
||||
this.classname = in.readString();
|
||||
this.isolated = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -171,7 +159,6 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
out.writeString(description);
|
||||
out.writeString(version);
|
||||
out.writeString(classname);
|
||||
out.writeBoolean(isolated);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -181,7 +168,6 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
builder.field(Fields.VERSION, version);
|
||||
builder.field(Fields.DESCRIPTION, description);
|
||||
builder.field(Fields.CLASSNAME, classname);
|
||||
builder.field(Fields.ISOLATED, isolated);
|
||||
builder.endObject();
|
||||
|
||||
return builder;
|
||||
|
@ -212,8 +198,7 @@ public class PluginInfo implements Streamable, ToXContent {
|
|||
.append("Name: ").append(name).append("\n")
|
||||
.append("Description: ").append(description).append("\n")
|
||||
.append("Version: ").append(version).append("\n")
|
||||
.append(" * Classname: ").append(classname).append("\n")
|
||||
.append(" * Isolated: ").append(isolated);
|
||||
.append(" * Classname: ").append(classname);
|
||||
|
||||
return information.toString();
|
||||
}
|
||||
|
|
|
@ -103,7 +103,7 @@ public class PluginsService extends AbstractComponent {
|
|||
// first we load plugins that are on the classpath. this is for tests and transport clients
|
||||
for (Class<? extends Plugin> pluginClass : classpathPlugins) {
|
||||
Plugin plugin = loadPlugin(pluginClass, settings);
|
||||
PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), "NA", pluginClass.getName(), false);
|
||||
PluginInfo pluginInfo = new PluginInfo(plugin.name(), plugin.description(), "NA", pluginClass.getName());
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("plugin loaded from classpath [{}]", pluginInfo);
|
||||
}
|
||||
|
@ -302,9 +302,6 @@ public class PluginsService extends AbstractComponent {
|
|||
continue; // skip over .DS_Store etc
|
||||
}
|
||||
PluginInfo info = PluginInfo.readFromProperties(module);
|
||||
if (!info.isIsolated()) {
|
||||
throw new IllegalStateException("modules must be isolated: " + info);
|
||||
}
|
||||
Bundle bundle = new Bundle();
|
||||
bundle.plugins.add(info);
|
||||
// gather urls for jar files
|
||||
|
@ -329,8 +326,6 @@ public class PluginsService extends AbstractComponent {
|
|||
}
|
||||
|
||||
List<Bundle> bundles = new ArrayList<>();
|
||||
// a special purgatory for plugins that directly depend on each other
|
||||
bundles.add(new Bundle());
|
||||
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsDirectory)) {
|
||||
for (Path plugin : stream) {
|
||||
|
@ -354,13 +349,8 @@ public class PluginsService extends AbstractComponent {
|
|||
urls.add(jar.toRealPath().toUri().toURL());
|
||||
}
|
||||
}
|
||||
final Bundle bundle;
|
||||
if (info.isIsolated() == false) {
|
||||
bundle = bundles.get(0); // purgatory
|
||||
} else {
|
||||
bundle = new Bundle();
|
||||
bundles.add(bundle);
|
||||
}
|
||||
final Bundle bundle = new Bundle();
|
||||
bundles.add(bundle);
|
||||
bundle.plugins.add(info);
|
||||
bundle.urls.addAll(urls);
|
||||
}
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.elasticsearch.client.Client;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
@ -51,6 +52,7 @@ public class RestListTasksAction extends BaseRestHandler {
|
|||
String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
|
||||
TaskId parentTaskId = new TaskId(request.param("parent_task_id"));
|
||||
boolean waitForCompletion = request.paramAsBoolean("wait_for_completion", false);
|
||||
TimeValue timeout = request.paramAsTime("timeout", null);
|
||||
|
||||
ListTasksRequest listTasksRequest = new ListTasksRequest();
|
||||
listTasksRequest.setTaskId(taskId);
|
||||
|
@ -59,6 +61,7 @@ public class RestListTasksAction extends BaseRestHandler {
|
|||
listTasksRequest.setActions(actions);
|
||||
listTasksRequest.setParentTaskId(parentTaskId);
|
||||
listTasksRequest.setWaitForCompletion(waitForCompletion);
|
||||
listTasksRequest.setTimeout(timeout);
|
||||
client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -71,7 +71,7 @@ public class RestIndicesStatsAction extends BaseRestHandler {
|
|||
indicesStatsRequest.docs(metrics.contains("docs"));
|
||||
indicesStatsRequest.store(metrics.contains("store"));
|
||||
indicesStatsRequest.indexing(metrics.contains("indexing"));
|
||||
indicesStatsRequest.search(metrics.contains("search"));
|
||||
indicesStatsRequest.search(metrics.contains("search") || metrics.contains("suggest"));
|
||||
indicesStatsRequest.get(metrics.contains("get"));
|
||||
indicesStatsRequest.merge(metrics.contains("merge"));
|
||||
indicesStatsRequest.refresh(metrics.contains("refresh"));
|
||||
|
@ -82,7 +82,6 @@ public class RestIndicesStatsAction extends BaseRestHandler {
|
|||
indicesStatsRequest.segments(metrics.contains("segments"));
|
||||
indicesStatsRequest.fieldData(metrics.contains("fielddata"));
|
||||
indicesStatsRequest.completion(metrics.contains("completion"));
|
||||
indicesStatsRequest.suggest(metrics.contains("suggest"));
|
||||
indicesStatsRequest.requestCache(metrics.contains("request_cache"));
|
||||
indicesStatsRequest.recovery(metrics.contains("recovery"));
|
||||
indicesStatsRequest.translog(metrics.contains("translog"));
|
||||
|
|
|
@ -490,14 +490,14 @@ public class RestIndicesAction extends AbstractCatAction {
|
|||
table.addCell(indexStats == null ? null : indexStats.getTotal().getWarmer().totalTime());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getWarmer().totalTime());
|
||||
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getSuggest().getCurrent());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSuggest().getCurrent());
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getSuggestCurrent());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getSuggestCurrent());
|
||||
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getSuggest().getTime());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSuggest().getTime());
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getSuggestTime());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getSuggestTime());
|
||||
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getSuggest().getCount());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSuggest().getCount());
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getSuggestCount());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getSuggestCount());
|
||||
|
||||
table.addCell(indexStats == null ? null : indexStats.getTotal().getTotalMemory());
|
||||
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getTotalMemory());
|
||||
|
|
|
@ -49,7 +49,6 @@ import org.elasticsearch.index.merge.MergeStats;
|
|||
import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
|
||||
import org.elasticsearch.index.refresh.RefreshStats;
|
||||
import org.elasticsearch.index.search.stats.SearchStats;
|
||||
import org.elasticsearch.index.suggest.stats.SuggestStats;
|
||||
import org.elasticsearch.indices.NodeIndicesStats;
|
||||
import org.elasticsearch.monitor.fs.FsInfo;
|
||||
import org.elasticsearch.monitor.jvm.JvmInfo;
|
||||
|
@ -362,10 +361,9 @@ public class RestNodesAction extends AbstractCatAction {
|
|||
table.addCell(segmentsStats == null ? null : segmentsStats.getVersionMapMemory());
|
||||
table.addCell(segmentsStats == null ? null : segmentsStats.getBitsetMemory());
|
||||
|
||||
SuggestStats suggestStats = indicesStats == null ? null : indicesStats.getSuggest();
|
||||
table.addCell(suggestStats == null ? null : suggestStats.getCurrent());
|
||||
table.addCell(suggestStats == null ? null : suggestStats.getTime());
|
||||
table.addCell(suggestStats == null ? null : suggestStats.getCount());
|
||||
table.addCell(searchStats == null ? null : searchStats.getTotal().getSuggestCurrent());
|
||||
table.addCell(searchStats == null ? null : searchStats.getTotal().getSuggestTime());
|
||||
table.addCell(searchStats == null ? null : searchStats.getTotal().getSuggestCount());
|
||||
|
||||
table.endRow();
|
||||
}
|
||||
|
|
|
@ -66,7 +66,6 @@ public class RestThreadPoolAction extends AbstractCatAction {
|
|||
ThreadPool.Names.REFRESH,
|
||||
ThreadPool.Names.SEARCH,
|
||||
ThreadPool.Names.SNAPSHOT,
|
||||
ThreadPool.Names.SUGGEST,
|
||||
ThreadPool.Names.WARMER
|
||||
};
|
||||
|
||||
|
@ -81,7 +80,6 @@ public class RestThreadPoolAction extends AbstractCatAction {
|
|||
"r",
|
||||
"s",
|
||||
"sn",
|
||||
"su",
|
||||
"w"
|
||||
};
|
||||
|
||||
|
|
|
@ -19,14 +19,15 @@
|
|||
|
||||
package org.elasticsearch.rest.action.suggest;
|
||||
|
||||
import org.elasticsearch.action.suggest.SuggestRequest;
|
||||
import org.elasticsearch.action.suggest.SuggestResponse;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -41,6 +42,7 @@ import org.elasticsearch.rest.RestResponse;
|
|||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.rest.action.support.RestActions;
|
||||
import org.elasticsearch.rest.action.support.RestBuilderListener;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
|
@ -73,28 +75,29 @@ public class RestSuggestAction extends BaseRestHandler {
|
|||
|
||||
@Override
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws IOException {
|
||||
SuggestRequest suggestRequest = new SuggestRequest(Strings.splitStringByCommaToArray(request.param("index")));
|
||||
suggestRequest.indicesOptions(IndicesOptions.fromRequest(request, suggestRequest.indicesOptions()));
|
||||
final SearchRequest searchRequest = new SearchRequest(Strings.splitStringByCommaToArray(request.param("index")), new SearchSourceBuilder());
|
||||
searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions()));
|
||||
if (RestActions.hasBodyContent(request)) {
|
||||
final BytesReference sourceBytes = RestActions.getRestContent(request);
|
||||
try (XContentParser parser = XContentFactory.xContent(sourceBytes).createParser(sourceBytes)) {
|
||||
final QueryParseContext context = new QueryParseContext(queryRegistry);
|
||||
context.reset(parser);
|
||||
context.parseFieldMatcher(parseFieldMatcher);
|
||||
suggestRequest.suggest(SuggestBuilder.fromXContent(context, suggesters));
|
||||
searchRequest.source().suggest(SuggestBuilder.fromXContent(context, suggesters));
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("no content or source provided to execute suggestion");
|
||||
}
|
||||
suggestRequest.routing(request.param("routing"));
|
||||
suggestRequest.preference(request.param("preference"));
|
||||
|
||||
client.suggest(suggestRequest, new RestBuilderListener<SuggestResponse>(channel) {
|
||||
searchRequest.routing(request.param("routing"));
|
||||
searchRequest.preference(request.param("preference"));
|
||||
client.search(searchRequest, new RestBuilderListener<SearchResponse>(channel) {
|
||||
@Override
|
||||
public RestResponse buildResponse(SuggestResponse response, XContentBuilder builder) throws Exception {
|
||||
RestStatus restStatus = RestStatus.status(response.getSuccessfulShards(), response.getTotalShards(), response.getShardFailures());
|
||||
public RestResponse buildResponse(SearchResponse response, XContentBuilder builder) throws Exception {
|
||||
RestStatus restStatus = RestStatus.status(response.getSuccessfulShards(),
|
||||
response.getTotalShards(), response.getShardFailures());
|
||||
builder.startObject();
|
||||
buildBroadcastShardsHeader(builder, request, response);
|
||||
buildBroadcastShardsHeader(builder, request, response.getTotalShards(),
|
||||
response.getSuccessfulShards(), response.getFailedShards(), response.getShardFailures());
|
||||
Suggest suggest = response.getSuggest();
|
||||
if (suggest != null) {
|
||||
suggest.toInnerXContent(builder, request);
|
||||
|
|
|
@ -226,6 +226,11 @@ import org.elasticsearch.search.highlight.Highlighters;
|
|||
import org.elasticsearch.search.query.QueryPhase;
|
||||
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.sort.FieldSortBuilder;
|
||||
import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
|
||||
import org.elasticsearch.search.sort.ScoreSortBuilder;
|
||||
import org.elasticsearch.search.sort.ScriptSortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggester;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
|
@ -346,6 +351,7 @@ public class SearchModule extends AbstractModule {
|
|||
configureFetchSubPhase();
|
||||
configureShapes();
|
||||
configureRescorers();
|
||||
configureSorts();
|
||||
}
|
||||
|
||||
protected void configureFetchSubPhase() {
|
||||
|
@ -489,6 +495,13 @@ public class SearchModule extends AbstractModule {
|
|||
namedWriteableRegistry.registerPrototype(RescoreBuilder.class, QueryRescorerBuilder.PROTOTYPE);
|
||||
}
|
||||
|
||||
private void configureSorts() {
|
||||
namedWriteableRegistry.registerPrototype(SortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE);
|
||||
namedWriteableRegistry.registerPrototype(SortBuilder.class, ScoreSortBuilder.PROTOTYPE);
|
||||
namedWriteableRegistry.registerPrototype(SortBuilder.class, ScriptSortBuilder.PROTOTYPE);
|
||||
namedWriteableRegistry.registerPrototype(SortBuilder.class, FieldSortBuilder.PROTOTYPE);
|
||||
}
|
||||
|
||||
private void registerBuiltinFunctionScoreParsers() {
|
||||
registerFunctionScoreParser(new ScriptScoreFunctionParser());
|
||||
registerFunctionScoreParser(new GaussDecayFunctionParser());
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.search;
|
|||
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.cache.recycler.PageCacheRecycler;
|
||||
|
@ -41,7 +42,6 @@ import org.elasticsearch.common.util.BigArrays;
|
|||
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
|
||||
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
|
||||
import org.elasticsearch.common.util.concurrent.FutureUtils;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentLocation;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -92,6 +92,7 @@ import org.elasticsearch.search.query.QuerySearchResultProvider;
|
|||
import org.elasticsearch.search.query.ScrollQuerySearchResult;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.suggest.Suggesters;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
|
@ -99,6 +100,7 @@ import java.io.IOException;
|
|||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
@ -683,33 +685,13 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
|
||||
}
|
||||
if (source.sorts() != null) {
|
||||
XContentParser completeSortParser = null;
|
||||
try {
|
||||
XContentBuilder completeSortBuilder = XContentFactory.jsonBuilder();
|
||||
completeSortBuilder.startObject();
|
||||
completeSortBuilder.startArray("sort");
|
||||
for (BytesReference sort : source.sorts()) {
|
||||
XContentParser parser = XContentFactory.xContent(sort).createParser(sort);
|
||||
parser.nextToken();
|
||||
completeSortBuilder.copyCurrentStructure(parser);
|
||||
Optional<Sort> optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext());
|
||||
if (optionalSort.isPresent()) {
|
||||
context.sort(optionalSort.get());
|
||||
}
|
||||
completeSortBuilder.endArray();
|
||||
completeSortBuilder.endObject();
|
||||
BytesReference completeSortBytes = completeSortBuilder.bytes();
|
||||
completeSortParser = XContentFactory.xContent(completeSortBytes).createParser(completeSortBytes);
|
||||
completeSortParser.nextToken();
|
||||
completeSortParser.nextToken();
|
||||
completeSortParser.nextToken();
|
||||
this.elementParsers.get("sort").parse(completeSortParser, context);
|
||||
} catch (Exception e) {
|
||||
String sSource = "_na_";
|
||||
try {
|
||||
sSource = source.toString();
|
||||
} catch (Throwable e1) {
|
||||
// ignore
|
||||
}
|
||||
XContentLocation location = completeSortParser != null ? completeSortParser.getTokenLocation() : null;
|
||||
throw new SearchParseException(context, "failed to parse sort source [" + sSource + "]", location, e);
|
||||
} catch (IOException e) {
|
||||
throw new SearchContextException(context, "failed to create sort elements", e);
|
||||
}
|
||||
}
|
||||
context.trackScores(source.trackScores());
|
||||
|
|
|
@ -21,13 +21,9 @@ package org.elasticsearch.search.aggregations.metrics.tophits;
|
|||
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.aggregations.AggregationInitializationException;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
|
@ -38,6 +34,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
|
|||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.sort.ScoreSortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
@ -57,7 +54,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
private boolean explain = false;
|
||||
private boolean version = false;
|
||||
private boolean trackScores = false;
|
||||
private List<BytesReference> sorts = null;
|
||||
private List<SortBuilder<?>> sorts = null;
|
||||
private HighlightBuilder highlightBuilder;
|
||||
private List<String> fieldNames;
|
||||
private List<String> fieldDataFields;
|
||||
|
@ -119,6 +116,9 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
if (order == null) {
|
||||
throw new IllegalArgumentException("sort [order] must not be null: [" + name + "]");
|
||||
}
|
||||
if (name.equals(ScoreSortBuilder.NAME)) {
|
||||
sort(SortBuilders.scoreSort().order(order));
|
||||
}
|
||||
sort(SortBuilders.fieldSort(name).order(order));
|
||||
return this;
|
||||
}
|
||||
|
@ -133,6 +133,9 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
if (name == null) {
|
||||
throw new IllegalArgumentException("sort [name] must not be null: [" + name + "]");
|
||||
}
|
||||
if (name.equals(ScoreSortBuilder.NAME)) {
|
||||
sort(SortBuilders.scoreSort());
|
||||
}
|
||||
sort(SortBuilders.fieldSort(name));
|
||||
return this;
|
||||
}
|
||||
|
@ -140,39 +143,28 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
/**
|
||||
* Adds a sort builder.
|
||||
*/
|
||||
public TopHitsAggregatorBuilder sort(SortBuilder sort) {
|
||||
public TopHitsAggregatorBuilder sort(SortBuilder<?> sort) {
|
||||
if (sort == null) {
|
||||
throw new IllegalArgumentException("[sort] must not be null: [" + name + "]");
|
||||
}
|
||||
try {
|
||||
if (sorts == null) {
|
||||
if (sorts == null) {
|
||||
sorts = new ArrayList<>();
|
||||
}
|
||||
// NORELEASE when sort has been refactored and made writeable
|
||||
// add the sortBuilcer to the List directly instead of
|
||||
// serialising to XContent
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
sort.toXContent(builder, EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
sorts.add(builder.bytes());
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
sorts.add(sort);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a sort builder.
|
||||
*/
|
||||
public TopHitsAggregatorBuilder sorts(List<BytesReference> sorts) {
|
||||
public TopHitsAggregatorBuilder sorts(List<SortBuilder<?>> sorts) {
|
||||
if (sorts == null) {
|
||||
throw new IllegalArgumentException("[sorts] must not be null: [" + name + "]");
|
||||
}
|
||||
if (this.sorts == null) {
|
||||
this.sorts = new ArrayList<>();
|
||||
}
|
||||
for (BytesReference sort : sorts) {
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
this.sorts.add(sort);
|
||||
}
|
||||
return this;
|
||||
|
@ -181,7 +173,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
/**
|
||||
* Gets the bytes representing the sort builders for this request.
|
||||
*/
|
||||
public List<BytesReference> sorts() {
|
||||
public List<SortBuilder<?>> sorts() {
|
||||
return sorts;
|
||||
}
|
||||
|
||||
|
@ -509,10 +501,8 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
}
|
||||
if (sorts != null) {
|
||||
builder.startArray(SearchSourceBuilder.SORT_FIELD.getPreferredName());
|
||||
for (BytesReference sort : sorts) {
|
||||
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(sort);
|
||||
parser.nextToken();
|
||||
builder.copyCurrentStructure(parser);
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
sort.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
|
@ -562,9 +552,9 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
factory.size = in.readVInt();
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
List<BytesReference> sorts = new ArrayList<>();
|
||||
List<SortBuilder<?>> sorts = new ArrayList<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
sorts.add(in.readBytesReference());
|
||||
sorts.add(in.readSortBuilder());
|
||||
}
|
||||
factory.sorts = sorts;
|
||||
}
|
||||
|
@ -612,8 +602,8 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
|
|||
out.writeBoolean(hasSorts);
|
||||
if (hasSorts) {
|
||||
out.writeVInt(sorts.size());
|
||||
for (BytesReference sort : sorts) {
|
||||
out.writeBytesReference(sort);
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
out.writeSortBuilder(sort);
|
||||
}
|
||||
}
|
||||
out.writeBoolean(trackScores);
|
||||
|
|
|
@ -19,12 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.metrics.tophits;
|
||||
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentLocation;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
|
@ -35,27 +30,27 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
|||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
import org.elasticsearch.search.sort.SortParseElement;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
|
||||
public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregatorFactory> {
|
||||
|
||||
private static final SortParseElement sortParseElement = new SortParseElement();
|
||||
private final int from;
|
||||
private final int size;
|
||||
private final boolean explain;
|
||||
private final boolean version;
|
||||
private final boolean trackScores;
|
||||
private final List<BytesReference> sorts;
|
||||
private final List<SortBuilder<?>> sorts;
|
||||
private final HighlightBuilder highlightBuilder;
|
||||
private final List<String> fieldNames;
|
||||
private final List<String> fieldDataFields;
|
||||
|
@ -63,7 +58,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
|
|||
private final FetchSourceContext fetchSourceContext;
|
||||
|
||||
public TopHitsAggregatorFactory(String name, Type type, int from, int size, boolean explain, boolean version, boolean trackScores,
|
||||
List<BytesReference> sorts, HighlightBuilder highlightBuilder, List<String> fieldNames, List<String> fieldDataFields,
|
||||
List<SortBuilder<?>> sorts, HighlightBuilder highlightBuilder, List<String> fieldNames, List<String> fieldDataFields,
|
||||
List<ScriptField> scriptFields, FetchSourceContext fetchSourceContext, AggregationContext context, AggregatorFactory<?> parent,
|
||||
AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
|
||||
super(name, type, context, parent, subFactories, metaData);
|
||||
|
@ -90,27 +85,9 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
|
|||
subSearchContext.from(from);
|
||||
subSearchContext.size(size);
|
||||
if (sorts != null) {
|
||||
XContentParser completeSortParser = null;
|
||||
try {
|
||||
XContentBuilder completeSortBuilder = XContentFactory.jsonBuilder();
|
||||
completeSortBuilder.startObject();
|
||||
completeSortBuilder.startArray("sort");
|
||||
for (BytesReference sort : sorts) {
|
||||
XContentParser parser = XContentFactory.xContent(sort).createParser(sort);
|
||||
parser.nextToken();
|
||||
completeSortBuilder.copyCurrentStructure(parser);
|
||||
}
|
||||
completeSortBuilder.endArray();
|
||||
completeSortBuilder.endObject();
|
||||
BytesReference completeSortBytes = completeSortBuilder.bytes();
|
||||
completeSortParser = XContentFactory.xContent(completeSortBytes).createParser(completeSortBytes);
|
||||
completeSortParser.nextToken();
|
||||
completeSortParser.nextToken();
|
||||
completeSortParser.nextToken();
|
||||
sortParseElement.parse(completeSortParser, subSearchContext);
|
||||
} catch (Exception e) {
|
||||
XContentLocation location = completeSortParser != null ? completeSortParser.getTokenLocation() : null;
|
||||
throw new ParsingException(location, "failed to parse sort source in aggregation [" + name + "]", e);
|
||||
Optional<Sort> optionalSort = SortBuilder.buildSort(sorts, subSearchContext.getQueryShardContext());
|
||||
if (optionalSort.isPresent()) {
|
||||
subSearchContext.sort(optionalSort.get());
|
||||
}
|
||||
}
|
||||
if (fieldNames != null) {
|
||||
|
|
|
@ -19,9 +19,6 @@
|
|||
package org.elasticsearch.search.aggregations.metrics.tophits;
|
||||
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
|
@ -30,6 +27,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
|
|||
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -124,9 +123,7 @@ public class TopHitsParser implements Aggregator.Parser {
|
|||
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.HIGHLIGHT_FIELD)) {
|
||||
factory.highlighter(HighlightBuilder.PROTOTYPE.fromXContent(context));
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SORT_FIELD)) {
|
||||
List<BytesReference> sorts = new ArrayList<>();
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
|
||||
sorts.add(xContentBuilder.bytes());
|
||||
List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context);
|
||||
factory.sorts(sorts);
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
|
||||
|
@ -157,11 +154,7 @@ public class TopHitsParser implements Aggregator.Parser {
|
|||
}
|
||||
factory.fieldDataFields(fieldDataFields);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SORT_FIELD)) {
|
||||
List<BytesReference> sorts = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
|
||||
sorts.add(xContentBuilder.bytes());
|
||||
}
|
||||
List<SortBuilder<?>> sorts = SortBuilder.fromXContent(context);
|
||||
factory.sorts(sorts);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
|
||||
factory.fetchSource(FetchSourceContext.parse(parser, context));
|
||||
|
|
|
@ -52,6 +52,7 @@ import org.elasticsearch.search.highlight.HighlightBuilder;
|
|||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
|
||||
import org.elasticsearch.search.sort.ScoreSortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
@ -139,7 +140,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
|
||||
private Boolean version;
|
||||
|
||||
private List<BytesReference> sorts;
|
||||
private List<SortBuilder<?>> sorts;
|
||||
|
||||
private boolean trackScores = false;
|
||||
|
||||
|
@ -336,6 +337,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
* The sort ordering
|
||||
*/
|
||||
public SearchSourceBuilder sort(String name, SortOrder order) {
|
||||
if (name.equals(ScoreSortBuilder.NAME)) {
|
||||
return sort(SortBuilders.scoreSort().order(order));
|
||||
}
|
||||
return sort(SortBuilders.fieldSort(name).order(order));
|
||||
}
|
||||
|
||||
|
@ -346,32 +350,27 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
* The name of the field to sort by
|
||||
*/
|
||||
public SearchSourceBuilder sort(String name) {
|
||||
if (name.equals(ScoreSortBuilder.NAME)) {
|
||||
return sort(SortBuilders.scoreSort());
|
||||
}
|
||||
return sort(SortBuilders.fieldSort(name));
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a sort builder.
|
||||
*/
|
||||
public SearchSourceBuilder sort(SortBuilder sort) {
|
||||
try {
|
||||
public SearchSourceBuilder sort(SortBuilder<?> sort) {
|
||||
if (sorts == null) {
|
||||
sorts = new ArrayList<>();
|
||||
}
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
sort.toXContent(builder, EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
sorts.add(builder.bytes());
|
||||
sorts.add(sort);
|
||||
return this;
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the bytes representing the sort builders for this request.
|
||||
*/
|
||||
public List<BytesReference> sorts() {
|
||||
public List<SortBuilder<?>> sorts() {
|
||||
return sorts;
|
||||
}
|
||||
|
||||
|
@ -730,6 +729,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
return ext;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if the source only has suggest
|
||||
*/
|
||||
public boolean isSuggestOnly() {
|
||||
return suggestBuilder != null
|
||||
&& queryBuilder == null && aggregations == null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rewrites this search source builder into its primitive form. e.g. by
|
||||
* rewriting the QueryBuilder. If the builder did not change the identity
|
||||
|
@ -907,9 +914,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
} else if (context.parseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) {
|
||||
suggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
|
||||
sorts = new ArrayList<>();
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
|
||||
sorts.add(xContentBuilder.bytes());
|
||||
sorts = new ArrayList<>(SortBuilder.fromXContent(context));
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, EXT_FIELD)) {
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
|
||||
ext = xContentBuilder.bytes();
|
||||
|
@ -940,11 +945,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
}
|
||||
}
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
|
||||
sorts = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
|
||||
sorts.add(xContentBuilder.bytes());
|
||||
}
|
||||
sorts = new ArrayList<>(SortBuilder.fromXContent(context));
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) {
|
||||
rescoreBuilders = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
|
@ -1057,10 +1058,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
|
||||
if (sorts != null) {
|
||||
builder.startArray(SORT_FIELD.getPreferredName());
|
||||
for (BytesReference sort : sorts) {
|
||||
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(sort);
|
||||
parser.nextToken();
|
||||
builder.copyCurrentStructure(parser);
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
sort.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
|
@ -1266,9 +1265,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
builder.size = in.readVInt();
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
List<BytesReference> sorts = new ArrayList<>();
|
||||
List<SortBuilder<?>> sorts = new ArrayList<>();
|
||||
for (int i = 0; i < size; i++) {
|
||||
sorts.add(in.readBytesReference());
|
||||
sorts.add(in.readSortBuilder());
|
||||
}
|
||||
builder.sorts = sorts;
|
||||
}
|
||||
|
@ -1382,8 +1381,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
out.writeBoolean(hasSorts);
|
||||
if (hasSorts) {
|
||||
out.writeVInt(sorts.size());
|
||||
for (BytesReference sort : sorts) {
|
||||
out.writeBytesReference(sort);
|
||||
for (SortBuilder<?> sort : sorts) {
|
||||
out.writeSortBuilder(sort);
|
||||
}
|
||||
}
|
||||
boolean hasStats = stats != null;
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.elasticsearch.search.highlight.HighlighterParseElement;
|
|||
import org.elasticsearch.search.internal.InternalSearchHit;
|
||||
import org.elasticsearch.search.internal.InternalSearchHits;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.sort.SortParseElement;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
|
@ -51,8 +50,8 @@ public class InnerHitsFetchSubPhase implements FetchSubPhase {
|
|||
private FetchPhase fetchPhase;
|
||||
|
||||
@Inject
|
||||
public InnerHitsFetchSubPhase(SortParseElement sortParseElement, FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
|
||||
parseElements = singletonMap("inner_hits", new InnerHitsParseElement(sortParseElement, sourceParseElement, highlighterParseElement,
|
||||
public InnerHitsFetchSubPhase(FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
|
||||
parseElements = singletonMap("inner_hits", new InnerHitsParseElement(sourceParseElement, highlighterParseElement,
|
||||
fieldDataFieldsParseElement, scriptFieldsParseElement));
|
||||
}
|
||||
|
||||
|
|
|
@ -32,7 +32,6 @@ import org.elasticsearch.search.fetch.source.FetchSourceParseElement;
|
|||
import org.elasticsearch.search.highlight.HighlighterParseElement;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.internal.SubSearchContext;
|
||||
import org.elasticsearch.search.sort.SortParseElement;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
@ -43,14 +42,12 @@ import static org.elasticsearch.index.query.support.InnerHitsQueryParserHelper.p
|
|||
*/
|
||||
public class InnerHitsParseElement implements SearchParseElement {
|
||||
|
||||
private final SortParseElement sortParseElement;
|
||||
private final FetchSourceParseElement sourceParseElement;
|
||||
private final HighlighterParseElement highlighterParseElement;
|
||||
private final FieldDataFieldsParseElement fieldDataFieldsParseElement;
|
||||
private final ScriptFieldsParseElement scriptFieldsParseElement;
|
||||
|
||||
public InnerHitsParseElement(SortParseElement sortParseElement, FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
|
||||
this.sortParseElement = sortParseElement;
|
||||
public InnerHitsParseElement(FetchSourceParseElement sourceParseElement, HighlighterParseElement highlighterParseElement, FieldDataFieldsParseElement fieldDataFieldsParseElement, ScriptFieldsParseElement scriptFieldsParseElement) {
|
||||
this.sourceParseElement = sourceParseElement;
|
||||
this.highlighterParseElement = highlighterParseElement;
|
||||
this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
|
||||
|
@ -184,10 +181,10 @@ public class InnerHitsParseElement implements SearchParseElement {
|
|||
} else if ("inner_hits".equals(fieldName)) {
|
||||
childInnerHits = parseInnerHits(parser, context, searchContext);
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sortParseElement, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
} else {
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sortParseElement, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
parseCommonInnerHitOptions(parser, token, fieldName, subSearchContext, sourceParseElement, highlighterParseElement, scriptFieldsParseElement, fieldDataFieldsParseElement);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -191,6 +191,9 @@ public class DefaultSearchContext extends SearchContext {
|
|||
*/
|
||||
@Override
|
||||
public void preProcess() {
|
||||
if (hasOnlySuggest() ) {
|
||||
return;
|
||||
}
|
||||
if (scrollContext == null) {
|
||||
long from = from() == -1 ? 0 : from();
|
||||
long size = size() == -1 ? 10 : size();
|
||||
|
|
|
@ -353,6 +353,14 @@ public abstract class SearchContext implements Releasable {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if the request contains only suggest
|
||||
*/
|
||||
public final boolean hasOnlySuggest() {
|
||||
return request().source() != null
|
||||
&& request().source().isSuggestOnly();
|
||||
}
|
||||
|
||||
/**
|
||||
* Looks up the given field, but does not restrict to fields in the types set on this context.
|
||||
*/
|
||||
|
|
|
@ -58,7 +58,6 @@ import org.elasticsearch.search.profile.ProfileShardResult;
|
|||
import org.elasticsearch.search.profile.Profiler;
|
||||
import org.elasticsearch.search.rescore.RescorePhase;
|
||||
import org.elasticsearch.search.rescore.RescoreSearchContext;
|
||||
import org.elasticsearch.search.sort.SortParseElement;
|
||||
import org.elasticsearch.search.sort.TrackScoresParseElement;
|
||||
import org.elasticsearch.search.suggest.SuggestPhase;
|
||||
|
||||
|
@ -98,7 +97,6 @@ public class QueryPhase implements SearchPhase {
|
|||
parseElements.put("query", new QueryParseElement());
|
||||
parseElements.put("post_filter", new PostFilterParseElement());
|
||||
parseElements.put("postFilter", new PostFilterParseElement());
|
||||
parseElements.put("sort", new SortParseElement());
|
||||
parseElements.put("trackScores", new TrackScoresParseElement());
|
||||
parseElements.put("track_scores", new TrackScoresParseElement());
|
||||
parseElements.put("min_score", new MinScoreParseElement());
|
||||
|
@ -118,6 +116,12 @@ public class QueryPhase implements SearchPhase {
|
|||
|
||||
@Override
|
||||
public void execute(SearchContext searchContext) throws QueryPhaseExecutionException {
|
||||
if (searchContext.hasOnlySuggest()) {
|
||||
suggestPhase.execute(searchContext);
|
||||
// TODO: fix this once we can fetch docs for suggestions
|
||||
searchContext.queryResult().topDocs(new TopDocs(0, Lucene.EMPTY_SCORE_DOCS, 0));
|
||||
return;
|
||||
}
|
||||
// Pre-process aggregations as late as possible. In the case of a DFS_Q_T_F
|
||||
// request, preProcess is called on the DFS phase phase, this is why we pre-process them
|
||||
// here to make sure it happens during the QUERY phase
|
||||
|
|
|
@ -20,12 +20,10 @@
|
|||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
@ -44,7 +42,7 @@ import java.util.Objects;
|
|||
* A sort builder to sort based on a document field.
|
||||
*/
|
||||
public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
|
||||
static final FieldSortBuilder PROTOTYPE = new FieldSortBuilder("");
|
||||
public static final FieldSortBuilder PROTOTYPE = new FieldSortBuilder("_na_");
|
||||
public static final String NAME = "field_sort";
|
||||
public static final ParseField NESTED_PATH = new ParseField("nested_path");
|
||||
public static final ParseField NESTED_FILTER = new ParseField("nested_filter");
|
||||
|
@ -108,19 +106,12 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
|
|||
* <tt>_first</tt> to sort missing last or first respectively.
|
||||
*/
|
||||
public FieldSortBuilder missing(Object missing) {
|
||||
if (missing instanceof String) {
|
||||
this.missing = BytesRefs.toBytesRef(missing);
|
||||
} else {
|
||||
this.missing = missing;
|
||||
}
|
||||
this.missing = missing;
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Returns the value used when a field is missing in a doc. */
|
||||
public Object missing() {
|
||||
if (missing instanceof BytesRef) {
|
||||
return ((BytesRef) missing).utf8ToString();
|
||||
}
|
||||
return missing;
|
||||
}
|
||||
|
||||
|
@ -207,14 +198,11 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.startObject(fieldName);
|
||||
builder.field(ORDER_FIELD.getPreferredName(), order);
|
||||
if (missing != null) {
|
||||
if (missing instanceof BytesRef) {
|
||||
builder.field(MISSING.getPreferredName(), ((BytesRef) missing).utf8ToString());
|
||||
} else {
|
||||
builder.field(MISSING.getPreferredName(), missing);
|
||||
}
|
||||
builder.field(MISSING.getPreferredName(), missing);
|
||||
}
|
||||
if (unmappedType != null) {
|
||||
builder.field(UNMAPPED_TYPE.getPreferredName(), unmappedType);
|
||||
|
@ -229,6 +217,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
|
|||
builder.field(NESTED_PATH.getPreferredName(), nestedPath);
|
||||
}
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
@ -375,7 +364,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
|
|||
if (context.parseFieldMatcher().match(currentFieldName, NESTED_PATH)) {
|
||||
nestedPath = parser.text();
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, MISSING)) {
|
||||
missing = parser.objectBytes();
|
||||
missing = parser.objectText();
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, ORDER)) {
|
||||
String sortOrder = parser.text();
|
||||
if ("asc".equals(sortOrder)) {
|
||||
|
|
|
@ -73,7 +73,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
|
||||
public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
|
||||
|
||||
static final GeoDistanceSortBuilder PROTOTYPE = new GeoDistanceSortBuilder("", -1, -1);
|
||||
public static final GeoDistanceSortBuilder PROTOTYPE = new GeoDistanceSortBuilder("_na_", -1, -1);
|
||||
|
||||
private final String fieldName;
|
||||
private final List<GeoPoint> points = new ArrayList<>();
|
||||
|
@ -299,6 +299,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.startObject(NAME);
|
||||
|
||||
builder.startArray(fieldName);
|
||||
|
@ -324,6 +325,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
|
|||
builder.field(COERCE_FIELD.getPreferredName(), coerce);
|
||||
builder.field(IGNORE_MALFORMED_FIELD.getPreferredName(), ignoreMalformed);
|
||||
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@ -1,220 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.NumericDocValues;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.search.FieldComparator;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.apache.lucene.search.join.BitSetProducer;
|
||||
import org.apache.lucene.util.BitSet;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||
import org.elasticsearch.index.fielddata.NumericDoubleValues;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
/**
 * Parses the {@code _geo_distance} (or legacy camel-case {@code _geoDistance}) element of a
 * search request's {@code sort} section into a Lucene {@link SortField} that orders documents
 * by their distance from one or more reference geo points.
 */
public class GeoDistanceSortParser implements SortParser {

    @Override
    public String[] names() {
        // Both the canonical underscore form and the legacy camel-case alias are accepted.
        return new String[]{"_geo_distance", "_geoDistance"};
    }

    /**
     * Parses the sort element the parser is currently positioned inside.
     *
     * @param parser  content parser positioned inside the {@code _geo_distance} object
     * @param context shard context used to resolve mappings, field data and nested filters
     * @return a {@link SortField} backed by a comparator over per-document distances
     * @throws IOException if reading from the underlying content source fails
     */
    @Override
    public SortField parse(XContentParser parser, QueryShardContext context) throws IOException {
        String fieldName = null;                       // name of the geo_point field to sort on
        List<GeoPoint> geoPoints = new ArrayList<>();  // reference point(s) distances are measured from
        DistanceUnit unit = DistanceUnit.DEFAULT;
        GeoDistance geoDistance = GeoDistance.DEFAULT;
        boolean reverse = false;                       // true => descending order
        MultiValueMode sortMode = null;                // reduction over multi-valued fields; defaulted later if unset
        NestedInnerQueryParseSupport nestedHelper = null;

        // Pre-2.0 indexes additionally accept the legacy "normalize" key as an alias for "coerce".
        final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
        boolean coerce = GeoDistanceSortBuilder.DEFAULT_COERCE;
        boolean ignoreMalformed = GeoDistanceSortBuilder.DEFAULT_IGNORE_MALFORMED;

        XContentParser.Token token;
        String currentName = parser.currentName();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                // field : [ ... ] — an array of points; the key is taken as the field name
                GeoDistanceSortBuilder.parseGeoPoints(parser, geoPoints);

                fieldName = currentName;
            } else if (token == XContentParser.Token.START_OBJECT) {
                // the json in the format of -> field : { lat : 30, lon : 12 }
                if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) {
                    if (nestedHelper == null) {
                        nestedHelper = new NestedInnerQueryParseSupport(parser, context);
                    }
                    nestedHelper.filter();
                } else {
                    fieldName = currentName;
                    GeoPoint point = new GeoPoint();
                    GeoUtils.parseGeoPoint(parser, point);
                    geoPoints.add(point);
                }
            } else if (token.isValue()) {
                if ("reverse".equals(currentName)) {
                    reverse = parser.booleanValue();
                } else if ("order".equals(currentName)) {
                    // only the exact string "desc" flips the order; anything else sorts ascending
                    reverse = "desc".equals(parser.text());
                } else if (currentName.equals("unit")) {
                    unit = DistanceUnit.fromString(parser.text());
                } else if (currentName.equals("distance_type") || currentName.equals("distanceType")) {
                    geoDistance = GeoDistance.fromString(parser.text());
                } else if ("coerce".equals(currentName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentName))) {
                    coerce = parser.booleanValue();
                    // enabling coercion implies malformed points are tolerated (they get normalized below)
                    if (coerce == true) {
                        ignoreMalformed = true;
                    }
                } else if ("ignore_malformed".equals(currentName)) {
                    boolean ignoreMalformedFlag = parser.booleanValue();
                    // coerce=true already forced ignore_malformed on; an explicit flag must not undo that
                    if (coerce == false) {
                        ignoreMalformed = ignoreMalformedFlag;
                    }
                } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
                    sortMode = MultiValueMode.fromString(parser.text());
                } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
                    if (nestedHelper == null) {
                        nestedHelper = new NestedInnerQueryParseSupport(parser, context);
                    }
                    nestedHelper.setPath(parser.text());
                } else {
                    // any other scalar entry is treated as field : "<point string>" — the key is the field name
                    GeoPoint point = new GeoPoint();
                    point.resetFromString(parser.text());
                    geoPoints.add(point);
                    fieldName = currentName;
                }
            }
        }

        // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
        if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
            for (GeoPoint point : geoPoints) {
                if (point.lat() > 90.0 || point.lat() < -90.0) {
                    throw new ElasticsearchParseException("illegal latitude value [{}] for [GeoDistanceSort]", point.lat());
                }
                if (point.lon() > 180.0 || point.lon() < -180) {
                    throw new ElasticsearchParseException("illegal longitude value [{}] for [GeoDistanceSort]", point.lon());
                }
            }
        }

        if (coerce) {
            // normalize each reference point (both lat and lon) into canonical ranges
            for (GeoPoint point : geoPoints) {
                GeoUtils.normalizePoint(point, coerce, coerce);
            }
        }

        if (sortMode == null) {
            // default reduction: nearest point for ascending sorts, farthest for descending
            sortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
        }

        if (sortMode == MultiValueMode.SUM) {
            throw new IllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance");
        }

        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType == null) {
            throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort");
        }
        final MultiValueMode finalSortMode = sortMode; // final reference for use in the anonymous class
        final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType);
        // one fixed-source distance function per reference point, in the requested unit
        final FixedSourceDistance[] distances = new FixedSourceDistance[geoPoints.size()];
        for (int i = 0; i < geoPoints.size(); i++) {
            distances[i] = geoDistance.fixedSourceDistance(geoPoints.get(i).lat(), geoPoints.get(i).lon(), unit);
        }

        final Nested nested;
        if (nestedHelper != null && nestedHelper.getPath() != null) {
            BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
            Query innerDocumentsQuery;
            if (nestedHelper.filterFound()) {
                // TODO: use queries instead
                innerDocumentsQuery = nestedHelper.getInnerFilter();
            } else {
                innerDocumentsQuery = nestedHelper.getNestedObjectMapper().nestedTypeFilter();
            }

            nested = new Nested(rootDocumentsFilter, innerDocumentsQuery);
        } else {
            nested = null;
        }

        // Comparator source computing, per document, the sortMode-reduced distance to the reference points.
        IndexFieldData.XFieldComparatorSource geoDistanceComparatorSource = new IndexFieldData.XFieldComparatorSource() {

            @Override
            public SortField.Type reducedType() {
                return SortField.Type.DOUBLE;
            }

            @Override
            public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
                return new FieldComparator.DoubleComparator(numHits, null, null) {
                    @Override
                    protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
                        final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues();
                        final SortedNumericDoubleValues distanceValues = GeoDistance.distanceValues(geoPointValues, distances);
                        final NumericDoubleValues selectedValues;
                        if (nested == null) {
                            // documents without a value get distance Double.MAX_VALUE
                            selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE);
                        } else {
                            // reduce over the inner (nested) docs belonging to each root document
                            final BitSet rootDocs = nested.rootDocs(context);
                            final DocIdSetIterator innerDocs = nested.innerDocs(context);
                            selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE, rootDocs, innerDocs, context.reader().maxDoc());
                        }
                        return selectedValues.getRawDoubleValues();
                    }
                };
            }

        };

        return new SortField(fieldName, geoDistanceComparatorSource, reverse);
    }

}
|
|
@ -39,7 +39,7 @@ import java.util.Objects;
|
|||
public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
|
||||
|
||||
public static final String NAME = "_score";
|
||||
static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder();
|
||||
public static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder();
|
||||
public static final ParseField ORDER_FIELD = new ParseField("order");
|
||||
private static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE);
|
||||
private static final SortField SORT_SCORE_REVERSE = new SortField(null, SortField.Type.SCORE, true);
|
||||
|
@ -52,9 +52,11 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.startObject(NAME);
|
||||
builder.field(ORDER_FIELD.getPreferredName(), order);
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue