mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-03-30 20:08:29 +00:00
Refactors building query specific objects from the term suggestion
builder and merges changes between the suggestion builders to context object implementations.
This commit is contained in:
commit
eed557742f
buildSrc/src/main
groovy/org/elasticsearch/gradle/plugin
resources
core/src
main/java/org/elasticsearch
action
ActionModule.java
admin
cluster/node/tasks
cancel
CancelTasksAction.javaCancelTasksRequest.javaCancelTasksRequestBuilder.javaCancelTasksResponse.javaTransportCancelTasksAction.java
list
indices/alias
suggest
support
client
cluster
common
hash
io/stream
lucene/search/function
FieldValueFactorFunction.javaRandomScoreFunction.javaScoreFunction.javaScriptScoreFunction.javaWeightFactorFunction.java
network
xcontent/support
env
index
ingest/core
plugins
rest/action/admin
cluster/node/tasks
indices/alias
search
sort
GeoDistanceSortBuilder.javaGeoDistanceSortParser.javaSortBuilders.javaSortElementParserTemp.javaSortOrder.java
suggest
DirectSpellcheckerSettings.javaSortBy.javaSuggest.javaSuggestBuilder.javaSuggestParseElement.javaSuggestUtils.javaSuggesters.javaSuggestionBuilder.javaSuggestionSearchContext.java
completion
CompletionSuggestParser.javaCompletionSuggester.javaCompletionSuggestionBuilder.javaCompletionSuggestionContext.java
phrase
DirectCandidateGeneratorBuilder.javaLaplace.javaLinearInterpolation.javaPhraseSuggestParser.javaPhraseSuggester.javaPhraseSuggestionBuilder.javaPhraseSuggestionContext.javaSmoothingModel.javaStupidBackoff.java
term
tasks
transport
test/java/org/elasticsearch
action/admin/cluster/node/tasks
CancellableTasksTests.javaTaskManagerTestCase.javaTasksIT.javaTestTaskPlugin.javaTransportTasksActionTests.java
aliases
index
query
similarity
indices/mapping
search
@ -112,6 +112,9 @@ public class PluginBuildPlugin extends BuildPlugin {
|
||||
include 'config/**'
|
||||
include 'bin/**'
|
||||
}
|
||||
if (project.path.startsWith(':modules:') == false) {
|
||||
into('elasticsearch')
|
||||
}
|
||||
}
|
||||
project.assemble.dependsOn(bundle)
|
||||
|
||||
|
@ -37,6 +37,8 @@
|
||||
hard to distinguish from the digit 1 (one). -->
|
||||
<module name="UpperEll"/>
|
||||
|
||||
<module name="EqualsHashCode" />
|
||||
|
||||
<!-- We don't use Java's builtin serialization and we suppress all warning
|
||||
about it. The flip side of that coin is that we shouldn't _try_ to use
|
||||
it. We can't outright ban it with ForbiddenApis because it complain about
|
||||
|
@ -673,7 +673,6 @@
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]PipelineExecutionService.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]PipelineStore.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]core[/\\]CompoundProcessor.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]core[/\\]ConfigurationUtils.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]core[/\\]IngestDocument.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]core[/\\]Pipeline.java" checks="LineLength" />
|
||||
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]ingest[/\\]processor[/\\]ConvertProcessor.java" checks="LineLength" />
|
||||
|
@ -128,4 +128,6 @@ java.util.Collections#EMPTY_SET
|
||||
java.util.Collections#shuffle(java.util.List) @ Use java.util.Collections#shuffle(java.util.List, java.util.Random) with a reproducible source of randomness
|
||||
@defaultMessage Use org.elasticsearch.common.Randomness#get for reproducible sources of randomness
|
||||
java.util.Random#<init>()
|
||||
java.util.concurrent.ThreadLocalRandom
|
||||
java.util.concurrent.ThreadLocalRandom
|
||||
|
||||
java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests
|
||||
|
@ -1,13 +1,14 @@
|
||||
# Elasticsearch plugin descriptor file
|
||||
# This file must exist as 'plugin-descriptor.properties' at
|
||||
# the root directory of all plugins.
|
||||
# This file must exist as 'plugin-descriptor.properties' in a folder named `elasticsearch`
|
||||
# inside all plugins.
|
||||
#
|
||||
### example plugin for "foo"
|
||||
#
|
||||
# foo.zip <-- zip file for the plugin, with this structure:
|
||||
# <arbitrary name1>.jar <-- classes, resources, dependencies
|
||||
# <arbitrary nameN>.jar <-- any number of jars
|
||||
# plugin-descriptor.properties <-- example contents below:
|
||||
#|____elasticsearch/
|
||||
#| |____ <arbitrary name1>.jar <-- classes, resources, dependencies
|
||||
#| |____ <arbitrary nameN>.jar <-- any number of jars
|
||||
#| |____ plugin-descriptor.properties <-- example contents below:
|
||||
#
|
||||
# classname=foo.bar.BazPlugin
|
||||
# description=My cool plugin
|
||||
|
@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction
|
||||
import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction;
|
||||
@ -268,6 +270,7 @@ public class ActionModule extends AbstractModule {
|
||||
registerAction(NodesStatsAction.INSTANCE, TransportNodesStatsAction.class);
|
||||
registerAction(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class);
|
||||
registerAction(ListTasksAction.INSTANCE, TransportListTasksAction.class);
|
||||
registerAction(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class);
|
||||
|
||||
registerAction(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class);
|
||||
registerAction(ClusterStateAction.INSTANCE, TransportClusterStateAction.class);
|
||||
|
46
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java
Normal file
46
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java
Normal file
@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks.cancel;
|
||||
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
|
||||
/**
|
||||
* Action for cancelling running tasks
|
||||
*/
|
||||
public class CancelTasksAction extends Action<CancelTasksRequest, CancelTasksResponse, CancelTasksRequestBuilder> {
|
||||
|
||||
public static final CancelTasksAction INSTANCE = new CancelTasksAction();
|
||||
public static final String NAME = "cluster:admin/tasks/cancel";
|
||||
|
||||
private CancelTasksAction() {
|
||||
super(NAME);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CancelTasksResponse newResponse() {
|
||||
return new CancelTasksResponse();
|
||||
}
|
||||
|
||||
@Override
|
||||
public CancelTasksRequestBuilder newRequestBuilder(ElasticsearchClient client) {
|
||||
return new CancelTasksRequestBuilder(client, this);
|
||||
}
|
||||
}
|
73
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java
Normal file
73
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java
Normal file
@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks.cancel;
|
||||
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.tasks.CancellableTask;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A request to cancel tasks
|
||||
*/
|
||||
public class CancelTasksRequest extends BaseTasksRequest<CancelTasksRequest> {
|
||||
|
||||
public static final String DEFAULT_REASON = "by user request";
|
||||
|
||||
private String reason = DEFAULT_REASON;
|
||||
|
||||
/**
|
||||
* Cancel tasks on the specified nodes. If none are passed, all cancellable tasks on
|
||||
* all nodes will be cancelled.
|
||||
*/
|
||||
public CancelTasksRequest(String... nodesIds) {
|
||||
super(nodesIds);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
reason = in.readString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(reason);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean match(Task task) {
|
||||
return super.match(task) && task instanceof CancellableTask;
|
||||
}
|
||||
|
||||
public CancelTasksRequest reason(String reason) {
|
||||
this.reason = reason;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String reason() {
|
||||
return reason;
|
||||
}
|
||||
}
|
34
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java
Normal file
34
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java
Normal file
@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks.cancel;
|
||||
|
||||
import org.elasticsearch.action.support.tasks.TasksRequestBuilder;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
|
||||
/**
|
||||
* Builder for the request to cancel tasks running on the specified nodes
|
||||
*/
|
||||
public class CancelTasksRequestBuilder extends TasksRequestBuilder<CancelTasksRequest, CancelTasksResponse, CancelTasksRequestBuilder> {
|
||||
|
||||
public CancelTasksRequestBuilder(ElasticsearchClient client, CancelTasksAction action) {
|
||||
super(client, action, new CancelTasksRequest());
|
||||
}
|
||||
|
||||
}
|
42
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java
Normal file
42
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java
Normal file
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks.cancel;
|
||||
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Returns the list of tasks that were cancelled
|
||||
*/
|
||||
public class CancelTasksResponse extends ListTasksResponse {
|
||||
|
||||
public CancelTasksResponse() {
|
||||
}
|
||||
|
||||
public CancelTasksResponse(List<TaskInfo> tasks, List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException>
|
||||
nodeFailures) {
|
||||
super(tasks, taskFailures, nodeFailures);
|
||||
}
|
||||
|
||||
}
|
285
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java
Normal file
285
core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java
Normal file
@ -0,0 +1,285 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks.cancel;
|
||||
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
|
||||
import org.elasticsearch.action.support.tasks.TransportTasksAction;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.ClusterState;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.CancellableTask;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.EmptyTransportResponseHandler;
|
||||
import org.elasticsearch.transport.TransportChannel;
|
||||
import org.elasticsearch.transport.TransportException;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
import org.elasticsearch.transport.TransportRequestHandler;
|
||||
import org.elasticsearch.transport.TransportResponse;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
|
||||
* Transport action that can be used to cancel currently running cancellable tasks.
|
||||
* <p>
|
||||
* For a task to be cancellable it has to return an instance of
|
||||
* {@link CancellableTask} from {@link TransportRequest#createTask(long, String, String)}
|
||||
*/
|
||||
public class TransportCancelTasksAction extends TransportTasksAction<CancellableTask, CancelTasksRequest, CancelTasksResponse, TaskInfo> {
|
||||
|
||||
public static final String BAN_PARENT_ACTION_NAME = "internal:admin/tasks/ban";
|
||||
|
||||
@Inject
|
||||
public TransportCancelTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService,
|
||||
TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver
|
||||
indexNameExpressionResolver) {
|
||||
super(settings, CancelTasksAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters,
|
||||
indexNameExpressionResolver, CancelTasksRequest::new, CancelTasksResponse::new, ThreadPool.Names.MANAGEMENT);
|
||||
transportService.registerRequestHandler(BAN_PARENT_ACTION_NAME, BanParentTaskRequest::new, ThreadPool.Names.SAME, new
|
||||
BanParentRequestHandler());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CancelTasksResponse newResponse(CancelTasksRequest request, List<TaskInfo> tasks, List<TaskOperationFailure>
|
||||
taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
|
||||
return new CancelTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected TaskInfo readTaskResponse(StreamInput in) throws IOException {
|
||||
return new TaskInfo(in);
|
||||
}
|
||||
|
||||
protected void processTasks(CancelTasksRequest request, Consumer<CancellableTask> operation) {
|
||||
if (request.taskId() != BaseTasksRequest.ALL_TASKS) {
|
||||
// we are only checking one task, we can optimize it
|
||||
CancellableTask task = taskManager.getCancellableTask(request.taskId());
|
||||
if (task != null) {
|
||||
if (request.match(task)) {
|
||||
operation.accept(task);
|
||||
} else {
|
||||
throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support this operation");
|
||||
}
|
||||
} else {
|
||||
if (taskManager.getTask(request.taskId()) != null) {
|
||||
// The task exists, but doesn't support cancellation
|
||||
throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support cancellation");
|
||||
} else {
|
||||
throw new ResourceNotFoundException("task [{}] doesn't support cancellation", request.taskId());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (CancellableTask task : taskManager.getCancellableTasks().values()) {
|
||||
if (request.match(task)) {
|
||||
operation.accept(task);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected synchronized TaskInfo taskOperation(CancelTasksRequest request, CancellableTask cancellableTask) {
|
||||
final BanLock banLock = new BanLock(nodes -> removeBanOnNodes(cancellableTask, nodes));
|
||||
Set<String> childNodes = taskManager.cancel(cancellableTask, request.reason(), banLock::onTaskFinished);
|
||||
if (childNodes != null) {
|
||||
if (childNodes.isEmpty()) {
|
||||
logger.trace("cancelling task {} with no children", cancellableTask.getId());
|
||||
return cancellableTask.taskInfo(clusterService.localNode(), false);
|
||||
} else {
|
||||
logger.trace("cancelling task {} with children on nodes [{}]", cancellableTask.getId(), childNodes);
|
||||
setBanOnNodes(request.reason(), cancellableTask, childNodes, banLock);
|
||||
return cancellableTask.taskInfo(clusterService.localNode(), false);
|
||||
}
|
||||
} else {
|
||||
logger.trace("task {} is already cancelled", cancellableTask.getId());
|
||||
throw new IllegalStateException("task with id " + cancellableTask.getId() + " is already cancelled");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean accumulateExceptions() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private void setBanOnNodes(String reason, CancellableTask task, Set<String> nodes, BanLock banLock) {
|
||||
sendSetBanRequest(nodes, new BanParentTaskRequest(clusterService.localNode().getId(), task.getId(), reason), banLock);
|
||||
}
|
||||
|
||||
private void removeBanOnNodes(CancellableTask task, Set<String> nodes) {
|
||||
sendRemoveBanRequest(nodes, new BanParentTaskRequest(clusterService.localNode().getId(), task.getId()));
|
||||
}
|
||||
|
||||
private void sendSetBanRequest(Set<String> nodes, BanParentTaskRequest request, BanLock banLock) {
|
||||
ClusterState clusterState = clusterService.state();
|
||||
for (String node : nodes) {
|
||||
DiscoveryNode discoveryNode = clusterState.getNodes().get(node);
|
||||
if (discoveryNode != null) {
|
||||
// Check if node still in the cluster
|
||||
logger.debug("Sending ban for tasks with the parent [{}:{}] to the node [{}], ban [{}]", request.parentNodeId, request
|
||||
.parentTaskId, node, request.ban);
|
||||
transportService.sendRequest(discoveryNode, BAN_PARENT_ACTION_NAME, request,
|
||||
new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
|
||||
@Override
|
||||
public void handleResponse(TransportResponse.Empty response) {
|
||||
banLock.onBanSet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleException(TransportException exp) {
|
||||
banLock.onBanSet();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
banLock.onBanSet();
|
||||
logger.debug("Cannot send ban for tasks with the parent [{}:{}] to the node [{}] - the node no longer in the cluster",
|
||||
request.parentNodeId, request.parentTaskId, node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void sendRemoveBanRequest(Set<String> nodes, BanParentTaskRequest request) {
|
||||
ClusterState clusterState = clusterService.state();
|
||||
for (String node : nodes) {
|
||||
DiscoveryNode discoveryNode = clusterState.getNodes().get(node);
|
||||
if (discoveryNode != null) {
|
||||
// Check if node still in the cluster
|
||||
logger.debug("Sending remove ban for tasks with the parent [{}:{}] to the node [{}]", request.parentNodeId,
|
||||
request.parentTaskId, node);
|
||||
transportService.sendRequest(discoveryNode, BAN_PARENT_ACTION_NAME, request, EmptyTransportResponseHandler
|
||||
.INSTANCE_SAME);
|
||||
} else {
|
||||
logger.debug("Cannot send remove ban request for tasks with the parent [{}:{}] to the node [{}] - the node no longer in " +
|
||||
"the cluster", request.parentNodeId, request.parentTaskId, node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static class BanLock {
|
||||
private final Consumer<Set<String>> finish;
|
||||
private final AtomicInteger counter;
|
||||
private final AtomicReference<Set<String>> nodes = new AtomicReference<>();
|
||||
|
||||
public BanLock(Consumer<Set<String>> finish) {
|
||||
counter = new AtomicInteger(0);
|
||||
this.finish = finish;
|
||||
}
|
||||
|
||||
public void onBanSet() {
|
||||
if (counter.decrementAndGet() == 0) {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
public void onTaskFinished(Set<String> nodes) {
|
||||
this.nodes.set(nodes);
|
||||
if (counter.addAndGet(nodes.size()) == 0) {
|
||||
finish();
|
||||
}
|
||||
}
|
||||
|
||||
public void finish() {
|
||||
finish.accept(nodes.get());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class BanParentTaskRequest extends TransportRequest {
|
||||
|
||||
private String parentNodeId;
|
||||
|
||||
private long parentTaskId;
|
||||
|
||||
private boolean ban;
|
||||
|
||||
private String reason;
|
||||
|
||||
BanParentTaskRequest(String parentNodeId, long parentTaskId, String reason) {
|
||||
this.parentNodeId = parentNodeId;
|
||||
this.parentTaskId = parentTaskId;
|
||||
this.ban = true;
|
||||
this.reason = reason;
|
||||
}
|
||||
|
||||
BanParentTaskRequest(String parentNodeId, long parentTaskId) {
|
||||
this.parentNodeId = parentNodeId;
|
||||
this.parentTaskId = parentTaskId;
|
||||
this.ban = false;
|
||||
}
|
||||
|
||||
public BanParentTaskRequest() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
parentNodeId = in.readString();
|
||||
parentTaskId = in.readLong();
|
||||
ban = in.readBoolean();
|
||||
if (ban) {
|
||||
reason = in.readString();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(parentNodeId);
|
||||
out.writeLong(parentTaskId);
|
||||
out.writeBoolean(ban);
|
||||
if (ban) {
|
||||
out.writeString(reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class BanParentRequestHandler implements TransportRequestHandler<BanParentTaskRequest> {
|
||||
@Override
|
||||
public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel) throws Exception {
|
||||
if (request.ban) {
|
||||
logger.debug("Received ban for the parent [{}:{}] on the node [{}], reason: [{}]", request.parentNodeId, request
|
||||
.parentTaskId, clusterService.localNode().getId(), request.reason);
|
||||
taskManager.setBan(request.parentNodeId, request.parentTaskId, request.reason);
|
||||
} else {
|
||||
logger.debug("Removing ban for the parent [{}:{}] on the node [{}]", request.parentNodeId, request.parentTaskId,
|
||||
clusterService.localNode().getId());
|
||||
taskManager.removeBan(request.parentNodeId, request.parentTaskId);
|
||||
}
|
||||
channel.sendResponse(TransportResponse.Empty.INSTANCE);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -105,7 +105,9 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
|
||||
if (getTaskFailures() != null && getTaskFailures().size() > 0) {
|
||||
builder.startArray("task_failures");
|
||||
for (TaskOperationFailure ex : getTaskFailures()){
|
||||
builder.startObject();
|
||||
builder.value(ex);
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
@ -113,7 +115,9 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
|
||||
if (getNodeFailures() != null && getNodeFailures().size() > 0) {
|
||||
builder.startArray("node_failures");
|
||||
for (FailedNodeException ex : getNodeFailures()) {
|
||||
builder.value(ex);
|
||||
builder.startObject();
|
||||
ex.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
|
@ -30,17 +30,17 @@ import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.tasks.TaskManager;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class TransportListTasksAction extends TransportTasksAction<ListTasksRequest, ListTasksResponse, TaskInfo> {
|
||||
public class TransportListTasksAction extends TransportTasksAction<Task, ListTasksRequest, ListTasksResponse, TaskInfo> {
|
||||
|
||||
@Inject
|
||||
public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
|
||||
|
@ -286,24 +286,25 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
|
||||
return addValidationError("Must specify at least one alias action", validationException);
|
||||
}
|
||||
for (AliasActions aliasAction : allAliasActions) {
|
||||
if (aliasAction.aliases.length == 0) {
|
||||
if (CollectionUtils.isEmpty(aliasAction.aliases)) {
|
||||
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
|
||||
+ "]: aliases may not be empty", validationException);
|
||||
}
|
||||
for (String alias : aliasAction.aliases) {
|
||||
if (!Strings.hasText(alias)) {
|
||||
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
|
||||
+ "]: [alias] may not be empty string", validationException);
|
||||
+ "]: Property [alias/aliases] is either missing or null", validationException);
|
||||
} else {
|
||||
for (String alias : aliasAction.aliases) {
|
||||
if (!Strings.hasText(alias)) {
|
||||
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
|
||||
+ "]: [alias/aliases] may not be empty string", validationException);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (CollectionUtils.isEmpty(aliasAction.indices)) {
|
||||
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
|
||||
+ "]: Property [index] was either missing or null", validationException);
|
||||
+ "]: Property [index/indices] is either missing or null", validationException);
|
||||
} else {
|
||||
for (String index : aliasAction.indices) {
|
||||
if (!Strings.hasText(index)) {
|
||||
validationException = addValidationError("Alias action [" + aliasAction.actionType().name().toLowerCase(Locale.ENGLISH)
|
||||
+ "]: [index] may not be empty string", validationException);
|
||||
+ "]: [index/indices] may not be empty string", validationException);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -59,7 +59,7 @@ public class SuggestRequestBuilder extends BroadcastOperationRequestBuilder<Sugg
|
||||
}
|
||||
|
||||
public SuggestRequestBuilder setSuggestText(String globalText) {
|
||||
this.suggest.setText(globalText);
|
||||
this.suggest.setGlobalText(globalText);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -44,20 +44,6 @@ public abstract class ChildTaskActionRequest<Request extends ActionRequest<Reque
|
||||
this.parentTaskId = parentTaskId;
|
||||
}
|
||||
|
||||
/**
|
||||
* The node that owns the parent task.
|
||||
*/
|
||||
public String getParentTaskNode() {
|
||||
return parentTaskNode;
|
||||
}
|
||||
|
||||
/**
|
||||
* The task id of the parent task on the parent node.
|
||||
*/
|
||||
public long getParentTaskId() {
|
||||
return parentTaskId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
@ -73,8 +59,12 @@ public abstract class ChildTaskActionRequest<Request extends ActionRequest<Reque
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action) {
|
||||
return new Task(id, type, action, this::getDescription, parentTaskNode, parentTaskId);
|
||||
public final Task createTask(long id, String type, String action) {
|
||||
return createTask(id, type, action, parentTaskNode, parentTaskId);
|
||||
}
|
||||
|
||||
public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) {
|
||||
return new Task(id, type, action, getDescription(), parentTaskNode, parentTaskId);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -58,7 +58,11 @@ public class ChildTaskRequest extends TransportRequest {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action) {
|
||||
return new Task(id, type, action, this::getDescription, parentTaskNode, parentTaskId);
|
||||
public final Task createTask(long id, String type, String action) {
|
||||
return createTask(id, type, action, parentTaskNode, parentTaskId);
|
||||
}
|
||||
|
||||
public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) {
|
||||
return new Task(id, type, action, getDescription(), parentTaskNode, parentTaskId);
|
||||
}
|
||||
}
|
||||
|
@ -87,6 +87,10 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
|
||||
|
||||
protected abstract ShardResponse shardOperation(ShardRequest request);
|
||||
|
||||
protected ShardResponse shardOperation(ShardRequest request, Task task) {
|
||||
return shardOperation(request);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the shards this operation will be executed on. The operation is executed once per shard iterator, typically
|
||||
* on the first shard in it. If the operation fails, it will be retried on the next shard in the iterator.
|
||||
@ -172,6 +176,7 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
|
||||
// no node connected, act as failure
|
||||
onOperation(shard, shardIt, shardIndex, new NoShardAvailableActionException(shardIt.shardId()));
|
||||
} else {
|
||||
taskManager.registerChildTask(task, node.getId());
|
||||
transportService.sendRequest(node, transportShardAction, shardRequest, new BaseTransportResponseHandler<ShardResponse>() {
|
||||
@Override
|
||||
public ShardResponse newInstance() {
|
||||
@ -278,8 +283,13 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
|
||||
class ShardTransportHandler implements TransportRequestHandler<ShardRequest> {
|
||||
|
||||
@Override
|
||||
public void messageReceived(final ShardRequest request, final TransportChannel channel) throws Exception {
|
||||
public void messageReceived(ShardRequest request, TransportChannel channel, Task task) throws Exception {
|
||||
channel.sendResponse(shardOperation(request));
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void messageReceived(final ShardRequest request, final TransportChannel channel) throws Exception {
|
||||
throw new UnsupportedOperationException("the task parameter is required");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -301,6 +301,7 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
|
||||
NodeRequest nodeRequest = new NodeRequest(node.getId(), request, shards);
|
||||
if (task != null) {
|
||||
nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
|
||||
taskManager.registerChildTask(task, node.getId());
|
||||
}
|
||||
transportService.sendRequest(node, transportNodeBroadcastAction, nodeRequest, new BaseTransportResponseHandler<NodeResponse>() {
|
||||
@Override
|
||||
|
@ -159,6 +159,7 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
|
||||
}
|
||||
}
|
||||
};
|
||||
taskManager.registerChildTask(task, nodes.getLocalNodeId());
|
||||
threadPool.executor(executor).execute(new ActionRunnable(delegate) {
|
||||
@Override
|
||||
protected void doRun() throws Exception {
|
||||
@ -171,6 +172,7 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
|
||||
logger.debug("no known master node, scheduling a retry");
|
||||
retry(null, MasterNodeChangePredicate.INSTANCE);
|
||||
} else {
|
||||
taskManager.registerChildTask(task, nodes.masterNode().getId());
|
||||
transportService.sendRequest(nodes.masterNode(), actionName, request, new ActionListenerResponseHandler<Response>(listener) {
|
||||
@Override
|
||||
public Response newInstance() {
|
||||
|
@ -95,6 +95,10 @@ public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest
|
||||
|
||||
protected abstract NodeResponse nodeOperation(NodeRequest request);
|
||||
|
||||
protected NodeResponse nodeOperation(NodeRequest request, Task task) {
|
||||
return nodeOperation(request);
|
||||
}
|
||||
|
||||
protected abstract boolean accumulateExceptions();
|
||||
|
||||
protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
|
||||
@ -163,6 +167,7 @@ public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest
|
||||
ChildTaskRequest nodeRequest = newNodeRequest(nodeId, request);
|
||||
if (task != null) {
|
||||
nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
|
||||
taskManager.registerChildTask(task, node.getId());
|
||||
}
|
||||
|
||||
transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), new BaseTransportResponseHandler<NodeResponse>() {
|
||||
@ -228,8 +233,14 @@ public abstract class TransportNodesAction<NodesRequest extends BaseNodesRequest
|
||||
class NodeTransportHandler implements TransportRequestHandler<NodeRequest> {
|
||||
|
||||
@Override
|
||||
public void messageReceived(final NodeRequest request, final TransportChannel channel) throws Exception {
|
||||
public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception {
|
||||
channel.sendResponse(nodeOperation(request, task));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void messageReceived(NodeRequest request, TransportChannel channel) throws Exception {
|
||||
channel.sendResponse(nodeOperation(request));
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -196,8 +196,8 @@ public abstract class ReplicationRequest<Request extends ReplicationRequest<Requ
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action) {
|
||||
return new ReplicationTask(id, type, action, this::getDescription, getParentTaskNode(), getParentTaskId());
|
||||
public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) {
|
||||
return new ReplicationTask(id, type, action, getDescription(), parentTaskNode, parentTaskId);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -218,4 +218,9 @@ public abstract class ReplicationRequest<Request extends ReplicationRequest<Requ
|
||||
return index;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return toString();
|
||||
}
|
||||
}
|
||||
|
@ -35,7 +35,7 @@ import static java.util.Objects.requireNonNull;
|
||||
public class ReplicationTask extends Task {
|
||||
private volatile String phase = "starting";
|
||||
|
||||
public ReplicationTask(long id, String type, String action, Provider<String> description, String parentNode, long parentId) {
|
||||
public ReplicationTask(long id, String type, String action, String description, String parentNode, long parentId) {
|
||||
super(id, type, action, description, parentNode, parentId);
|
||||
}
|
||||
|
||||
|
@ -121,6 +121,7 @@ public abstract class TransportBroadcastReplicationAction<Request extends Broadc
|
||||
protected void shardExecute(Task task, Request request, ShardId shardId, ActionListener<ShardResponse> shardActionListener) {
|
||||
ShardRequest shardRequest = newShardRequest(request, shardId);
|
||||
shardRequest.setParentTask(clusterService.localNode().getId(), task.getId());
|
||||
taskManager.registerChildTask(task, clusterService.localNode().getId());
|
||||
replicatedBroadcastShardAction.execute(shardRequest, shardActionListener);
|
||||
}
|
||||
|
||||
|
@ -486,6 +486,7 @@ public abstract class TransportReplicationAction<Request extends ReplicationRequ
|
||||
return;
|
||||
}
|
||||
final DiscoveryNode node = state.nodes().get(primary.currentNodeId());
|
||||
taskManager.registerChildTask(task, node.getId());
|
||||
if (primary.currentNodeId().equals(state.nodes().localNodeId())) {
|
||||
setPhase(task, "waiting_on_primary");
|
||||
if (logger.isTraceEnabled()) {
|
||||
|
@ -35,7 +35,6 @@ import java.io.IOException;
|
||||
*/
|
||||
public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends ActionRequest<Request> {
|
||||
|
||||
|
||||
public static final String[] ALL_ACTIONS = Strings.EMPTY_ARRAY;
|
||||
|
||||
public static final String[] ALL_NODES = Strings.EMPTY_ARRAY;
|
||||
@ -52,6 +51,8 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
||||
|
||||
private long parentTaskId = ALL_TASKS;
|
||||
|
||||
private long taskId = ALL_TASKS;
|
||||
|
||||
public BaseTasksRequest() {
|
||||
}
|
||||
|
||||
@ -94,6 +95,22 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
||||
return (Request) this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the id of the task that should be processed.
|
||||
*
|
||||
* By default tasks with any ids are returned.
|
||||
*/
|
||||
public long taskId() {
|
||||
return taskId;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public final Request taskId(long taskId) {
|
||||
this.taskId = taskId;
|
||||
return (Request) this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the parent node id that tasks should be filtered by
|
||||
*/
|
||||
@ -141,6 +158,7 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
nodesIds = in.readStringArray();
|
||||
taskId = in.readLong();
|
||||
actions = in.readStringArray();
|
||||
parentNode = in.readOptionalString();
|
||||
parentTaskId = in.readLong();
|
||||
@ -153,6 +171,7 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeStringArrayNullable(nodesIds);
|
||||
out.writeLong(taskId);
|
||||
out.writeStringArrayNullable(actions);
|
||||
out.writeOptionalString(parentNode);
|
||||
out.writeLong(parentTaskId);
|
||||
@ -163,12 +182,17 @@ public class BaseTasksRequest<Request extends BaseTasksRequest<Request>> extends
|
||||
if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) {
|
||||
return false;
|
||||
}
|
||||
if (taskId() != ALL_TASKS) {
|
||||
if(taskId() != task.getId()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (parentNode() != null) {
|
||||
if (parentNode().equals(task.getParentNode()) == false) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (parentTaskId() != BaseTasksRequest.ALL_TASKS) {
|
||||
if (parentTaskId() != ALL_TASKS) {
|
||||
if (parentTaskId() != task.getParentId()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -19,6 +19,7 @@
|
||||
|
||||
package org.elasticsearch.action.support.tasks;
|
||||
|
||||
import org.elasticsearch.ResourceNotFoundException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.NoSuchNodeException;
|
||||
@ -53,12 +54,14 @@ import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
/**
|
||||
* The base class for transport actions that are interacting with currently running tasks.
|
||||
*/
|
||||
public abstract class TransportTasksAction<
|
||||
OperationTask extends Task,
|
||||
TasksRequest extends BaseTasksRequest<TasksRequest>,
|
||||
TasksResponse extends BaseTasksResponse,
|
||||
TaskResponse extends Writeable<TaskResponse>
|
||||
@ -103,16 +106,16 @@ public abstract class TransportTasksAction<
|
||||
TasksRequest request = nodeTaskRequest.tasksRequest;
|
||||
List<TaskResponse> results = new ArrayList<>();
|
||||
List<TaskOperationFailure> exceptions = new ArrayList<>();
|
||||
for (Task task : taskManager.getTasks().values()) {
|
||||
// First check action and node filters
|
||||
if (request.match(task)) {
|
||||
try {
|
||||
results.add(taskOperation(request, task));
|
||||
} catch (Exception ex) {
|
||||
exceptions.add(new TaskOperationFailure(clusterService.localNode().id(), task.getId(), ex));
|
||||
processTasks(request, task -> {
|
||||
try {
|
||||
TaskResponse response = taskOperation(request, task);
|
||||
if (response != null) {
|
||||
results.add(response);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
exceptions.add(new TaskOperationFailure(clusterService.localNode().id(), task.getId(), ex));
|
||||
}
|
||||
}
|
||||
});
|
||||
return new NodeTasksResponse(clusterService.localNode().id(), results, exceptions);
|
||||
}
|
||||
|
||||
@ -124,6 +127,28 @@ public abstract class TransportTasksAction<
|
||||
return clusterState.nodes().resolveNodesIds(request.nodesIds());
|
||||
}
|
||||
|
||||
protected void processTasks(TasksRequest request, Consumer<OperationTask> operation) {
|
||||
if (request.taskId() != BaseTasksRequest.ALL_TASKS) {
|
||||
// we are only checking one task, we can optimize it
|
||||
Task task = taskManager.getTask(request.taskId());
|
||||
if (task != null) {
|
||||
if (request.match(task)) {
|
||||
operation.accept((OperationTask) task);
|
||||
} else {
|
||||
throw new ResourceNotFoundException("task [{}] doesn't support this operation", request.taskId());
|
||||
}
|
||||
} else {
|
||||
throw new ResourceNotFoundException("task [{}] is missing", request.taskId());
|
||||
}
|
||||
} else {
|
||||
for (Task task : taskManager.getTasks().values()) {
|
||||
if (request.match(task)) {
|
||||
operation.accept((OperationTask)task);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract TasksResponse newResponse(TasksRequest request, List<TaskResponse> tasks, List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions);
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@ -150,7 +175,7 @@ public abstract class TransportTasksAction<
|
||||
|
||||
protected abstract TaskResponse readTaskResponse(StreamInput in) throws IOException;
|
||||
|
||||
protected abstract TaskResponse taskOperation(TasksRequest request, Task task);
|
||||
protected abstract TaskResponse taskOperation(TasksRequest request, OperationTask task);
|
||||
|
||||
protected boolean transportCompress() {
|
||||
return false;
|
||||
@ -213,6 +238,7 @@ public abstract class TransportTasksAction<
|
||||
} else {
|
||||
NodeTaskRequest nodeRequest = new NodeTaskRequest(request);
|
||||
nodeRequest.setParentTask(clusterService.localNode().id(), task.getId());
|
||||
taskManager.registerChildTask(task, node.getId());
|
||||
transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), new BaseTransportResponseHandler<NodeTasksResponse>() {
|
||||
@Override
|
||||
public NodeTasksResponse newInstance() {
|
||||
|
@ -33,6 +33,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
|
||||
@ -287,6 +290,29 @@ public interface ClusterAdminClient extends ElasticsearchClient {
|
||||
*/
|
||||
ListTasksRequestBuilder prepareListTasks(String... nodesIds);
|
||||
|
||||
/**
|
||||
* Cancel tasks
|
||||
*
|
||||
* @param request The nodes tasks request
|
||||
* @return The result future
|
||||
* @see org.elasticsearch.client.Requests#cancelTasksRequest(String...)
|
||||
*/
|
||||
ActionFuture<CancelTasksResponse> cancelTasks(CancelTasksRequest request);
|
||||
|
||||
/**
|
||||
* Cancel active tasks
|
||||
*
|
||||
* @param request The nodes tasks request
|
||||
* @param listener A cancelener to be notified with a result
|
||||
* @see org.elasticsearch.client.Requests#cancelTasksRequest(String...)
|
||||
*/
|
||||
void cancelTasks(CancelTasksRequest request, ActionListener<CancelTasksResponse> listener);
|
||||
|
||||
/**
|
||||
* Cancel active tasks
|
||||
*/
|
||||
CancelTasksRequestBuilder prepareCancelTasks(String... nodesIds);
|
||||
|
||||
/**
|
||||
* Returns list of shards the given search would be executed on.
|
||||
*/
|
||||
|
@ -22,6 +22,7 @@ package org.elasticsearch.client;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
|
||||
@ -420,12 +421,23 @@ public class Requests {
|
||||
*
|
||||
* @param nodesIds The nodes ids to get the tasks for
|
||||
* @return The nodes tasks request
|
||||
* @see org.elasticsearch.client.ClusterAdminClient#nodesStats(org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest)
|
||||
* @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest)
|
||||
*/
|
||||
public static ListTasksRequest listTasksRequest(String... nodesIds) {
|
||||
return new ListTasksRequest(nodesIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a nodes tasks request against one or more nodes. Pass <tt>null</tt> or an empty array for all nodes.
|
||||
*
|
||||
* @param nodesIds The nodes ids to cancel the tasks on
|
||||
* @return The nodes tasks request
|
||||
* @see org.elasticsearch.client.ClusterAdminClient#cancelTasks(CancelTasksRequest)
|
||||
*/
|
||||
public static CancelTasksRequest cancelTasksRequest(String... nodesIds) {
|
||||
return new CancelTasksRequest(nodesIds);
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers snapshot repository
|
||||
*
|
||||
|
@ -41,6 +41,10 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder;
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder;
|
||||
@ -992,6 +996,22 @@ public abstract class AbstractClient extends AbstractComponent implements Client
|
||||
return new ListTasksRequestBuilder(this, ListTasksAction.INSTANCE).setNodesIds(nodesIds);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ActionFuture<CancelTasksResponse> cancelTasks(CancelTasksRequest request) {
|
||||
return execute(CancelTasksAction.INSTANCE, request);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void cancelTasks(CancelTasksRequest request, ActionListener<CancelTasksResponse> listener) {
|
||||
execute(CancelTasksAction.INSTANCE, request, listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CancelTasksRequestBuilder prepareCancelTasks(String... nodesIds) {
|
||||
return new CancelTasksRequestBuilder(this, CancelTasksAction.INSTANCE).setNodesIds(nodesIds);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionFuture<ClusterSearchShardsResponse> searchShards(final ClusterSearchShardsRequest request) {
|
||||
return execute(ClusterSearchShardsAction.INSTANCE, request);
|
||||
|
@ -290,7 +290,7 @@ public class MetaDataMappingService extends AbstractComponent {
|
||||
if (!MapperService.DEFAULT_MAPPING.equals(mappingType) && !PercolatorService.TYPE_NAME.equals(mappingType) && mappingType.charAt(0) == '_') {
|
||||
throw new InvalidTypeNameException("Document mapping type name can't start with '_'");
|
||||
}
|
||||
final Map<String, MappingMetaData> mappings = new HashMap<>();
|
||||
MetaData.Builder builder = MetaData.builder(currentState.metaData());
|
||||
for (String index : request.indices()) {
|
||||
// do the actual merge here on the master, and update the mapping source
|
||||
IndexService indexService = indicesService.indexService(index);
|
||||
@ -311,7 +311,6 @@ public class MetaDataMappingService extends AbstractComponent {
|
||||
// same source, no changes, ignore it
|
||||
} else {
|
||||
// use the merged mapping source
|
||||
mappings.put(index, new MappingMetaData(mergedMapper));
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("[{}] update_mapping [{}] with source [{}]", index, mergedMapper.type(), updatedSource);
|
||||
} else if (logger.isInfoEnabled()) {
|
||||
@ -320,28 +319,24 @@ public class MetaDataMappingService extends AbstractComponent {
|
||||
|
||||
}
|
||||
} else {
|
||||
mappings.put(index, new MappingMetaData(mergedMapper));
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("[{}] create_mapping [{}] with source [{}]", index, mappingType, updatedSource);
|
||||
} else if (logger.isInfoEnabled()) {
|
||||
logger.info("[{}] create_mapping [{}]", index, mappingType);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (mappings.isEmpty()) {
|
||||
// no changes, return
|
||||
return currentState;
|
||||
}
|
||||
MetaData.Builder builder = MetaData.builder(currentState.metaData());
|
||||
for (String indexName : request.indices()) {
|
||||
IndexMetaData indexMetaData = currentState.metaData().index(indexName);
|
||||
|
||||
IndexMetaData indexMetaData = currentState.metaData().index(index);
|
||||
if (indexMetaData == null) {
|
||||
throw new IndexNotFoundException(indexName);
|
||||
throw new IndexNotFoundException(index);
|
||||
}
|
||||
MappingMetaData mappingMd = mappings.get(indexName);
|
||||
if (mappingMd != null) {
|
||||
builder.put(IndexMetaData.builder(indexMetaData).putMapping(mappingMd));
|
||||
IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(indexMetaData);
|
||||
// Mapping updates on a single type may have side-effects on other types so we need to
|
||||
// update mapping metadata on all types
|
||||
for (DocumentMapper mapper : indexService.mapperService().docMappers(true)) {
|
||||
indexMetaDataBuilder.putMapping(new MappingMetaData(mapper.mappingSource()));
|
||||
}
|
||||
builder.put(indexMetaDataBuilder);
|
||||
}
|
||||
|
||||
return ClusterState.builder(currentState).metaData(builder).build();
|
||||
|
@ -189,6 +189,7 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
|
||||
@Override
|
||||
protected void doStart() {
|
||||
add(localNodeMasterListeners);
|
||||
add(taskManager);
|
||||
this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build();
|
||||
this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME), threadPool.getThreadContext());
|
||||
this.reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, new ReconnectToNodes());
|
||||
|
@ -19,20 +19,23 @@
|
||||
|
||||
package org.elasticsearch.common.hash;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Objects;
|
||||
|
||||
public class MessageDigests {
|
||||
/**
|
||||
* This MessageDigests class provides convenience methods for obtaining
|
||||
* thread local {@link MessageDigest} instances for MD5, SHA-1, and
|
||||
* SHA-256 message digests.
|
||||
*/
|
||||
public final class MessageDigests {
|
||||
|
||||
private static ThreadLocal<MessageDigest> createThreadLocalMessageDigest(String digest) {
|
||||
return ThreadLocal.withInitial(() -> {
|
||||
try {
|
||||
return MessageDigest.getInstance(digest);
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new ElasticsearchException("unexpected exception creating MessageDigest instance for [" + digest + "]", e);
|
||||
throw new IllegalStateException("unexpected exception creating MessageDigest instance for [" + digest + "]", e);
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -41,14 +44,38 @@ public class MessageDigests {
|
||||
private static final ThreadLocal<MessageDigest> SHA_1_DIGEST = createThreadLocalMessageDigest("SHA-1");
|
||||
private static final ThreadLocal<MessageDigest> SHA_256_DIGEST = createThreadLocalMessageDigest("SHA-256");
|
||||
|
||||
/**
|
||||
* Returns a {@link MessageDigest} instance for MD5 digests; note
|
||||
* that the instance returned is thread local and must not be
|
||||
* shared amongst threads.
|
||||
*
|
||||
* @return a thread local {@link MessageDigest} instance that
|
||||
* provides MD5 message digest functionality.
|
||||
*/
|
||||
public static MessageDigest md5() {
|
||||
return get(MD5_DIGEST);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link MessageDigest} instance for SHA-1 digests; note
|
||||
* that the instance returned is thread local and must not be
|
||||
* shared amongst threads.
|
||||
*
|
||||
* @return a thread local {@link MessageDigest} instance that
|
||||
* provides SHA-1 message digest functionality.
|
||||
*/
|
||||
public static MessageDigest sha1() {
|
||||
return get(SHA_1_DIGEST);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link MessageDigest} instance for SHA-256 digests;
|
||||
* note that the instance returned is thread local and must not be
|
||||
* shared amongst threads.
|
||||
*
|
||||
* @return a thread local {@link MessageDigest} instance that
|
||||
* provides SHA-256 message digest functionality.
|
||||
*/
|
||||
public static MessageDigest sha256() {
|
||||
return get(SHA_256_DIGEST);
|
||||
}
|
||||
@ -61,6 +88,12 @@ public class MessageDigests {
|
||||
|
||||
private static final char[] HEX_DIGITS = "0123456789abcdef".toCharArray();
|
||||
|
||||
/**
|
||||
* Format a byte array as a hex string.
|
||||
*
|
||||
* @param bytes the input to be represented as hex.
|
||||
* @return a hex representation of the input as a String.
|
||||
*/
|
||||
public static String toHexString(byte[] bytes) {
|
||||
Objects.requireNonNull(bytes);
|
||||
StringBuilder sb = new StringBuilder(2 * bytes.length);
|
||||
|
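A quick usage sketch (not part of the diff): the instances handed out above are java.security.MessageDigest objects cached per thread, so the typical pattern is obtain, digest, and hex-encode, all on the calling thread.

byte[] data = "hello".getBytes(java.nio.charset.StandardCharsets.UTF_8);
MessageDigest sha = MessageDigests.sha256(); // thread local; never hand it to another thread
byte[] hash = sha.digest(data);              // digest() also resets the instance for the next use
String hex = MessageDigests.toHexString(hash);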
@ -39,7 +39,7 @@ import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
|
||||
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
|
@ -38,7 +38,7 @@ import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.SmoothingModel;
|
||||
import org.elasticsearch.search.suggest.phrase.SmoothingModel;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.joda.time.ReadableInstant;
|
||||
|
||||
|
@ -116,6 +116,11 @@ public class FieldValueFactorFunction extends ScoreFunction {
|
||||
Objects.equals(this.modifier, fieldValueFactorFunction.modifier);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(boostFactor, field, modifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* The Type class encapsulates the modification types that can be applied
|
||||
* to the score/value product.
|
||||
|
@ -25,6 +25,8 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Pseudo randomly generate a score for each {@link LeafScoreFunction#score}.
|
||||
*/
|
||||
@ -92,4 +94,9 @@ public class RandomScoreFunction extends ScoreFunction {
|
||||
return this.originalSeed == randomScoreFunction.originalSeed &&
|
||||
this.saltedSeed == randomScoreFunction.saltedSeed;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(originalSeed, saltedSeed);
|
||||
}
|
||||
}
|
||||
|
@ -66,4 +66,15 @@ public abstract class ScoreFunction {
|
||||
* Indicates whether some other {@link ScoreFunction} object of the same type is "equal to" this one.
|
||||
*/
|
||||
protected abstract boolean doEquals(ScoreFunction other);
|
||||
|
||||
@Override
|
||||
public final int hashCode() {
|
||||
/*
|
||||
* Override hashCode here and forward to an abstract method to force extensions of this class to override hashCode in the same
|
||||
* way that we force them to override equals. This also prevents false positives in CheckStyle's EqualsHashCode check.
|
||||
*/
|
||||
return Objects.hash(scoreCombiner, doHashCode());
|
||||
}
|
||||
|
||||
protected abstract int doHashCode();
|
||||
}
|
||||
|
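Because equals and hashCode are final here and forward to doEquals/doHashCode, two functions of the same concrete type that compare equal also hash equal, which is what the CheckStyle EqualsHashCode check mentioned in the comment verifies at the source level. A small illustration (assuming WeightFactorFunction's single-float constructor; not part of the diff):

ScoreFunction a = new WeightFactorFunction(2.0f);
ScoreFunction b = new WeightFactorFunction(2.0f);
// equal state implies equal hash codes under the doEquals/doHashCode contract
assert a.equals(b) && a.hashCode() == b.hashCode();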
@ -133,4 +133,9 @@ public class ScriptScoreFunction extends ScoreFunction {
|
||||
ScriptScoreFunction scriptScoreFunction = (ScriptScoreFunction) other;
|
||||
return Objects.equals(this.sScript, scriptScoreFunction.sScript);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(sScript);
|
||||
}
|
||||
}
|
@ -93,6 +93,11 @@ public class WeightFactorFunction extends ScoreFunction {
|
||||
Objects.equals(this.scoreFunction, weightFactorFunction.scoreFunction);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(weight, scoreFunction);
|
||||
}
|
||||
|
||||
private static class ScoreOne extends ScoreFunction {
|
||||
|
||||
protected ScoreOne(CombineFunction scoreCombiner) {
|
||||
@ -123,5 +128,10 @@ public class WeightFactorFunction extends ScoreFunction {
|
||||
protected boolean doEquals(ScoreFunction other) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -40,6 +40,7 @@ import org.elasticsearch.rest.action.admin.cluster.health.RestClusterHealthActio
|
||||
import org.elasticsearch.rest.action.admin.cluster.node.hotthreads.RestNodesHotThreadsAction;
|
||||
import org.elasticsearch.rest.action.admin.cluster.node.info.RestNodesInfoAction;
|
||||
import org.elasticsearch.rest.action.admin.cluster.node.stats.RestNodesStatsAction;
|
||||
import org.elasticsearch.rest.action.admin.cluster.node.tasks.RestCancelTasksAction;
|
||||
import org.elasticsearch.rest.action.admin.cluster.node.tasks.RestListTasksAction;
|
||||
import org.elasticsearch.rest.action.admin.cluster.repositories.delete.RestDeleteRepositoryAction;
|
||||
import org.elasticsearch.rest.action.admin.cluster.repositories.get.RestGetRepositoriesAction;
|
||||
@ -265,6 +266,7 @@ public class NetworkModule extends AbstractModule {
|
||||
|
||||
// Tasks API
|
||||
RestListTasksAction.class,
|
||||
RestCancelTasksAction.class,
|
||||
|
||||
// Ingest API
|
||||
RestPutPipelineAction.class,
|
||||
|
@ -38,16 +38,16 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
private ParseFieldMatcher matcher = ParseFieldMatcher.STRICT;
|
||||
|
||||
//Currently this is not a setting that can be changed and is a policy
|
||||
// Currently this is not a setting that can be changed and is a policy
|
||||
// that relates to how parsing of things like "boost" are done across
|
||||
// the whole of Elasticsearch (eg if String "1.0" is a valid float).
|
||||
// The idea behind keeping it as a constant is that we can track
|
||||
// references to this policy decision throughout the codebase and find
|
||||
// and change any code that needs to apply an alternative policy.
|
||||
public static final boolean DEFAULT_NUMBER_COEERCE_POLICY = true;
|
||||
public static final boolean DEFAULT_NUMBER_COERCE_POLICY = true;
|
||||
|
||||
private static void checkCoerceString(boolean coeerce, Class<? extends Number> clazz) {
|
||||
if (!coeerce) {
|
||||
private static void checkCoerceString(boolean coerce, Class<? extends Number> clazz) {
|
||||
if (!coerce) {
|
||||
//Need to throw type IllegalArgumentException as current catch logic in
|
||||
//NumberFieldMapper.parseCreateField relies on this for "malformed" value detection
|
||||
throw new IllegalArgumentException(clazz.getSimpleName() + " value passed as String");
|
||||
@ -102,7 +102,7 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
@Override
|
||||
public short shortValue() throws IOException {
|
||||
return shortValue(DEFAULT_NUMBER_COEERCE_POLICY);
|
||||
return shortValue(DEFAULT_NUMBER_COERCE_POLICY);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -121,7 +121,7 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
@Override
|
||||
public int intValue() throws IOException {
|
||||
return intValue(DEFAULT_NUMBER_COEERCE_POLICY);
|
||||
return intValue(DEFAULT_NUMBER_COERCE_POLICY);
|
||||
}
|
||||
|
||||
|
||||
@ -141,7 +141,7 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
@Override
|
||||
public long longValue() throws IOException {
|
||||
return longValue(DEFAULT_NUMBER_COEERCE_POLICY);
|
||||
return longValue(DEFAULT_NUMBER_COERCE_POLICY);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -160,7 +160,7 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
@Override
|
||||
public float floatValue() throws IOException {
|
||||
return floatValue(DEFAULT_NUMBER_COEERCE_POLICY);
|
||||
return floatValue(DEFAULT_NUMBER_COERCE_POLICY);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -178,7 +178,7 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
@Override
|
||||
public double doubleValue() throws IOException {
|
||||
return doubleValue(DEFAULT_NUMBER_COEERCE_POLICY);
|
||||
return doubleValue(DEFAULT_NUMBER_COERCE_POLICY);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
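The renamed constant above is only about spelling; the behaviour it controls is whether the numeric accessors accept numbers that arrive as strings. As a self-contained sketch of that policy (not the actual parser code):

// Mirrors the intent of checkCoerceString: reject string input when coercion is off.
static int parseIntLike(Object value, boolean coerce) {
    if (value instanceof String) {
        if (coerce == false) {
            throw new IllegalArgumentException("Integer value passed as String");
        }
        return Integer.parseInt((String) value);
    }
    return ((Number) value).intValue();
}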
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.env;
|
||||
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.io.PathUtils;
|
||||
|
@ -145,8 +145,8 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
||||
|
||||
public static class TypeParser implements Mapper.TypeParser {
|
||||
@Override
|
||||
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
StringFieldMapper.Builder builder = stringField(name);
|
||||
public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
|
||||
StringFieldMapper.Builder builder = stringField(fieldName);
|
||||
// hack for the fact that string can't just accept true/false for
|
||||
// the index property and still accepts no/not_analyzed/analyzed
|
||||
final Object index = node.remove("index");
|
||||
@ -165,10 +165,10 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
||||
node.put("index", false);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
|
||||
throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
|
||||
}
|
||||
}
|
||||
parseTextField(builder, name, node, parserContext);
|
||||
parseTextField(builder, fieldName, node, parserContext);
|
||||
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
|
||||
Map.Entry<String, Object> entry = iterator.next();
|
||||
String propName = Strings.toUnderscoreCase(entry.getKey());
|
||||
@ -182,7 +182,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
||||
} else if (propName.equals("search_quote_analyzer")) {
|
||||
NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
|
||||
if (analyzer == null) {
|
||||
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
|
||||
throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + fieldName + "]");
|
||||
}
|
||||
builder.searchQuotedAnalyzer(analyzer);
|
||||
iterator.remove();
|
||||
@ -207,7 +207,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
|
||||
} else if (propName.equals("ignore_above")) {
|
||||
builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
|
||||
iterator.remove();
|
||||
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
|
||||
} else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
|
@ -336,7 +336,7 @@ public class TypeParsers {
|
||||
case "false":
|
||||
return false;
|
||||
default:
|
||||
throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true] or [false]");
|
||||
throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]");
|
||||
}
|
||||
} else {
|
||||
final String normalizedIndex = Strings.toUnderscoreCase(index);
|
||||
@ -349,7 +349,7 @@ public class TypeParsers {
|
||||
case "no":
|
||||
return false;
|
||||
default:
|
||||
throw new IllegalArgumentException("Can't parse [index] value [" + index + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
|
||||
throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -388,7 +388,7 @@ public class TypeParsers {
|
||||
}
|
||||
SimilarityProvider similarityProvider = parserContext.getSimilarity(value);
|
||||
if (similarityProvider == null) {
|
||||
throw new MapperParsingException("Unknown Similarity type [" + value + "] for [" + name + "]");
|
||||
throw new MapperParsingException("Unknown Similarity type [" + value + "] for field [" + name + "]");
|
||||
}
|
||||
return similarityProvider;
|
||||
}
|
||||
|
@ -383,6 +383,11 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder> ext
|
||||
return super.doEquals(other) &&
|
||||
Objects.equals(this.origin, geoFieldDataScoreFunction.origin);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(super.doHashCode(), origin);
|
||||
}
|
||||
}
|
||||
|
||||
static class NumericFieldDataScoreFunction extends AbstractDistanceScoreFunction {
|
||||
@ -533,5 +538,10 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder> ext
|
||||
Objects.equals(this.func, distanceScoreFunction.func) &&
|
||||
Objects.equals(this.getFieldName(), distanceScoreFunction.getFieldName());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(scale, offset, mode, func, getFieldName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -38,7 +38,8 @@ public final class ConfigurationUtils {
|
||||
*
|
||||
* If the property value isn't of type string a {@link ElasticsearchParseException} is thrown.
|
||||
*/
|
||||
public static String readOptionalStringProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
|
||||
public static String readOptionalStringProperty(String processorType, String processorTag, Map<String, Object> configuration,
|
||||
String propertyName) {
|
||||
Object value = configuration.remove(propertyName);
|
||||
return readString(processorType, processorTag, propertyName, value);
|
||||
}
|
||||
@ -49,7 +50,8 @@ public final class ConfigurationUtils {
|
||||
* If the property value isn't of type string an {@link ElasticsearchParseException} is thrown.
|
||||
* If the property is missing an {@link ElasticsearchParseException} is thrown
|
||||
*/
|
||||
public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
|
||||
public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration,
|
||||
String propertyName) {
|
||||
return readStringProperty(processorType, processorTag, configuration, propertyName, null);
|
||||
}
|
||||
|
||||
@ -59,7 +61,8 @@ public final class ConfigurationUtils {
|
||||
* If the property value isn't of type string a {@link ElasticsearchParseException} is thrown.
|
||||
* If the property is missing and no default value has been specified a {@link ElasticsearchParseException} is thrown
|
||||
*/
|
||||
public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName, String defaultValue) {
|
||||
public static String readStringProperty(String processorType, String processorTag, Map<String, Object> configuration,
|
||||
String propertyName, String defaultValue) {
|
||||
Object value = configuration.remove(propertyName);
|
||||
if (value == null && defaultValue != null) {
|
||||
return defaultValue;
|
||||
@ -76,7 +79,28 @@ public final class ConfigurationUtils {
|
||||
if (value instanceof String) {
|
||||
return (String) value;
|
||||
}
|
||||
throw newConfigurationException(processorType, processorTag, propertyName, "property isn't a string, but of type [" + value.getClass().getName() + "]");
|
||||
throw newConfigurationException(processorType, processorTag, propertyName, "property isn't a string, but of type [" +
|
||||
value.getClass().getName() + "]");
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns and removes the specified property from the specified configuration map.
|
||||
*
|
||||
* If the property value isn't of type int a {@link ElasticsearchParseException} is thrown.
|
||||
* If the property is missing the specified default value is returned
|
||||
*/
|
||||
public static int readIntProperty(String processorType, String processorTag, Map<String, Object> configuration, String propertyName,
|
||||
int defaultValue) {
|
||||
Object value = configuration.remove(propertyName);
|
||||
if (value == null) {
|
||||
return defaultValue;
|
||||
}
|
||||
try {
|
||||
return Integer.parseInt(value.toString());
|
||||
} catch (Throwable t) {
|
||||
throw newConfigurationException(processorType, processorTag, propertyName,
|
||||
"property cannot be converted to an int [" + value.toString() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -84,7 +108,8 @@ public final class ConfigurationUtils {
|
||||
*
|
||||
* If the property value isn't of type list an {@link ElasticsearchParseException} is thrown.
|
||||
*/
|
||||
public static <T> List<T> readOptionalList(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
|
||||
public static <T> List<T> readOptionalList(String processorType, String processorTag, Map<String, Object> configuration,
|
||||
String propertyName) {
|
||||
Object value = configuration.remove(propertyName);
|
||||
if (value == null) {
|
||||
return null;
|
||||
@ -113,7 +138,8 @@ public final class ConfigurationUtils {
|
||||
List<T> stringList = (List<T>) value;
|
||||
return stringList;
|
||||
} else {
|
||||
throw newConfigurationException(processorType, processorTag, propertyName, "property isn't a list, but of type [" + value.getClass().getName() + "]");
|
||||
throw newConfigurationException(processorType, processorTag, propertyName,
|
||||
"property isn't a list, but of type [" + value.getClass().getName() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@ -123,7 +149,8 @@ public final class ConfigurationUtils {
|
||||
* If the property value isn't of type map an {@link ElasticsearchParseException} is thrown.
|
||||
* If the property is missing an {@link ElasticsearchParseException} is thrown
|
||||
*/
|
||||
public static <T> Map<String, T> readMap(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
|
||||
public static <T> Map<String, T> readMap(String processorType, String processorTag, Map<String, Object> configuration,
|
||||
String propertyName) {
|
||||
Object value = configuration.remove(propertyName);
|
||||
if (value == null) {
|
||||
throw newConfigurationException(processorType, processorTag, propertyName, "required property is missing");
|
||||
@ -137,7 +164,8 @@ public final class ConfigurationUtils {
|
||||
*
|
||||
* If the property value isn't of type map an {@link ElasticsearchParseException} is thrown.
|
||||
*/
|
||||
public static <T> Map<String, T> readOptionalMap(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) {
|
||||
public static <T> Map<String, T> readOptionalMap(String processorType, String processorTag, Map<String, Object> configuration,
|
||||
String propertyName) {
|
||||
Object value = configuration.remove(propertyName);
|
||||
if (value == null) {
|
||||
return null;
|
||||
@ -152,7 +180,8 @@ public final class ConfigurationUtils {
|
||||
Map<String, T> map = (Map<String, T>) value;
|
||||
return map;
|
||||
} else {
|
||||
throw newConfigurationException(processorType, processorTag, propertyName, "property isn't a map, but of type [" + value.getClass().getName() + "]");
|
||||
throw newConfigurationException(processorType, processorTag, propertyName,
|
||||
"property isn't a map, but of type [" + value.getClass().getName() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@ -167,7 +196,8 @@ public final class ConfigurationUtils {
|
||||
return value;
|
||||
}
|
||||
|
||||
public static ElasticsearchParseException newConfigurationException(String processorType, String processorTag, String propertyName, String reason) {
|
||||
public static ElasticsearchParseException newConfigurationException(String processorType, String processorTag, String propertyName,
|
||||
String reason) {
|
||||
ElasticsearchParseException exception = new ElasticsearchParseException("[" + propertyName + "] " + reason);
|
||||
|
||||
if (processorType != null) {
|
||||
@ -182,7 +212,8 @@ public final class ConfigurationUtils {
|
||||
return exception;
|
||||
}
|
||||
|
||||
public static List<Processor> readProcessorConfigs(List<Map<String, Map<String, Object>>> processorConfigs, ProcessorsRegistry processorRegistry) throws Exception {
|
||||
public static List<Processor> readProcessorConfigs(List<Map<String, Map<String, Object>>> processorConfigs,
|
||||
ProcessorsRegistry processorRegistry) throws Exception {
|
||||
List<Processor> processors = new ArrayList<>();
|
||||
if (processorConfigs != null) {
|
||||
for (Map<String, Map<String, Object>> processorConfigWithKey : processorConfigs) {
|
||||
@ -197,12 +228,15 @@ public final class ConfigurationUtils {
|
||||
private static Processor readProcessor(ProcessorsRegistry processorRegistry, String type, Map<String, Object> config) throws Exception {
|
||||
Processor.Factory factory = processorRegistry.getProcessorFactory(type);
|
||||
if (factory != null) {
|
||||
List<Map<String, Map<String, Object>>> onFailureProcessorConfigs = ConfigurationUtils.readOptionalList(null, null, config, Pipeline.ON_FAILURE_KEY);
|
||||
List<Map<String, Map<String, Object>>> onFailureProcessorConfigs =
|
||||
ConfigurationUtils.readOptionalList(null, null, config, Pipeline.ON_FAILURE_KEY);
|
||||
|
||||
List<Processor> onFailureProcessors = readProcessorConfigs(onFailureProcessorConfigs, processorRegistry);
|
||||
Processor processor;
|
||||
processor = factory.create(config);
|
||||
if (!config.isEmpty()) {
|
||||
throw new ElasticsearchParseException("processor [" + type + "] doesn't support one or more provided configuration parameters " + Arrays.toString(config.keySet().toArray()));
|
||||
throw new ElasticsearchParseException("processor [{}] doesn't support one or more provided configuration parameters {}",
|
||||
type, Arrays.toString(config.keySet().toArray()));
|
||||
}
|
||||
if (onFailureProcessors.isEmpty()) {
|
||||
return processor;
|
||||
|
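For context (not part of the diff): processor factories consume these helpers while a pipeline definition is parsed, and every read removes the key from the configuration map so leftovers can be flagged as unsupported. The processor type, tag and keys below are hypothetical.

Map<String, Object> config = new HashMap<>();
config.put("field", "message");
config.put("max_length", "100");
String field = ConfigurationUtils.readStringProperty("my_processor", "tag1", config, "field");
int maxLength = ConfigurationUtils.readIntProperty("my_processor", "tag1", config, "max_length", 50);
// any key still left in "config" at this point trips the
// "doesn't support one or more provided configuration parameters" check in readProcessor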
@ -208,17 +208,23 @@ class InstallPluginCommand extends CliTool.Command {
|
||||
return zip;
|
||||
}
|
||||
|
||||
private Path unzip(Path zip, Path pluginsDir) throws IOException {
|
||||
private Path unzip(Path zip, Path pluginsDir) throws IOException, UserError {
|
||||
// unzip plugin to a staging temp dir
|
||||
Path target = Files.createTempDirectory(pluginsDir, ".installing-");
|
||||
Files.createDirectories(target);
|
||||
|
||||
boolean hasEsDir = false;
|
||||
// TODO: we should wrap this in a try/catch and try deleting the target dir on failure?
|
||||
try (ZipInputStream zipInput = new ZipInputStream(Files.newInputStream(zip))) {
|
||||
ZipEntry entry;
|
||||
byte[] buffer = new byte[8192];
|
||||
while ((entry = zipInput.getNextEntry()) != null) {
|
||||
Path targetFile = target.resolve(entry.getName());
|
||||
if (entry.getName().startsWith("elasticsearch/") == false) {
|
||||
// only extract the elasticsearch directory
|
||||
continue;
|
||||
}
|
||||
hasEsDir = true;
|
||||
Path targetFile = target.resolve(entry.getName().substring("elasticsearch/".length()));
|
||||
// TODO: handle name being an absolute path
|
||||
|
||||
// be on the safe side: do not rely on that directories are always extracted
|
||||
@ -236,6 +242,10 @@ class InstallPluginCommand extends CliTool.Command {
|
||||
}
|
||||
}
|
||||
Files.delete(zip);
|
||||
if (hasEsDir == false) {
|
||||
IOUtils.rm(target);
|
||||
throw new UserError(CliTool.ExitStatus.DATA_ERROR, "`elasticsearch` directory is missing in the plugin zip");
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
|
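The check above requires plugin archives to keep their contents under a top-level elasticsearch/ directory; archives without it are now rejected with a UserError instead of being silently installed. As a rough sketch only (file names hypothetical, not the project's build code), a conforming zip could be assembled like this:

try (ZipOutputStream zip = new ZipOutputStream(Files.newOutputStream(Paths.get("my-plugin.zip")))) {
    // every entry must live under "elasticsearch/" for unzip() to accept the archive
    zip.putNextEntry(new ZipEntry("elasticsearch/plugin-descriptor.properties"));
    zip.write(Files.readAllBytes(Paths.get("plugin-descriptor.properties")));
    zip.closeEntry();
    zip.putNextEntry(new ZipEntry("elasticsearch/my-plugin.jar"));
    zip.write(Files.readAllBytes(Paths.get("build/libs/my-plugin.jar")));
    zip.closeEntry();
}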
core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestCancelTasksAction.java (new file, 62 lines)
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.rest.action.admin.cluster.node.tasks;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.action.support.RestToXContentListener;
|
||||
|
||||
import static org.elasticsearch.rest.RestRequest.Method.POST;
|
||||
|
||||
|
||||
public class RestCancelTasksAction extends BaseRestHandler {
|
||||
|
||||
@Inject
|
||||
public RestCancelTasksAction(Settings settings, RestController controller, Client client) {
|
||||
super(settings, client);
|
||||
controller.registerHandler(POST, "/_tasks/_cancel", this);
|
||||
controller.registerHandler(POST, "/_tasks/{nodeId}/_cancel", this);
|
||||
controller.registerHandler(POST, "/_tasks/{nodeId}/{taskId}/_cancel", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
|
||||
String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId"));
|
||||
long taskId = request.paramAsLong("taskId", ListTasksRequest.ALL_TASKS);
|
||||
String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
|
||||
String parentNode = request.param("parent_node");
|
||||
long parentTaskId = request.paramAsLong("parent_task", ListTasksRequest.ALL_TASKS);
|
||||
|
||||
CancelTasksRequest cancelTasksRequest = new CancelTasksRequest(nodesIds);
|
||||
cancelTasksRequest.taskId(taskId);
|
||||
cancelTasksRequest.actions(actions);
|
||||
cancelTasksRequest.parentNode(parentNode);
|
||||
cancelTasksRequest.parentTaskId(parentTaskId);
|
||||
client.admin().cluster().cancelTasks(cancelTasksRequest, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
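The routes registered above map directly onto CancelTasksRequest. Purely as an illustration (node and task ids hypothetical):

// POST /_tasks/_cancel                -> cancel matching tasks on all nodes
// POST /_tasks/node_1/_cancel         -> cancel on node_1 only
// POST /_tasks/node_1/42/_cancel      -> cancel task 42 on node_1
// optional query parameters: actions, parent_node, parent_task
CancelTasksRequest request = new CancelTasksRequest("node_1");
request.taskId(42);
request.actions("*search*");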
core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java (4 lines changed)
@ -40,18 +40,20 @@ public class RestListTasksAction extends BaseRestHandler {
|
||||
super(settings, client);
|
||||
controller.registerHandler(GET, "/_tasks", this);
|
||||
controller.registerHandler(GET, "/_tasks/{nodeId}", this);
|
||||
controller.registerHandler(GET, "/_tasks/{nodeId}/{actions}", this);
|
||||
controller.registerHandler(GET, "/_tasks/{nodeId}/{taskId}", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
|
||||
boolean detailed = request.paramAsBoolean("detailed", false);
|
||||
String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId"));
|
||||
long taskId = request.paramAsLong("taskId", ListTasksRequest.ALL_TASKS);
|
||||
String[] actions = Strings.splitStringByCommaToArray(request.param("actions"));
|
||||
String parentNode = request.param("parent_node");
|
||||
long parentTaskId = request.paramAsLong("parent_task", ListTasksRequest.ALL_TASKS);
|
||||
|
||||
ListTasksRequest listTasksRequest = new ListTasksRequest(nodesIds);
|
||||
listTasksRequest.taskId(taskId);
|
||||
listTasksRequest.detailed(detailed);
|
||||
listTasksRequest.actions(actions);
|
||||
listTasksRequest.parentNode(parentNode);
|
||||
|
core/src/main/java/org/elasticsearch/rest/action/admin/indices/alias/RestIndicesAliasesAction.java (2 lines changed)
@ -133,7 +133,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
|
||||
}
|
||||
|
||||
if (type == AliasAction.Type.ADD) {
|
||||
AliasActions aliasActions = new AliasActions(type, indices, aliases);
|
||||
AliasActions aliasActions = new AliasActions(type, indices, aliases).filter(filter);
|
||||
if (routingSet) {
|
||||
aliasActions.routing(routing);
|
||||
}
|
||||
|
@ -22,41 +22,115 @@ package org.elasticsearch.search.sort;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A geo distance based sorting on a geo point like field.
|
||||
*/
|
||||
public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
public class GeoDistanceSortBuilder extends SortBuilder
|
||||
implements ToXContent, NamedWriteable<GeoDistanceSortBuilder>, SortElementParserTemp<GeoDistanceSortBuilder> {
|
||||
public static final String NAME = "_geo_distance";
|
||||
public static final boolean DEFAULT_COERCE = false;
|
||||
public static final boolean DEFAULT_IGNORE_MALFORMED = false;
|
||||
|
||||
final String fieldName;
|
||||
static final GeoDistanceSortBuilder PROTOTYPE = new GeoDistanceSortBuilder("", -1, -1);
|
||||
|
||||
private final String fieldName;
|
||||
private final List<GeoPoint> points = new ArrayList<>();
|
||||
private final List<String> geohashes = new ArrayList<>();
|
||||
|
||||
private GeoDistance geoDistance;
|
||||
private DistanceUnit unit;
|
||||
private SortOrder order;
|
||||
private String sortMode;
|
||||
private GeoDistance geoDistance = GeoDistance.DEFAULT;
|
||||
private DistanceUnit unit = DistanceUnit.DEFAULT;
|
||||
private SortOrder order = SortOrder.ASC;
|
||||
|
||||
// TODO there is an enum that covers that parameter which we should be using here
|
||||
private String sortMode = null;
|
||||
@SuppressWarnings("rawtypes")
|
||||
private QueryBuilder nestedFilter;
|
||||
private String nestedPath;
|
||||
private Boolean coerce;
|
||||
private Boolean ignoreMalformed;
|
||||
|
||||
// TODO switch to GeoValidationMethod enum
|
||||
private boolean coerce = DEFAULT_COERCE;
|
||||
private boolean ignoreMalformed = DEFAULT_IGNORE_MALFORMED;
|
||||
|
||||
/**
|
||||
* Constructs a new distance based sort on a geo point like field.
|
||||
*
|
||||
* @param fieldName The geo point like field name.
|
||||
* @param points The points to create the range distance facets from.
|
||||
*/
|
||||
public GeoDistanceSortBuilder(String fieldName) {
|
||||
public GeoDistanceSortBuilder(String fieldName, GeoPoint... points) {
|
||||
this.fieldName = fieldName;
|
||||
if (points.length == 0) {
|
||||
throw new IllegalArgumentException("Geo distance sorting needs at least one point.");
|
||||
}
|
||||
this.points.addAll(Arrays.asList(points));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new distance based sort on a geo point like field.
|
||||
*
|
||||
* @param fieldName The geo point like field name.
|
||||
* @param lat Latitude of the point to create the range distance facets from.
|
||||
* @param lon Longitude of the point to create the range distance facets from.
|
||||
*/
|
||||
public GeoDistanceSortBuilder(String fieldName, double lat, double lon) {
|
||||
this(fieldName, new GeoPoint(lat, lon));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new distance based sort on a geo point like field.
|
||||
*
|
||||
* @param fieldName The geo point like field name.
|
||||
* @param geohashes The points to create the range distance facets from.
|
||||
*/
|
||||
public GeoDistanceSortBuilder(String fieldName, String ... geohashes) {
|
||||
if (geohashes.length == 0) {
|
||||
throw new IllegalArgumentException("Geo distance sorting needs at least one point.");
|
||||
}
|
||||
for (String geohash : geohashes) {
|
||||
this.points.add(GeoPoint.fromGeohash(geohash));
|
||||
}
|
||||
this.fieldName = fieldName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor.
|
||||
* */
|
||||
GeoDistanceSortBuilder(GeoDistanceSortBuilder original) {
|
||||
this.fieldName = original.fieldName();
|
||||
this.points.addAll(original.points);
|
||||
this.geoDistance = original.geoDistance;
|
||||
this.unit = original.unit;
|
||||
this.order = original.order;
|
||||
this.sortMode = original.sortMode;
|
||||
this.nestedFilter = original.nestedFilter;
|
||||
this.nestedPath = original.nestedPath;
|
||||
this.coerce = original.coerce;
|
||||
this.ignoreMalformed = original.ignoreMalformed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the geo point like field the distance based sort operates on.
|
||||
* */
|
||||
public String fieldName() {
|
||||
return this.fieldName;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -79,15 +153,27 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
this.points.addAll(Arrays.asList(points));
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the points to create the range distance facets from.
|
||||
*/
|
||||
public GeoPoint[] points() {
|
||||
return this.points.toArray(new GeoPoint[this.points.size()]);
|
||||
}
|
||||
|
||||
/**
|
||||
* The geohash of the geo point to create the range distance facets from.
|
||||
*
|
||||
* Deprecated - please use points(GeoPoint... points) instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public GeoDistanceSortBuilder geohashes(String... geohashes) {
|
||||
this.geohashes.addAll(Arrays.asList(geohashes));
|
||||
for (String geohash : geohashes) {
|
||||
this.points.add(GeoPoint.fromGeohash(geohash));
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The geo distance type used to compute the distance.
|
||||
*/
|
||||
@ -95,6 +181,13 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
this.geoDistance = geoDistance;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the geo distance type used to compute the distance.
|
||||
*/
|
||||
public GeoDistance geoDistance() {
|
||||
return this.geoDistance;
|
||||
}
|
||||
|
||||
/**
|
||||
* The distance unit to use. Defaults to {@link org.elasticsearch.common.unit.DistanceUnit#KILOMETERS}
|
||||
@ -104,6 +197,13 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the distance unit to use. Defaults to {@link org.elasticsearch.common.unit.DistanceUnit#KILOMETERS}
|
||||
*/
|
||||
public DistanceUnit unit() {
|
||||
return this.unit;
|
||||
}
|
||||
|
||||
/**
|
||||
* The order of sorting. Defaults to {@link SortOrder#ASC}.
|
||||
*/
|
||||
@ -113,11 +213,18 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Returns the order of sorting. */
|
||||
public SortOrder order() {
|
||||
return this.order;
|
||||
}
|
||||
|
||||
/**
|
||||
* Not relevant.
|
||||
*
|
||||
* TODO should this throw an exception rather than silently ignore a parameter that is not used?
|
||||
*/
|
||||
@Override
|
||||
public SortBuilder missing(Object missing) {
|
||||
public GeoDistanceSortBuilder missing(Object missing) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -126,10 +233,19 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
* Possible values: min and max
|
||||
*/
|
||||
public GeoDistanceSortBuilder sortMode(String sortMode) {
|
||||
MultiValueMode temp = MultiValueMode.fromString(sortMode);
|
||||
if (temp == MultiValueMode.SUM) {
|
||||
throw new IllegalArgumentException("sort_mode [sum] isn't supported for sorting by geo distance");
|
||||
}
|
||||
this.sortMode = sortMode;
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Returns which distance to use for sorting in the case a document contains multiple geo points. */
|
||||
public String sortMode() {
|
||||
return this.sortMode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the nested filter that the nested objects should match with in order to be taken into account
|
||||
* for sorting.
|
||||
@ -139,6 +255,14 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the nested filter that the nested objects should match with in order to be taken into account
|
||||
* for sorting.
|
||||
**/
|
||||
public QueryBuilder getNestedFilter() {
|
||||
return this.nestedFilter;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a
|
||||
* field inside a nested object, the nearest upper nested object is selected as nested path.
|
||||
@ -147,42 +271,53 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
this.nestedPath = nestedPath;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a
|
||||
* field inside a nested object, the nearest upper nested object is selected as nested path.
|
||||
*/
|
||||
public String getNestedPath() {
|
||||
return this.nestedPath;
|
||||
}
|
||||
|
||||
public GeoDistanceSortBuilder coerce(boolean coerce) {
|
||||
this.coerce = coerce;
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean coerce() {
|
||||
return this.coerce;
|
||||
}
|
||||
|
||||
public GeoDistanceSortBuilder ignoreMalformed(boolean ignoreMalformed) {
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
if (coerce == false) {
|
||||
this.ignoreMalformed = ignoreMalformed;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public boolean ignoreMalformed() {
|
||||
return this.ignoreMalformed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject("_geo_distance");
|
||||
if (geohashes.size() == 0 && points.size() == 0) {
|
||||
throw new ElasticsearchParseException("No points provided for _geo_distance sort.");
|
||||
}
|
||||
builder.startObject(NAME);
|
||||
|
||||
builder.startArray(fieldName);
|
||||
for (GeoPoint point : points) {
|
||||
builder.value(point);
|
||||
}
|
||||
for (String geohash : geohashes) {
|
||||
builder.value(geohash);
|
||||
}
|
||||
builder.endArray();
|
||||
|
||||
if (unit != null) {
|
||||
builder.field("unit", unit);
|
||||
}
|
||||
if (geoDistance != null) {
|
||||
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
builder.field("unit", unit);
|
||||
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
|
||||
if (order == SortOrder.DESC) {
|
||||
builder.field("reverse", true);
|
||||
} else {
|
||||
builder.field("reverse", false);
|
||||
}
|
||||
|
||||
if (sortMode != null) {
|
||||
builder.field("mode", sortMode);
|
||||
}
|
||||
@ -193,14 +328,200 @@ public class GeoDistanceSortBuilder extends SortBuilder {
|
||||
if (nestedFilter != null) {
|
||||
builder.field("nested_filter", nestedFilter, params);
|
||||
}
|
||||
if (coerce != null) {
|
||||
builder.field("coerce", coerce);
|
||||
}
|
||||
if (ignoreMalformed != null) {
|
||||
builder.field("ignore_malformed", ignoreMalformed);
|
||||
}
|
||||
builder.field("coerce", coerce);
|
||||
builder.field("ignore_malformed", ignoreMalformed);
|
||||
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object object) {
|
||||
if (this == object) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (object == null || getClass() != object.getClass()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
GeoDistanceSortBuilder other = (GeoDistanceSortBuilder) object;
|
||||
return Objects.equals(fieldName, other.fieldName) &&
|
||||
Objects.deepEquals(points, other.points) &&
|
||||
Objects.equals(geoDistance, other.geoDistance) &&
|
||||
Objects.equals(unit, other.unit) &&
|
||||
Objects.equals(sortMode, other.sortMode) &&
|
||||
Objects.equals(order, other.order) &&
|
||||
Objects.equals(nestedFilter, other.nestedFilter) &&
|
||||
Objects.equals(nestedPath, other.nestedPath) &&
|
||||
Objects.equals(coerce, other.coerce) &&
|
||||
Objects.equals(ignoreMalformed, other.ignoreMalformed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(this.fieldName, this.points, this.geoDistance,
|
||||
this.unit, this.sortMode, this.order, this.nestedFilter, this.nestedPath, this.coerce, this.ignoreMalformed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(fieldName);
|
||||
out.writeGenericValue(points);
|
||||
|
||||
geoDistance.writeTo(out);
|
||||
unit.writeTo(out);
|
||||
order.writeTo(out);
|
||||
out.writeOptionalString(sortMode);
|
||||
if (nestedFilter != null) {
|
||||
out.writeBoolean(true);
|
||||
out.writeQuery(nestedFilter);
|
||||
} else {
|
||||
out.writeBoolean(false);
|
||||
}
|
||||
out.writeOptionalString(nestedPath);
|
||||
out.writeBoolean(coerce);
|
||||
out.writeBoolean(ignoreMalformed);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoDistanceSortBuilder readFrom(StreamInput in) throws IOException {
|
||||
String fieldName = in.readString();
|
||||
|
||||
ArrayList<GeoPoint> points = (ArrayList<GeoPoint>) in.readGenericValue();
|
||||
GeoDistanceSortBuilder result = new GeoDistanceSortBuilder(fieldName, points.toArray(new GeoPoint[points.size()]));
|
||||
|
||||
result.geoDistance(GeoDistance.readGeoDistanceFrom(in));
|
||||
result.unit(DistanceUnit.readDistanceUnit(in));
|
||||
result.order(SortOrder.readOrderFrom(in));
|
||||
String sortMode = in.readOptionalString();
|
||||
if (sortMode != null) {
|
||||
result.sortMode(sortMode);
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
result.setNestedFilter(in.readQuery());
|
||||
}
|
||||
result.setNestedPath(in.readOptionalString());
|
||||
result.coerce(in.readBoolean());
|
||||
result.ignoreMalformed(in.readBoolean());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoDistanceSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException {
|
||||
XContentParser parser = context.parser();
|
||||
String fieldName = null;
|
||||
List<GeoPoint> geoPoints = new ArrayList<>();
|
||||
DistanceUnit unit = DistanceUnit.DEFAULT;
|
||||
GeoDistance geoDistance = GeoDistance.DEFAULT;
|
||||
boolean reverse = false;
|
||||
MultiValueMode sortMode = null;
|
||||
QueryBuilder nestedFilter = null;
|
||||
String nestedPath = null;
|
||||
|
||||
boolean coerce = GeoDistanceSortBuilder.DEFAULT_COERCE;
|
||||
boolean ignoreMalformed = GeoDistanceSortBuilder.DEFAULT_IGNORE_MALFORMED;
|
||||
|
||||
XContentParser.Token token;
|
||||
String currentName = parser.currentName();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
parseGeoPoints(parser, geoPoints);
|
||||
|
||||
fieldName = currentName;
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
// the json in the format of -> field : { lat : 30, lon : 12 }
|
||||
if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) {
|
||||
// TODO Note to remember: while this is kept as a QueryBuilder internally,
|
||||
// we need to make sure to call toFilter() on it once on the shard
|
||||
// (e.g. in the new build() method)
|
||||
nestedFilter = context.parseInnerQueryBuilder();
|
||||
} else {
|
||||
fieldName = currentName;
|
||||
GeoPoint point = new GeoPoint();
|
||||
GeoUtils.parseGeoPoint(parser, point);
|
||||
geoPoints.add(point);
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("reverse".equals(currentName)) {
|
||||
reverse = parser.booleanValue();
|
||||
} else if ("order".equals(currentName)) {
|
||||
reverse = "desc".equals(parser.text());
|
||||
} else if ("unit".equals(currentName)) {
|
||||
unit = DistanceUnit.fromString(parser.text());
|
||||
} else if ("distance_type".equals(currentName) || "distanceType".equals(currentName)) {
|
||||
geoDistance = GeoDistance.fromString(parser.text());
|
||||
} else if ("coerce".equals(currentName) || "normalize".equals(currentName)) {
|
||||
coerce = parser.booleanValue();
|
||||
if (coerce == true) {
|
||||
ignoreMalformed = true;
|
||||
}
|
||||
} else if ("ignore_malformed".equals(currentName)) {
|
||||
boolean ignore_malformed_value = parser.booleanValue();
|
||||
if (coerce == false) {
|
||||
ignoreMalformed = ignore_malformed_value;
|
||||
}
|
||||
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
|
||||
sortMode = MultiValueMode.fromString(parser.text());
|
||||
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
|
||||
nestedPath = parser.text();
|
||||
} else {
|
||||
GeoPoint point = new GeoPoint();
|
||||
point.resetFromString(parser.text());
|
||||
geoPoints.add(point);
|
||||
fieldName = currentName;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GeoDistanceSortBuilder result = new GeoDistanceSortBuilder(fieldName, geoPoints.toArray(new GeoPoint[geoPoints.size()]));
|
||||
result.geoDistance(geoDistance);
|
||||
result.unit(unit);
|
||||
if (reverse) {
|
||||
result.order(SortOrder.DESC);
|
||||
} else {
|
||||
result.order(SortOrder.ASC);
|
||||
}
|
||||
if (sortMode != null) {
|
||||
result.sortMode(sortMode.name());
|
||||
}
|
||||
result.setNestedFilter(nestedFilter);
|
||||
result.setNestedPath(nestedPath);
|
||||
result.coerce(coerce);
|
||||
result.ignoreMalformed(ignoreMalformed);
|
||||
return result;
|
||||
|
||||
}
|
||||
|
||||
static void parseGeoPoints(XContentParser parser, List<GeoPoint> geoPoints) throws IOException {
|
||||
while (!parser.nextToken().equals(XContentParser.Token.END_ARRAY)) {
|
||||
if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
|
||||
// we might get here if the geo point is " number, number] " and the parser already moved over the opening bracket
|
||||
// in this case we cannot use GeoUtils.parseGeoPoint(..) because this expects an opening bracket
|
||||
double lon = parser.doubleValue();
|
||||
parser.nextToken();
|
||||
if (!parser.currentToken().equals(XContentParser.Token.VALUE_NUMBER)) {
|
||||
throw new ElasticsearchParseException(
|
||||
"geo point parsing: expected second number but got [{}] instead",
|
||||
parser.currentToken());
|
||||
}
|
||||
double lat = parser.doubleValue();
|
||||
GeoPoint point = new GeoPoint();
|
||||
point.reset(lat, lon);
|
||||
geoPoints.add(point);
|
||||
} else {
|
||||
GeoPoint point = new GeoPoint();
|
||||
GeoUtils.parseGeoPoint(parser, point);
|
||||
geoPoints.add(point);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
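Taken together, the added getters, defaults and constructors make the builder symmetric: the same object can be built programmatically, serialized with writeTo/readFrom, and reproduced from JSON through fromXContent. A minimal construction sketch (field name and coordinates hypothetical, not part of the diff):

GeoDistanceSortBuilder sort = new GeoDistanceSortBuilder("pin.location", new GeoPoint(40.715, -74.011));
sort.unit(DistanceUnit.KILOMETERS);
sort.geoDistance(GeoDistance.PLANE);
sort.order(SortOrder.ASC);
sort.sortMode("min"); // "sum" would be rejected for geo distance sorting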
@ -72,8 +72,8 @@ public class GeoDistanceSortParser implements SortParser {
|
||||
NestedInnerQueryParseSupport nestedHelper = null;
|
||||
|
||||
final boolean indexCreatedBeforeV2_0 = context.indexShard().getIndexSettings().getIndexVersionCreated().before(Version.V_2_0_0);
|
||||
boolean coerce = false;
|
||||
boolean ignoreMalformed = false;
|
||||
boolean coerce = GeoDistanceSortBuilder.DEFAULT_COERCE;
|
||||
boolean ignoreMalformed = GeoDistanceSortBuilder.DEFAULT_IGNORE_MALFORMED;
|
||||
|
||||
XContentParser.Token token;
|
||||
String currentName = parser.currentName();
|
||||
@ -81,7 +81,7 @@ public class GeoDistanceSortParser implements SortParser {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
parseGeoPoints(parser, geoPoints);
|
||||
GeoDistanceSortBuilder.parseGeoPoints(parser, geoPoints);
|
||||
|
||||
fieldName = currentName;
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
@ -213,26 +213,4 @@ public class GeoDistanceSortParser implements SortParser {
|
||||
return new SortField(fieldName, geoDistanceComparatorSource, reverse);
|
||||
}
|
||||
|
||||
private void parseGeoPoints(XContentParser parser, List<GeoPoint> geoPoints) throws IOException {
|
||||
while (!parser.nextToken().equals(XContentParser.Token.END_ARRAY)) {
|
||||
if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
|
||||
// we might get here if the geo point is " number, number] " and the parser already moved over the opening bracket
|
||||
// in this case we cannot use GeoUtils.parseGeoPoint(..) because this expects an opening bracket
|
||||
double lon = parser.doubleValue();
|
||||
parser.nextToken();
|
||||
if (!parser.currentToken().equals(XContentParser.Token.VALUE_NUMBER)) {
|
||||
throw new ElasticsearchParseException("geo point parsing: expected second number but got [{}] instead", parser.currentToken());
|
||||
}
|
||||
double lat = parser.doubleValue();
|
||||
GeoPoint point = new GeoPoint();
|
||||
point.reset(lat, lon);
|
||||
geoPoints.add(point);
|
||||
} else {
|
||||
GeoPoint point = new GeoPoint();
|
||||
GeoUtils.parseGeoPoint(parser, point);
|
||||
geoPoints.add(point);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -19,8 +19,11 @@
|
||||
|
||||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.script.Script;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* A set of static factory methods for {@link SortBuilder}s.
|
||||
*
|
||||
@ -58,8 +61,31 @@ public class SortBuilders {
|
||||
* A geo distance based sort.
|
||||
*
|
||||
* @param fieldName The geo point like field name.
|
||||
* @param lat Latitude of the point to create the range distance facets from.
|
||||
* @param lon Longitude of the point to create the range distance facets from.
|
||||
*
|
||||
*/
|
||||
public static GeoDistanceSortBuilder geoDistanceSort(String fieldName) {
|
||||
return new GeoDistanceSortBuilder(fieldName);
|
||||
public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, double lat, double lon) {
|
||||
return new GeoDistanceSortBuilder(fieldName, lat, lon);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new distance based sort on a geo point like field.
|
||||
*
|
||||
* @param fieldName The geo point like field name.
|
||||
* @param points The points to create the range distance facets from.
|
||||
*/
|
||||
public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, GeoPoint... points) {
|
||||
return new GeoDistanceSortBuilder(fieldName, points);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new distance based sort on a geo point like field.
|
||||
*
|
||||
* @param fieldName The geo point like field name.
|
||||
* @param geohashes The points to create the range distance facets from.
|
||||
*/
|
||||
public static GeoDistanceSortBuilder geoDistanceSort(String fieldName, String ... geohashes) {
|
||||
return new GeoDistanceSortBuilder(fieldName, geohashes);
|
||||
}
|
||||
}
|
||||
|
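The factory methods now mirror the three builder constructors. A short sketch (field name, coordinates and geohash hypothetical):

GeoDistanceSortBuilder byLatLon  = SortBuilders.geoDistanceSort("pin.location", 40.715, -74.011);
GeoDistanceSortBuilder byPoints  = SortBuilders.geoDistanceSort("pin.location", new GeoPoint(40.715, -74.011));
GeoDistanceSortBuilder byGeohash = SortBuilders.geoDistanceSort("pin.location", "dr5regw3");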
@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.sort;
|
||||
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
// TODO once sort refactoring is done this needs to be merged into SortBuilder
|
||||
public interface SortElementParserTemp<T extends ToXContent> {
|
||||
/**
|
||||
* Creates a new SortBuilder from the json held by the {@link SortElementParserTemp}
|
||||
* in {@link org.elasticsearch.common.xcontent.XContent} format
|
||||
*
|
||||
* @param context
|
||||
* the input parse context. The state on the parser contained in
|
||||
* this context will be changed as a side effect of this method
|
||||
* call
|
||||
* @return the new item
|
||||
*/
|
||||
T fromXContent(QueryParseContext context, String elementName) throws IOException;
|
||||
}
|
@ -51,8 +51,7 @@ public enum SortOrder implements Writeable<SortOrder> {
        }
    };

    public static final SortOrder DEFAULT = DESC;
    private static final SortOrder PROTOTYPE = DEFAULT;
    private static final SortOrder PROTOTYPE = ASC;

    @Override
    public SortOrder readFrom(StreamInput in) throws IOException {
@ -22,14 +22,13 @@ import org.apache.lucene.search.spell.DirectSpellChecker;
import org.apache.lucene.search.spell.StringDistance;
import org.apache.lucene.search.spell.SuggestMode;
import org.apache.lucene.util.automaton.LevenshteinAutomata;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;

public class DirectSpellcheckerSettings {

    // NB: If this changes, make sure to change the default in TermBuilderSuggester
    public static SuggestMode DEFAULT_SUGGEST_MODE = SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
    public static float DEFAULT_ACCURACY = 0.5f;
    public static TermSuggestionBuilder.SortBy DEFAULT_SORT = TermSuggestionBuilder.SortBy.SCORE;
    public static SortBy DEFAULT_SORT = SortBy.SCORE;
    // NB: If this changes, make sure to change the default in TermBuilderSuggester
    public static StringDistance DEFAULT_STRING_DISTANCE = DirectSpellChecker.INTERNAL_LEVENSHTEIN;
    public static int DEFAULT_MAX_EDITS = LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE;
@ -41,7 +40,7 @@ public class DirectSpellcheckerSettings {

    private SuggestMode suggestMode = DEFAULT_SUGGEST_MODE;
    private float accuracy = DEFAULT_ACCURACY;
    private TermSuggestionBuilder.SortBy sort = DEFAULT_SORT;
    private SortBy sort = DEFAULT_SORT;
    private StringDistance stringDistance = DEFAULT_STRING_DISTANCE;
    private int maxEdits = DEFAULT_MAX_EDITS;
    private int maxInspections = DEFAULT_MAX_INSPECTIONS;
@ -66,11 +65,11 @@ public class DirectSpellcheckerSettings {
        this.accuracy = accuracy;
    }

    public TermSuggestionBuilder.SortBy sort() {
    public SortBy sort() {
        return sort;
    }

    public void sort(TermSuggestionBuilder.SortBy sort) {
    public void sort(SortBy sort) {
        this.sort = sort;
    }

@ -0,0 +1,59 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.suggest;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;
import java.util.Locale;
import java.util.Objects;

/**
 * An enum representing the valid sorting options
 */
public enum SortBy implements Writeable<SortBy> {
    /** Sort should first be based on score, then document frequency and then the term itself. */
    SCORE,
    /** Sort should first be based on document frequency, then score and then the term itself. */
    FREQUENCY;

    public static SortBy PROTOTYPE = SCORE;

    @Override
    public void writeTo(final StreamOutput out) throws IOException {
        out.writeVInt(ordinal());
    }

    @Override
    public SortBy readFrom(final StreamInput in) throws IOException {
        int ordinal = in.readVInt();
        if (ordinal < 0 || ordinal >= values().length) {
            throw new IOException("Unknown SortBy ordinal [" + ordinal + "]");
        }
        return values()[ordinal];
    }

    public static SortBy resolve(final String str) {
        Objects.requireNonNull(str, "Input string is null");
        return valueOf(str.toUpperCase(Locale.ROOT));
    }
}
|
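Since the new enum is serialized by ordinal rather than by name, a round trip is just a bounds-checked index lookup, and SortBy.resolve("frequency") likewise maps to FREQUENCY via Locale.ROOT upper-casing. A standalone sketch of the same ordinal contract in plain Java (DataOutputStream/DataInputStream stand in for the StreamOutput/StreamInput pair used above):

    import java.io.*;

    class SortByOrdinalRoundTrip {
        enum SortByLike { SCORE, FREQUENCY }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            new DataOutputStream(bytes).writeInt(SortByLike.FREQUENCY.ordinal());   // writeTo

            int ordinal = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())).readInt();
            if (ordinal < 0 || ordinal >= SortByLike.values().length) {             // readFrom guard
                throw new IOException("Unknown SortBy ordinal [" + ordinal + "]");
            }
            System.out.println(SortByLike.values()[ordinal]);                       // FREQUENCY
        }
    }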
@ -197,7 +197,6 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
|
||||
*/
|
||||
public static class Suggestion<T extends Suggestion.Entry> implements Iterable<T>, Streamable, ToXContent {
|
||||
|
||||
|
||||
public static final int TYPE = 0;
|
||||
protected String name;
|
||||
protected int size;
|
||||
|
@ -26,9 +26,12 @@ import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
@ -60,7 +63,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable<Sugge
|
||||
* The suggest text gets analyzed by the suggest analyzer or the suggest field search analyzer.
|
||||
* For each analyzed token, suggested terms are suggested if possible.
|
||||
*/
|
||||
public SuggestBuilder setText(@Nullable String globalText) {
|
||||
public SuggestBuilder setGlobalText(@Nullable String globalText) {
|
||||
this.globalText = globalText;
|
||||
return this;
|
||||
}
|
||||
@ -68,8 +71,9 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable<Sugge
|
||||
/**
|
||||
* Gets the global suggest text
|
||||
*/
|
||||
public String getText() {
|
||||
return null;
|
||||
@Nullable
|
||||
public String getGlobalText() {
|
||||
return globalText;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -95,15 +99,6 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable<Sugge
|
||||
return suggestions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the possibly null global suggest text that
|
||||
* should be applied as the text for all suggesters.
|
||||
*/
|
||||
@Nullable
|
||||
public String getGlobalText() {
|
||||
return globalText;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
@ -129,7 +124,7 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable<Sugge
|
||||
fieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (parseFieldMatcher.match(fieldName, GLOBAL_TEXT_FIELD)) {
|
||||
suggestBuilder.setText(parser.text());
|
||||
suggestBuilder.setGlobalText(parser.text());
|
||||
} else {
|
||||
throw new IllegalArgumentException("[suggest] does not support [" + fieldName + "]");
|
||||
}
|
||||
@ -146,6 +141,21 @@ public class SuggestBuilder extends ToXContentToBytes implements Writeable<Sugge
|
||||
return suggestBuilder;
|
||||
}
|
||||
|
||||
public SuggestionSearchContext build(QueryShardContext context) throws IOException {
|
||||
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
|
||||
for (SuggestionBuilder<?> suggestionBuilder : suggestions) {
|
||||
SuggestionContext suggestionContext = suggestionBuilder.build(context);
|
||||
if (suggestionContext.getText() == null) {
|
||||
if (globalText == null) {
|
||||
throw new IllegalArgumentException("The required text option is missing");
|
||||
}
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(globalText));
|
||||
}
|
||||
suggestionSearchContext.addSuggestion(suggestionBuilder.name(), suggestionContext);
|
||||
}
|
||||
return suggestionSearchContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestBuilder readFrom(StreamInput in) throws IOException {
|
||||
final SuggestBuilder builder = new SuggestBuilder();
|
||||
|
@ -98,6 +98,16 @@ public final class SuggestParseElement implements SearchParseElement {
|
||||
}
|
||||
}
|
||||
if (suggestionContext != null) {
|
||||
if (suggestText != null) {
|
||||
suggestionContext.setText(suggestText);
|
||||
}
|
||||
if (prefix != null) {
|
||||
suggestionContext.setPrefix(prefix);
|
||||
}
|
||||
if (regex != null) {
|
||||
suggestionContext.setRegex(regex);
|
||||
}
|
||||
|
||||
if (suggestText != null && prefix == null) {
|
||||
suggestionContext.setPrefix(suggestText);
|
||||
suggestionContext.setText(suggestText);
|
||||
@ -108,7 +118,6 @@ public final class SuggestParseElement implements SearchParseElement {
|
||||
suggestionContext.setRegex(regex);
|
||||
suggestionContext.setText(regex);
|
||||
}
|
||||
suggestionContext.setShardContext(shardContext);
|
||||
suggestionContexts.put(suggestionName, suggestionContext);
|
||||
} else {
|
||||
throw new IllegalArgumentException("suggestion context could not be parsed correctly");
|
||||
|
@ -42,7 +42,6 @@ import org.apache.lucene.util.automaton.LevenshteinAutomata;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.io.FastCharArrayReader;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.analysis.CustomAnalyzer;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
@ -50,12 +49,10 @@ import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
|
||||
import org.elasticsearch.index.analysis.TokenFilterFactory;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Comparator;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class SuggestUtils {
|
||||
public static final Comparator<SuggestWord> LUCENE_FREQUENCY = new SuggestWordFrequencyComparator();
|
||||
@ -173,17 +170,6 @@ public final class SuggestUtils {
|
||||
}
|
||||
}
|
||||
|
||||
public static TermSuggestionBuilder.SortBy resolveSort(String sortVal) {
|
||||
sortVal = sortVal.toLowerCase(Locale.US);
|
||||
if ("score".equals(sortVal)) {
|
||||
return TermSuggestionBuilder.SortBy.SCORE;
|
||||
} else if ("frequency".equals(sortVal)) {
|
||||
return TermSuggestionBuilder.SortBy.FREQUENCY;
|
||||
} else {
|
||||
throw new IllegalArgumentException("Illegal suggest sort " + sortVal);
|
||||
}
|
||||
}
|
||||
|
||||
public static StringDistance resolveDistance(String distanceVal) {
|
||||
distanceVal = distanceVal.toLowerCase(Locale.US);
|
||||
if ("internal".equals(distanceVal)) {
|
||||
@ -202,28 +188,6 @@ public final class SuggestUtils {
|
||||
}
|
||||
}
|
||||
|
||||
public static SuggestMode resolveSuggestMode(TermSuggestionBuilder.SuggestMode suggestMode) {
|
||||
Objects.requireNonNull(suggestMode, "suggestMode must not be null");
|
||||
switch (suggestMode) {
|
||||
case MISSING: return SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
|
||||
case POPULAR: return SuggestMode.SUGGEST_MORE_POPULAR;
|
||||
case ALWAYS: return SuggestMode.SUGGEST_ALWAYS;
|
||||
default: throw new IllegalArgumentException("Unknown suggestMode [" + suggestMode + "]");
|
||||
}
|
||||
}
|
||||
|
||||
public static StringDistance resolveStringDistance(TermSuggestionBuilder.StringDistanceImpl stringDistance) {
|
||||
Objects.requireNonNull(stringDistance, "stringDistance must not be null");
|
||||
switch (stringDistance) {
|
||||
case INTERNAL: return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
|
||||
case DAMERAU_LEVENSHTEIN: return new LuceneLevenshteinDistance();
|
||||
case LEVENSTEIN: return new LevensteinDistance();
|
||||
case JAROWINKLER: return new JaroWinklerDistance();
|
||||
case NGRAM: return new NGramDistance();
|
||||
default: throw new IllegalArgumentException("Illegal distance option " + stringDistance);
|
||||
}
|
||||
}
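The enum-based resolvers above map the builder enums onto Lucene types; a usage sketch, assuming the helpers remain callable somewhere after this refactor (the constants are the ones handled by the switches, variable names are arbitrary):

    SuggestMode mode = SuggestUtils.resolveSuggestMode(TermSuggestionBuilder.SuggestMode.POPULAR);
    // -> SuggestMode.SUGGEST_MORE_POPULAR

    StringDistance distance = SuggestUtils.resolveStringDistance(TermSuggestionBuilder.StringDistanceImpl.NGRAM);
    // -> an org.apache.lucene.search.spell.NGramDistance instance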
|
||||
|
||||
public static class Fields {
|
||||
public static final ParseField STRING_DISTANCE = new ParseField("string_distance");
|
||||
public static final ParseField SUGGEST_MODE = new ParseField("suggest_mode");
|
||||
@ -251,7 +215,7 @@ public final class SuggestUtils {
|
||||
} else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) {
|
||||
suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text()));
|
||||
} else if (parseFieldMatcher.match(fieldName, Fields.SORT)) {
|
||||
suggestion.sort(SuggestUtils.resolveSort(parser.text()));
|
||||
suggestion.sort(SortBy.resolve(parser.text()));
|
||||
} else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) {
|
||||
suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text()));
|
||||
} else if (parseFieldMatcher.match(fieldName, Fields.MAX_EDITS)) {
|
||||
@ -297,53 +261,6 @@ public final class SuggestUtils {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transfers the text, prefix, regex, analyzer, fieldname, size and shard size settings from the
|
||||
* original {@link SuggestionBuilder} to the target {@link SuggestionContext}
|
||||
*/
|
||||
public static void suggestionToSuggestionContext(SuggestionBuilder suggestionBuilder, MapperService mapperService,
|
||||
SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException {
|
||||
String analyzerName = suggestionBuilder.analyzer();
|
||||
if (analyzerName != null) {
|
||||
Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName);
|
||||
if (analyzer == null) {
|
||||
throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists");
|
||||
}
|
||||
suggestionContext.setAnalyzer(analyzer);
|
||||
}
|
||||
if (suggestionBuilder.field() != null) {
|
||||
suggestionContext.setField(suggestionBuilder.field());
|
||||
}
|
||||
if (suggestionBuilder.size() != null) {
|
||||
suggestionContext.setSize(suggestionBuilder.size());
|
||||
}
|
||||
if (suggestionBuilder.shardSize() != null) {
|
||||
suggestionContext.setShardSize(suggestionBuilder.shardSize());
|
||||
} else {
|
||||
// if no shard size is set in builder, use size (or at least 5)
|
||||
suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5));
|
||||
}
|
||||
String text = suggestionBuilder.text();
|
||||
if (text != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(text));
|
||||
}
|
||||
String prefix = suggestionBuilder.prefix();
|
||||
if (prefix != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(prefix));
|
||||
}
|
||||
String regex = suggestionBuilder.regex();
|
||||
if (regex != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(regex));
|
||||
}
|
||||
if (text != null && prefix == null) {
|
||||
suggestionContext.setPrefix(BytesRefs.toBytesRef(text));
|
||||
} else if (text == null && prefix != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(prefix));
|
||||
} else if (text == null && regex != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(regex));
|
||||
}
|
||||
}
|
||||
|
||||
public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) {
|
||||
// Verify options and set defaults
|
||||
if (suggestion.getField() == null) {
|
||||
@ -363,7 +280,6 @@ public final class SuggestUtils {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) {
|
||||
if (analyzer instanceof NamedAnalyzer) {
|
||||
analyzer = ((NamedAnalyzer)analyzer).analyzer();
|
||||
|
@ -20,8 +20,6 @@ package org.elasticsearch.search.suggest;
|
||||
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.util.ExtensionPoint;
|
||||
import org.elasticsearch.indices.IndicesService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.search.suggest.completion.CompletionSuggester;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
|
||||
import org.elasticsearch.search.suggest.term.TermSuggester;
|
||||
@ -42,21 +40,17 @@ public final class Suggesters extends ExtensionPoint.ClassMap<Suggester> {
|
||||
this(Collections.emptyMap());
|
||||
}
|
||||
|
||||
@Inject
|
||||
public Suggesters(Map<String, Suggester> suggesters) {
|
||||
super("suggester", Suggester.class, new HashSet<>(Arrays.asList("phrase", "term", "completion")), Suggesters.class, SuggestParseElement.class, SuggestPhase.class);
|
||||
this.parsers = Collections.unmodifiableMap(suggesters);
|
||||
this.parsers = Collections.unmodifiableMap(addBuildIns(suggesters));
|
||||
}
|
||||
|
||||
@Inject
|
||||
public Suggesters(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
|
||||
this(addBuildIns(suggesters, scriptService, indexServices));
|
||||
}
|
||||
|
||||
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters, ScriptService scriptService, IndicesService indexServices) {
|
||||
private static Map<String, Suggester> addBuildIns(Map<String, Suggester> suggesters) {
|
||||
final Map<String, Suggester> map = new HashMap<>();
|
||||
map.put("phrase", new PhraseSuggester(scriptService));
|
||||
map.put("term", new TermSuggester());
|
||||
map.put("completion", new CompletionSuggester());
|
||||
map.put("phrase", PhraseSuggester.PROTOTYPE);
|
||||
map.put("term", TermSuggester.PROTOTYPE);
|
||||
map.put("completion", CompletionSuggester.PROTOTYPE);
|
||||
map.putAll(suggesters);
|
||||
return map;
|
||||
}
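Note the registration order in addBuildIns: the built-in prototypes go into the map first and the injected suggesters are copied in afterwards, so a later entry under the same name wins. The same pattern in miniature (arbitrary strings in place of Suggester instances):

    Map<String, String> registry = new HashMap<>();
    registry.put("term", "built-in TermSuggester");
    registry.putAll(Collections.singletonMap("term", "externally provided suggester"));
    // registry.get("term") -> "externally provided suggester": the later putAll overrides the built-in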
|
||||
|
@ -19,16 +19,18 @@
|
||||
|
||||
package org.elasticsearch.search.suggest;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
@ -72,7 +74,7 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as in {@link SuggestBuilder#setText(String)}, but in the suggestion scope.
|
||||
* Same as in {@link SuggestBuilder#setGlobalText(String)}, but in the suggestion scope.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public T text(String text) {
|
||||
@ -196,12 +198,8 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
|
||||
|
||||
protected abstract SuggestionBuilder<T> innerFromXContent(QueryParseContext parseContext, String name) throws IOException;
|
||||
|
||||
public SuggestionContext build(QueryShardContext context, @Nullable String globalText) throws IOException {
|
||||
public SuggestionContext build(QueryShardContext context) throws IOException {
|
||||
SuggestionContext suggestionContext = innerBuild(context);
|
||||
// copy over common settings to each suggestion builder
|
||||
SuggestUtils.suggestionToSuggestionContext(this, context.getMapperService(), suggestionContext);
|
||||
SuggestUtils.verifySuggestion(context.getMapperService(), new BytesRef(globalText), suggestionContext);
|
||||
suggestionContext.setShardContext(context);
|
||||
// TODO make field mandatory in the builder, then remove this
|
||||
if (suggestionContext.getField() == null) {
|
||||
throw new IllegalArgumentException("The required field option is missing");
|
||||
@ -211,7 +209,65 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
|
||||
|
||||
protected abstract SuggestionContext innerBuild(QueryShardContext context) throws IOException;
|
||||
|
||||
public String getSuggesterName() {
|
||||
/**
|
||||
* Transfers the text, prefix, regex, analyzer, fieldname, size and shard size settings from the
|
||||
* original {@link SuggestionBuilder} to the target {@link SuggestionContext}
|
||||
*/
|
||||
protected void populateCommonFields(MapperService mapperService,
|
||||
SuggestionSearchContext.SuggestionContext suggestionContext) throws IOException {
|
||||
|
||||
Objects.requireNonNull(fieldname, "fieldname must not be null");
|
||||
|
||||
MappedFieldType fieldType = mapperService.fullName(fieldname);
|
||||
if (fieldType == null) {
|
||||
throw new IllegalArgumentException("no mapping found for field [" + fieldname + "]");
|
||||
} else if (analyzer == null) {
|
||||
// no analyzer name passed in, so try the field's analyzer, or the default analyzer
|
||||
if (fieldType.searchAnalyzer() == null) {
|
||||
suggestionContext.setAnalyzer(mapperService.searchAnalyzer());
|
||||
} else {
|
||||
suggestionContext.setAnalyzer(fieldType.searchAnalyzer());
|
||||
}
|
||||
} else {
|
||||
Analyzer luceneAnalyzer = mapperService.analysisService().analyzer(analyzer);
|
||||
if (luceneAnalyzer == null) {
|
||||
throw new IllegalArgumentException("analyzer [" + analyzer + "] doesn't exists");
|
||||
}
|
||||
suggestionContext.setAnalyzer(luceneAnalyzer);
|
||||
}
|
||||
|
||||
suggestionContext.setField(fieldname);
|
||||
|
||||
if (size != null) {
|
||||
suggestionContext.setSize(size);
|
||||
}
|
||||
|
||||
if (shardSize != null) {
|
||||
suggestionContext.setShardSize(shardSize);
|
||||
} else {
|
||||
// if no shard size is set in builder, use size (or at least 5)
|
||||
suggestionContext.setShardSize(Math.max(suggestionContext.getSize(), 5));
|
||||
}
|
||||
|
||||
if (text != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(text));
|
||||
}
|
||||
if (prefix != null) {
|
||||
suggestionContext.setPrefix(BytesRefs.toBytesRef(prefix));
|
||||
}
|
||||
if (regex != null) {
|
||||
suggestionContext.setRegex(BytesRefs.toBytesRef(regex));
|
||||
}
|
||||
if (text != null && prefix == null) {
|
||||
suggestionContext.setPrefix(BytesRefs.toBytesRef(text));
|
||||
} else if (text == null && prefix != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(prefix));
|
||||
} else if (text == null && regex != null) {
|
||||
suggestionContext.setText(BytesRefs.toBytesRef(regex));
|
||||
}
|
||||
}
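The analyzer handling in populateCommonFields follows a fixed precedence: an explicitly named analyzer must exist and wins, otherwise the field's search analyzer is used, otherwise the index-wide default. Condensed into a hypothetical helper (not part of the change), the order is:

    static Analyzer resolveAnalyzer(String analyzerName, MappedFieldType fieldType, MapperService mapperService) {
        if (analyzerName != null) {
            Analyzer named = mapperService.analysisService().analyzer(analyzerName);
            if (named == null) {
                throw new IllegalArgumentException("analyzer [" + analyzerName + "] doesn't exist");
            }
            return named;                                  // explicitly requested analyzer
        }
        if (fieldType.searchAnalyzer() != null) {
            return fieldType.searchAnalyzer();             // analyzer configured on the field
        }
        return mapperService.searchAnalyzer();             // index default search analyzer
    }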
|
||||
|
||||
private String getSuggesterName() {
|
||||
//default impl returns the same as writeable name, but we keep the distinction between the two just to make sure
|
||||
return getWriteableName();
|
||||
}
|
||||
@ -225,6 +281,7 @@ public abstract class SuggestionBuilder<T extends SuggestionBuilder<T>> extends
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public T field(String field) {
|
||||
Objects.requireNonNull(field, "fieldname must not be null");
|
||||
this.fieldname = field;
|
||||
return (T)this;
|
||||
}
|
||||
|
@ -38,17 +38,22 @@ public class SuggestionSearchContext {
|
||||
return suggestions;
|
||||
}
|
||||
|
||||
public static class SuggestionContext {
|
||||
public abstract static class SuggestionContext {
|
||||
|
||||
private BytesRef text;
|
||||
private BytesRef prefix;
|
||||
private BytesRef regex;
|
||||
private final Suggester suggester;
|
||||
private String field;
|
||||
private Analyzer analyzer;
|
||||
private int size = 5;
|
||||
private int shardSize = -1;
|
||||
private QueryShardContext shardContext;
|
||||
private Suggester<?> suggester;
|
||||
|
||||
protected SuggestionContext(Suggester<?> suggester, QueryShardContext shardContext) {
|
||||
this.suggester = suggester;
|
||||
this.shardContext = shardContext;
|
||||
}
|
||||
|
||||
public BytesRef getText() {
|
||||
return text;
|
||||
@ -74,12 +79,8 @@ public class SuggestionSearchContext {
|
||||
this.regex = regex;
|
||||
}
|
||||
|
||||
public SuggestionContext(Suggester suggester) {
|
||||
this.suggester = suggester;
|
||||
}
|
||||
|
||||
public Suggester<SuggestionContext> getSuggester() {
|
||||
return this.suggester;
|
||||
return ((Suggester<SuggestionContext>) suggester);
|
||||
}
|
||||
|
||||
public Analyzer getAnalyzer() {
|
||||
@ -120,10 +121,6 @@ public class SuggestionSearchContext {
|
||||
this.shardSize = shardSize;
|
||||
}
|
||||
|
||||
public void setShardContext(QueryShardContext context) {
|
||||
this.shardContext = context;
|
||||
}
|
||||
|
||||
public QueryShardContext getShardContext() {
|
||||
return this.shardContext;
|
||||
}
|
||||
|
@ -136,7 +136,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
|
||||
@Override
|
||||
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException {
|
||||
MapperService mapperService = shardContext.getMapperService();
|
||||
final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester, mapperService);
|
||||
final CompletionSuggestionContext suggestion = new CompletionSuggestionContext(shardContext);
|
||||
final ContextAndSuggest contextAndSuggest = new ContextAndSuggest(mapperService);
|
||||
TLP_PARSER.parse(parser, suggestion, contextAndSuggest);
|
||||
final XContentParser contextParser = contextAndSuggest.contextParser;
|
||||
|
@ -34,6 +34,7 @@ import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.fielddata.AtomicFieldData;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.suggest.Suggest;
|
||||
@ -52,7 +53,7 @@ import java.util.Set;
|
||||
|
||||
public class CompletionSuggester extends Suggester<CompletionSuggestionContext> {
|
||||
|
||||
static final CompletionSuggester PROTOTYPE = new CompletionSuggester();
|
||||
public static final CompletionSuggester PROTOTYPE = new CompletionSuggester();
|
||||
|
||||
@Override
|
||||
public SuggestContextParser getContextParser() {
|
||||
@ -89,7 +90,8 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
|
||||
final LeafReaderContext subReaderContext = leaves.get(readerIndex);
|
||||
final int subDocId = suggestDoc.doc - subReaderContext.docBase;
|
||||
for (String field : payloadFields) {
|
||||
MappedFieldType payloadFieldType = suggestionContext.getMapperService().fullName(field);
|
||||
MapperService mapperService = suggestionContext.getShardContext().getMapperService();
|
||||
MappedFieldType payloadFieldType = mapperService.fullName(field);
|
||||
if (payloadFieldType != null) {
|
||||
QueryShardContext shardContext = suggestionContext.getShardContext();
|
||||
final AtomicFieldData data = shardContext.getForField(payloadFieldType)
|
||||
|
@ -380,8 +380,12 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
|
||||
|
||||
@Override
|
||||
protected SuggestionContext innerBuild(QueryShardContext context) throws IOException {
|
||||
CompletionSuggestionContext suggestionContext = new CompletionSuggestionContext(context);
|
||||
// copy over common settings to each suggestion builder
|
||||
populateCommonFields(context.getMapperService(), suggestionContext);
|
||||
// NORELEASE
|
||||
throw new UnsupportedOperationException();
|
||||
// still need to populate CompletionSuggestionContext's specific settings
|
||||
return suggestionContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -20,9 +20,8 @@ package org.elasticsearch.search.suggest.completion;
|
||||
|
||||
import org.apache.lucene.search.suggest.document.CompletionQuery;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
|
||||
import org.elasticsearch.search.suggest.Suggester;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
|
||||
import org.elasticsearch.search.suggest.completion.context.ContextMappings;
|
||||
@ -38,18 +37,16 @@ import java.util.Set;
|
||||
*/
|
||||
public class CompletionSuggestionContext extends SuggestionSearchContext.SuggestionContext {
|
||||
|
||||
protected CompletionSuggestionContext(QueryShardContext shardContext) {
|
||||
super(CompletionSuggester.PROTOTYPE, shardContext);
|
||||
}
|
||||
|
||||
private CompletionFieldMapper.CompletionFieldType fieldType;
|
||||
private CompletionSuggestionBuilder.FuzzyOptionsBuilder fuzzyOptionsBuilder;
|
||||
private CompletionSuggestionBuilder.RegexOptionsBuilder regexOptionsBuilder;
|
||||
private Map<String, List<ContextMapping.QueryContext>> queryContexts = Collections.emptyMap();
|
||||
private final MapperService mapperService;
|
||||
private Set<String> payloadFields = Collections.emptySet();
|
||||
|
||||
CompletionSuggestionContext(Suggester suggester, MapperService mapperService) {
|
||||
super(suggester);
|
||||
this.mapperService = mapperService;
|
||||
}
|
||||
|
||||
CompletionFieldMapper.CompletionFieldType getFieldType() {
|
||||
return this.fieldType;
|
||||
}
|
||||
@ -70,11 +67,6 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest
|
||||
this.queryContexts = queryContexts;
|
||||
}
|
||||
|
||||
|
||||
MapperService getMapperService() {
|
||||
return mapperService;
|
||||
}
|
||||
|
||||
void setPayloadFields(Set<String> fields) {
|
||||
this.payloadFields = fields;
|
||||
}
|
||||
|
@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.suggest.SortBy;
|
||||
import org.elasticsearch.search.suggest.SuggestUtils;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.CandidateGenerator;
|
||||
|
||||
@ -369,7 +370,7 @@ public final class DirectCandidateGeneratorBuilder
|
||||
generator.suggestMode(SuggestUtils.resolveSuggestMode(this.suggestMode));
|
||||
}
|
||||
if (this.sort != null) {
|
||||
generator.sort(SuggestUtils.resolveSort(this.sort));
|
||||
generator.sort(SortBy.resolve(this.sort));
|
||||
}
|
||||
if (this.stringDistance != null) {
|
||||
generator.stringDistance(SuggestUtils.resolveDistance(this.stringDistance));
|
||||
|
@ -0,0 +1,126 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.suggest.phrase;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* An <a href="http://en.wikipedia.org/wiki/Additive_smoothing">additive
|
||||
* smoothing</a> model.
|
||||
* <p>
|
||||
* See <a
|
||||
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
|
||||
* Smoothing</a> for details.
|
||||
* </p>
|
||||
*/
|
||||
public final class Laplace extends SmoothingModel {
|
||||
private double alpha = DEFAULT_LAPLACE_ALPHA;
|
||||
private static final String NAME = "laplace";
|
||||
private static final ParseField ALPHA_FIELD = new ParseField("alpha");
|
||||
static final ParseField PARSE_FIELD = new ParseField(NAME);
|
||||
/**
|
||||
* Default alpha parameter for laplace smoothing
|
||||
*/
|
||||
public static final double DEFAULT_LAPLACE_ALPHA = 0.5;
|
||||
public static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA);
|
||||
|
||||
/**
|
||||
* Creates a Laplace smoothing model.
|
||||
*
|
||||
*/
|
||||
public Laplace(double alpha) {
|
||||
this.alpha = alpha;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the laplace model alpha parameter
|
||||
*/
|
||||
public double getAlpha() {
|
||||
return this.alpha;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(ALPHA_FIELD.getPreferredName(), alpha);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SmoothingModel readFrom(StreamInput in) throws IOException {
|
||||
return new Laplace(in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(SmoothingModel other) {
|
||||
Laplace otherModel = (Laplace) other;
|
||||
return Objects.equals(alpha, otherModel.alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final int doHashCode() {
|
||||
return Objects.hash(alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
double alpha = DEFAULT_LAPLACE_ALPHA;
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
}
|
||||
if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, ALPHA_FIELD)) {
|
||||
alpha = parser.doubleValue();
|
||||
}
|
||||
}
|
||||
return new Laplace(alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WordScorerFactory buildWordScorerFactory() {
|
||||
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
|
||||
-> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
|
||||
}
|
||||
}
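For context, additive (Laplace) smoothing simply adds alpha to every raw count before normalizing; outside of Lucene's word-scorer machinery the estimate is just the following (counts and vocabulary size are hypothetical inputs, not values from the change):

    // add-alpha smoothed probability of a word
    static double laplaceProbability(long wordCount, long totalCount, long vocabularySize, double alpha) {
        return (wordCount + alpha) / (totalCount + alpha * vocabularySize);
    }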
|
@ -0,0 +1,176 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.suggest.phrase;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Linear interpolation smoothing model.
|
||||
* <p>
|
||||
* See <a
|
||||
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
|
||||
* Smoothing</a> for details.
|
||||
* </p>
|
||||
*/
|
||||
public final class LinearInterpolation extends SmoothingModel {
|
||||
private static final String NAME = "linear";
|
||||
public static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1);
|
||||
private final double trigramLambda;
|
||||
private final double bigramLambda;
|
||||
private final double unigramLambda;
|
||||
static final ParseField PARSE_FIELD = new ParseField(NAME);
|
||||
private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda");
|
||||
private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda");
|
||||
private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda");
|
||||
|
||||
/**
|
||||
* Creates a linear interpolation smoothing model.
|
||||
*
|
||||
* Note: the lambdas must sum up to one.
|
||||
*
|
||||
* @param trigramLambda
|
||||
* the trigram lambda
|
||||
* @param bigramLambda
|
||||
* the bigram lambda
|
||||
* @param unigramLambda
|
||||
* the unigram lambda
|
||||
*/
|
||||
public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) {
|
||||
double sum = trigramLambda + bigramLambda + unigramLambda;
|
||||
if (Math.abs(sum - 1.0) > 0.001) {
|
||||
throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
|
||||
}
|
||||
this.trigramLambda = trigramLambda;
|
||||
this.bigramLambda = bigramLambda;
|
||||
this.unigramLambda = unigramLambda;
|
||||
}
|
||||
|
||||
public double getTrigramLambda() {
|
||||
return this.trigramLambda;
|
||||
}
|
||||
|
||||
public double getBigramLambda() {
|
||||
return this.bigramLambda;
|
||||
}
|
||||
|
||||
public double getUnigramLambda() {
|
||||
return this.unigramLambda;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(TRIGRAM_FIELD.getPreferredName(), trigramLambda);
|
||||
builder.field(BIGRAM_FIELD.getPreferredName(), bigramLambda);
|
||||
builder.field(UNIGRAM_FIELD.getPreferredName(), unigramLambda);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(trigramLambda);
|
||||
out.writeDouble(bigramLambda);
|
||||
out.writeDouble(unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LinearInterpolation readFrom(StreamInput in) throws IOException {
|
||||
return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(SmoothingModel other) {
|
||||
final LinearInterpolation otherModel = (LinearInterpolation) other;
|
||||
return Objects.equals(trigramLambda, otherModel.trigramLambda) &&
|
||||
Objects.equals(bigramLambda, otherModel.bigramLambda) &&
|
||||
Objects.equals(unigramLambda, otherModel.unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final int doHashCode() {
|
||||
return Objects.hash(trigramLambda, bigramLambda, unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
double trigramLambda = 0.0;
|
||||
double bigramLambda = 0.0;
|
||||
double unigramLambda = 0.0;
|
||||
ParseFieldMatcher matcher = parseContext.parseFieldMatcher();
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (matcher.match(fieldName, TRIGRAM_FIELD)) {
|
||||
trigramLambda = parser.doubleValue();
|
||||
if (trigramLambda < 0) {
|
||||
throw new IllegalArgumentException("trigram_lambda must be positive");
|
||||
}
|
||||
} else if (matcher.match(fieldName, BIGRAM_FIELD)) {
|
||||
bigramLambda = parser.doubleValue();
|
||||
if (bigramLambda < 0) {
|
||||
throw new IllegalArgumentException("bigram_lambda must be positive");
|
||||
}
|
||||
} else if (matcher.match(fieldName, UNIGRAM_FIELD)) {
|
||||
unigramLambda = parser.doubleValue();
|
||||
if (unigramLambda < 0) {
|
||||
throw new IllegalArgumentException("unigram_lambda must be positive");
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[" + NAME + "] unknown token [" + token + "] after [" + fieldName + "]");
|
||||
}
|
||||
}
|
||||
return new LinearInterpolation(trigramLambda, bigramLambda, unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WordScorerFactory buildWordScorerFactory() {
|
||||
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) ->
|
||||
new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda,
|
||||
unigramLambda);
|
||||
}
|
||||
}
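The constructor above enforces that the three lambdas sum to one (within a small tolerance) because they weight a convex combination of the trigram, bigram and unigram estimates. As a sketch, the interpolated score is simply (p3/p2/p1 are placeholders for the raw estimates):

    static double interpolate(double p3, double p2, double p1,
                              double trigramLambda, double bigramLambda, double unigramLambda) {
        return trigramLambda * p3 + bigramLambda * p2 + unigramLambda * p1;
    }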
|
@ -31,12 +31,11 @@ import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.script.CompiledScript;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.Template;
|
||||
import org.elasticsearch.search.suggest.SuggestContextParser;
|
||||
import org.elasticsearch.search.suggest.SuggestUtils;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.StupidBackoff;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -53,7 +52,8 @@ public final class PhraseSuggestParser implements SuggestContextParser {
|
||||
@Override
|
||||
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, QueryShardContext shardContext) throws IOException {
|
||||
MapperService mapperService = shardContext.getMapperService();
|
||||
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
|
||||
ScriptService scriptService = shardContext.getScriptService();
|
||||
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(shardContext);
|
||||
ParseFieldMatcher parseFieldMatcher = mapperService.getIndexSettings().getParseFieldMatcher();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
@ -136,7 +136,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
|
||||
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
|
||||
}
|
||||
Template template = Template.parse(parser, parseFieldMatcher);
|
||||
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
||||
CompiledScript compiledScript = scriptService.compile(template, ScriptContext.Standard.SEARCH, Collections.emptyMap());
|
||||
suggestion.setCollateQueryScript(compiledScript);
|
||||
} else if ("params".equals(fieldName)) {
|
||||
suggestion.setCollateScriptParams(parser.map());
|
||||
|
@ -53,14 +53,8 @@ import java.util.Map;
|
||||
public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
|
||||
private final BytesRef SEPARATOR = new BytesRef(" ");
|
||||
private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion";
|
||||
private final ScriptService scriptService;
|
||||
|
||||
static PhraseSuggester PROTOTYPE;
|
||||
|
||||
public PhraseSuggester(ScriptService scriptService) {
|
||||
this.scriptService = scriptService;
|
||||
PROTOTYPE = this;
|
||||
}
|
||||
public static final PhraseSuggester PROTOTYPE = new PhraseSuggester();
|
||||
|
||||
/*
|
||||
* More Ideas:
|
||||
@ -118,6 +112,7 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
|
||||
// from the index for a correction, collateMatch is updated
|
||||
final Map<String, Object> vars = suggestion.getCollateScriptParams();
|
||||
vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
|
||||
ScriptService scriptService = suggestion.getShardContext().getScriptService();
|
||||
final ExecutableScript executable = scriptService.executable(collateScript, vars);
|
||||
final BytesReference querySource = (BytesReference) executable.run();
|
||||
final ParsedQuery parsedQuery = suggestion.getShardContext().parse(querySource);
|
||||
@ -149,10 +144,6 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
|
||||
return new PhraseSuggestion.Entry(new Text(spare.toString()), 0, spare.length(), cutoffScore);
|
||||
}
|
||||
|
||||
ScriptService scriptService() {
|
||||
return scriptService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestContextParser getContextParser() {
|
||||
return new PhraseSuggestParser(this);
|
||||
|
@ -19,13 +19,8 @@
|
||||
package org.elasticsearch.search.suggest.phrase;
|
||||
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
@ -46,7 +41,6 @@ import org.elasticsearch.search.suggest.SuggestUtils;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator;
|
||||
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
@ -250,7 +244,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
|
||||
|
||||
/**
|
||||
* Sets an explicit smoothing model used for this suggester. The default is
|
||||
* {@link PhraseSuggestionBuilder.StupidBackoff}.
|
||||
* {@link StupidBackoff}.
|
||||
*/
|
||||
public PhraseSuggestionBuilder smoothingModel(SmoothingModel model) {
|
||||
this.model = model;
|
||||
@ -403,402 +397,6 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
|
||||
return builder;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link DirectCandidateGeneratorBuilder}
|
||||
*
|
||||
* @param field
|
||||
* the field this candidate generator operates on.
|
||||
*/
|
||||
public static DirectCandidateGeneratorBuilder candidateGenerator(String field) {
|
||||
return new DirectCandidateGeneratorBuilder(field);
|
||||
}
|
||||
|
||||
/**
|
||||
* A "stupid-backoff" smoothing model simialr to <a
|
||||
* href="http://en.wikipedia.org/wiki/Katz's_back-off_model"> Katz's
|
||||
* Backoff</a>. This model is used as the default if no model is configured.
|
||||
* <p>
|
||||
* See <a
|
||||
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
|
||||
* Smoothing</a> for details.
|
||||
* </p>
|
||||
*/
|
||||
public static final class StupidBackoff extends SmoothingModel {
|
||||
/**
|
||||
* Default discount parameter for {@link StupidBackoff} smoothing
|
||||
*/
|
||||
public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4;
|
||||
public static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT);
|
||||
private double discount = DEFAULT_BACKOFF_DISCOUNT;
|
||||
private static final String NAME = "stupid_backoff";
|
||||
private static final ParseField DISCOUNT_FIELD = new ParseField("discount");
|
||||
private static final ParseField PARSE_FIELD = new ParseField(NAME);
|
||||
|
||||
/**
|
||||
* Creates a Stupid-Backoff smoothing model.
|
||||
*
|
||||
* @param discount
|
||||
* the discount given to lower order ngrams if the higher order ngram doesn't exist
|
||||
*/
|
||||
public StupidBackoff(double discount) {
|
||||
this.discount = discount;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the discount parameter of the model
|
||||
*/
|
||||
public double getDiscount() {
|
||||
return this.discount;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(DISCOUNT_FIELD.getPreferredName(), discount);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public StupidBackoff readFrom(StreamInput in) throws IOException {
|
||||
return new StupidBackoff(in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(SmoothingModel other) {
|
||||
StupidBackoff otherModel = (StupidBackoff) other;
|
||||
return Objects.equals(discount, otherModel.discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final int hashCode() {
|
||||
return Objects.hash(discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
double discount = DEFAULT_BACKOFF_DISCOUNT;
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
}
|
||||
if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, DISCOUNT_FIELD)) {
|
||||
discount = parser.doubleValue();
|
||||
}
|
||||
}
|
||||
return new StupidBackoff(discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WordScorerFactory buildWordScorerFactory() {
|
||||
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
|
||||
-> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
|
||||
}
|
||||
}
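For reference, "stupid backoff" uses the higher-order estimate when the n-gram was observed and otherwise falls back to the lower-order score scaled by the discount (0.4 by default above). Roughly, with placeholder counts (a sketch, not the Lucene scorer):

    static double stupidBackoffScore(long ngramCount, long contextCount, double lowerOrderScore, double discount) {
        if (ngramCount > 0) {
            return (double) ngramCount / contextCount;   // observed: use the raw relative frequency
        }
        return discount * lowerOrderScore;               // unseen: back off with the discount
    }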
|
||||
|
||||
/**
|
||||
* An <a href="http://en.wikipedia.org/wiki/Additive_smoothing">additive
|
||||
* smoothing</a> model.
|
||||
* <p>
|
||||
* See <a
|
||||
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
|
||||
* Smoothing</a> for details.
|
||||
* </p>
|
||||
*/
|
||||
public static final class Laplace extends SmoothingModel {
|
||||
private double alpha = DEFAULT_LAPLACE_ALPHA;
|
||||
private static final String NAME = "laplace";
|
||||
private static final ParseField ALPHA_FIELD = new ParseField("alpha");
|
||||
private static final ParseField PARSE_FIELD = new ParseField(NAME);
|
||||
/**
|
||||
* Default alpha parameter for laplace smoothing
|
||||
*/
|
||||
public static final double DEFAULT_LAPLACE_ALPHA = 0.5;
|
||||
public static final Laplace PROTOTYPE = new Laplace(DEFAULT_LAPLACE_ALPHA);
|
||||
|
||||
/**
|
||||
* Creates a Laplace smoothing model.
|
||||
*
|
||||
*/
|
||||
public Laplace(double alpha) {
|
||||
this.alpha = alpha;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the laplace model alpha parameter
|
||||
*/
|
||||
public double getAlpha() {
|
||||
return this.alpha;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(ALPHA_FIELD.getPreferredName(), alpha);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SmoothingModel readFrom(StreamInput in) throws IOException {
|
||||
return new Laplace(in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(SmoothingModel other) {
|
||||
Laplace otherModel = (Laplace) other;
|
||||
return Objects.equals(alpha, otherModel.alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final int hashCode() {
|
||||
return Objects.hash(alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
double alpha = DEFAULT_LAPLACE_ALPHA;
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
}
|
||||
if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, ALPHA_FIELD)) {
|
||||
alpha = parser.doubleValue();
|
||||
}
|
||||
}
|
||||
return new Laplace(alpha);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WordScorerFactory buildWordScorerFactory() {
|
||||
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
|
||||
-> new LaplaceScorer(reader, terms, field, realWordLikelyhood, separator, alpha);
|
||||
}
|
||||
}
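For reference, a minimal sketch of the request-body shape this model serializes to: toXContent() opens an object named by getWriteableName() and innerToXContent() adds the single alpha field, so a Laplace model renders as {"laplace":{"alpha":...}} inside the suggester's "smoothing" object. XContentFactory.jsonBuilder() and builder.string() are assumed from the common XContent helpers, and the import path assumes Laplace is still reachable as a nested class of PhraseSuggestionBuilder while this refactoring extracts the models into their own files.

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.Laplace;

public class LaplaceXContentSketch {
    public static void main(String[] args) throws Exception {
        Laplace model = new Laplace(0.7);
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();                               // stands in for the enclosing "smoothing" object
        model.toXContent(builder, ToXContent.EMPTY_PARAMS);  // expected shape: {"laplace":{"alpha":0.7}}
        builder.endObject();
        System.out.println(builder.string());
    }
}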
|
||||
|
||||
|
||||
public static abstract class SmoothingModel implements NamedWriteable<SmoothingModel>, ToXContent {
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(getWriteableName());
|
||||
innerToXContent(builder,params);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
SmoothingModel other = (SmoothingModel) obj;
|
||||
return doEquals(other);
|
||||
}
|
||||
|
||||
public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
SmoothingModel model = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
|
||||
model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext);
|
||||
} else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
|
||||
model = Laplace.PROTOTYPE.innerFromXContent(parseContext);
|
||||
} else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
|
||||
model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext);
|
||||
} else {
|
||||
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[smoothing] unknown token [" + token + "] after [" + fieldName + "]");
|
||||
}
|
||||
}
|
||||
return model;
|
||||
}
|
||||
|
||||
public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException;
|
||||
|
||||
public abstract WordScorerFactory buildWordScorerFactory();
|
||||
|
||||
/**
|
||||
* subtype specific implementation of "equals".
|
||||
*/
|
||||
protected abstract boolean doEquals(SmoothingModel other);
|
||||
|
||||
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
|
||||
}
|
||||
|
||||
/**
|
||||
* Linear interpolation smoothing model.
|
||||
* <p>
|
||||
* See <a
|
||||
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
|
||||
* Smoothing</a> for details.
|
||||
* </p>
|
||||
*/
|
||||
public static final class LinearInterpolation extends SmoothingModel {
|
||||
private static final String NAME = "linear";
|
||||
public static final LinearInterpolation PROTOTYPE = new LinearInterpolation(0.8, 0.1, 0.1);
|
||||
private final double trigramLambda;
|
||||
private final double bigramLambda;
|
||||
private final double unigramLambda;
|
||||
private static final ParseField PARSE_FIELD = new ParseField(NAME);
|
||||
private static final ParseField TRIGRAM_FIELD = new ParseField("trigram_lambda");
|
||||
private static final ParseField BIGRAM_FIELD = new ParseField("bigram_lambda");
|
||||
private static final ParseField UNIGRAM_FIELD = new ParseField("unigram_lambda");
|
||||
|
||||
/**
|
||||
* Creates a linear interpolation smoothing model.
|
||||
*
|
||||
* Note: the lambdas must sum up to one.
|
||||
*
|
||||
* @param trigramLambda
|
||||
* the trigram lambda
|
||||
* @param bigramLambda
|
||||
* the bigram lambda
|
||||
* @param unigramLambda
|
||||
* the unigram lambda
|
||||
*/
|
||||
public LinearInterpolation(double trigramLambda, double bigramLambda, double unigramLambda) {
|
||||
double sum = trigramLambda + bigramLambda + unigramLambda;
|
||||
if (Math.abs(sum - 1.0) > 0.001) {
|
||||
throw new IllegalArgumentException("linear smoothing lambdas must sum to 1");
|
||||
}
|
||||
this.trigramLambda = trigramLambda;
|
||||
this.bigramLambda = bigramLambda;
|
||||
this.unigramLambda = unigramLambda;
|
||||
}
|
||||
|
||||
public double getTrigramLambda() {
|
||||
return this.trigramLambda;
|
||||
}
|
||||
|
||||
public double getBigramLambda() {
|
||||
return this.bigramLambda;
|
||||
}
|
||||
|
||||
public double getUnigramLambda() {
|
||||
return this.unigramLambda;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(TRIGRAM_FIELD.getPreferredName(), trigramLambda);
|
||||
builder.field(BIGRAM_FIELD.getPreferredName(), bigramLambda);
|
||||
builder.field(UNIGRAM_FIELD.getPreferredName(), unigramLambda);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(trigramLambda);
|
||||
out.writeDouble(bigramLambda);
|
||||
out.writeDouble(unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LinearInterpolation readFrom(StreamInput in) throws IOException {
|
||||
return new LinearInterpolation(in.readDouble(), in.readDouble(), in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(SmoothingModel other) {
|
||||
final LinearInterpolation otherModel = (LinearInterpolation) other;
|
||||
return Objects.equals(trigramLambda, otherModel.trigramLambda) &&
|
||||
Objects.equals(bigramLambda, otherModel.bigramLambda) &&
|
||||
Objects.equals(unigramLambda, otherModel.unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final int hashCode() {
|
||||
return Objects.hash(trigramLambda, bigramLambda, unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LinearInterpolation innerFromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
double trigramLambda = 0.0;
|
||||
double bigramLambda = 0.0;
|
||||
double unigramLambda = 0.0;
|
||||
ParseFieldMatcher matcher = parseContext.parseFieldMatcher();
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (matcher.match(fieldName, TRIGRAM_FIELD)) {
|
||||
trigramLambda = parser.doubleValue();
|
||||
if (trigramLambda < 0) {
|
||||
throw new IllegalArgumentException("trigram_lambda must be positive");
|
||||
}
|
||||
} else if (matcher.match(fieldName, BIGRAM_FIELD)) {
|
||||
bigramLambda = parser.doubleValue();
|
||||
if (bigramLambda < 0) {
|
||||
throw new IllegalArgumentException("bigram_lambda must be positive");
|
||||
}
|
||||
} else if (matcher.match(fieldName, UNIGRAM_FIELD)) {
|
||||
unigramLambda = parser.doubleValue();
|
||||
if (unigramLambda < 0) {
|
||||
throw new IllegalArgumentException("unigram_lambda must be positive");
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException(
|
||||
"suggester[phrase][smoothing][linear] doesn't support field [" + fieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[" + NAME + "] unknown token [" + token + "] after [" + fieldName + "]");
|
||||
}
|
||||
}
|
||||
return new LinearInterpolation(trigramLambda, bigramLambda, unigramLambda);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WordScorerFactory buildWordScorerFactory() {
|
||||
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator) ->
|
||||
new LinearInterpoatingScorer(reader, terms, field, realWordLikelyhood, separator, trigramLambda, bigramLambda,
|
||||
unigramLambda);
|
||||
}
|
||||
}
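A minimal sketch of the constructor check above: lambdas that sum to 1.0 (within the 0.001 tolerance) are accepted, anything else is rejected before the model ever reaches a scorer. The nested-class import is an assumption, as in the previous sketch.

import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder.LinearInterpolation;

public class LinearInterpolationSketch {
    public static void main(String[] args) {
        // valid: 0.7 + 0.2 + 0.1 == 1.0
        LinearInterpolation ok = new LinearInterpolation(0.7, 0.2, 0.1);
        System.out.println(ok.getTrigramLambda() + "/" + ok.getBigramLambda() + "/" + ok.getUnigramLambda());

        // invalid: sums to 0.9, so the constructor throws
        try {
            new LinearInterpolation(0.5, 0.2, 0.2);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());   // "linear smoothing lambdas must sum to 1"
        }
    }
}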
|
||||
|
||||
@Override
|
||||
protected PhraseSuggestionBuilder innerFromXContent(QueryParseContext parseContext, String suggestionName) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
@ -876,7 +474,6 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
|
||||
"suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
|
||||
}
|
||||
Template template = Template.parse(parser, parseFieldMatcher);
|
||||
// TODO remember to compile script in build() method
|
||||
suggestion.collateQuery(template);
|
||||
} else if (parseFieldMatcher.match(fieldName, PhraseSuggestionBuilder.COLLATE_QUERY_PARAMS)) {
|
||||
suggestion.collateParams(parser.map());
|
||||
@ -904,9 +501,10 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
|
||||
|
||||
@Override
|
||||
public SuggestionContext innerBuild(QueryShardContext context) throws IOException {
|
||||
PhraseSuggestionContext suggestionContext = new PhraseSuggestionContext(PhraseSuggester.PROTOTYPE);
|
||||
PhraseSuggestionContext suggestionContext = new PhraseSuggestionContext(context);
|
||||
MapperService mapperService = context.getMapperService();
|
||||
suggestionContext.setShardContext(context);
|
||||
// copy over common settings to each suggestion builder
|
||||
populateCommonFields(mapperService, suggestionContext);
|
||||
|
||||
suggestionContext.setSeparator(BytesRefs.toBytesRef(this.separator));
|
||||
suggestionContext.setRealWordErrorLikelihood(this.realWordErrorLikelihood);
|
||||
@ -942,18 +540,6 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
|
||||
suggestionContext.setCollatePrune(this.collatePrune);
|
||||
}
|
||||
|
||||
MappedFieldType fieldType = mapperService.fullName(suggestionContext.getField());
|
||||
if (fieldType == null) {
|
||||
throw new IllegalArgumentException("No mapping found for field [" + suggestionContext.getField() + "]");
|
||||
} else if (suggestionContext.getAnalyzer() == null) {
|
||||
// no analyzer name passed in, so try the field's analyzer, or the default analyzer
|
||||
if (fieldType.searchAnalyzer() == null) {
|
||||
suggestionContext.setAnalyzer(mapperService.searchAnalyzer());
|
||||
} else {
|
||||
suggestionContext.setAnalyzer(fieldType.searchAnalyzer());
|
||||
}
|
||||
}
|
||||
|
||||
if (suggestionContext.model() == null) {
|
||||
suggestionContext.setModel(StupidBackoffScorer.FACTORY);
|
||||
}
|
||||
|
@ -20,9 +20,9 @@ package org.elasticsearch.search.suggest.phrase;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.script.CompiledScript;
|
||||
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
|
||||
import org.elasticsearch.search.suggest.Suggester;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
|
||||
import java.util.ArrayList;
|
||||
@ -30,7 +30,7 @@ import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class PhraseSuggestionContext extends SuggestionContext {
|
||||
class PhraseSuggestionContext extends SuggestionContext {
|
||||
static final boolean DEFAULT_COLLATE_PRUNE = false;
|
||||
static final boolean DEFAULT_REQUIRE_UNIGRAM = true;
|
||||
static final float DEFAULT_CONFIDENCE = 1.0f;
|
||||
@ -54,8 +54,8 @@ public class PhraseSuggestionContext extends SuggestionContext {
|
||||
private Map<String, Object> collateScriptParams = new HashMap<>(1);
|
||||
private WordScorer.WordScorerFactory scorer;
|
||||
|
||||
public PhraseSuggestionContext(Suggester<? extends PhraseSuggestionContext> suggester) {
|
||||
super(suggester);
|
||||
public PhraseSuggestionContext(QueryShardContext shardContext) {
|
||||
super(PhraseSuggester.PROTOTYPE, shardContext);
|
||||
}
|
||||
|
||||
public float maxErrors() {
|
||||
@ -154,8 +154,6 @@ public class PhraseSuggestionContext extends SuggestionContext {
|
||||
public void postFilter(Analyzer postFilter) {
|
||||
this.postFilter = postFilter;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
public void setRequireUnigram(boolean requireUnigram) {
|
||||
@ -213,5 +211,4 @@ public class PhraseSuggestionContext extends SuggestionContext {
|
||||
boolean collatePrune() {
|
||||
return prune;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -0,0 +1,105 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.suggest.phrase;
|
||||
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public abstract class SmoothingModel implements NamedWriteable<SmoothingModel>, ToXContent {
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(getWriteableName());
|
||||
innerToXContent(builder,params);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
SmoothingModel other = (SmoothingModel) obj;
|
||||
return doEquals(other);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final int hashCode() {
|
||||
/*
|
||||
* Override hashCode here and forward to an abstract method to force
|
||||
* extensions of this class to override hashCode in the same way that we
|
||||
* force them to override equals. This also prevents false positives in
|
||||
* CheckStyle's EqualsHashCode check.
|
||||
*/
|
||||
return doHashCode();
|
||||
}
|
||||
|
||||
protected abstract int doHashCode();
|
||||
|
||||
public static SmoothingModel fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
ParseFieldMatcher parseFieldMatcher = parseContext.parseFieldMatcher();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
SmoothingModel model = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseFieldMatcher.match(fieldName, LinearInterpolation.PARSE_FIELD)) {
|
||||
model = LinearInterpolation.PROTOTYPE.innerFromXContent(parseContext);
|
||||
} else if (parseFieldMatcher.match(fieldName, Laplace.PARSE_FIELD)) {
|
||||
model = Laplace.PROTOTYPE.innerFromXContent(parseContext);
|
||||
} else if (parseFieldMatcher.match(fieldName, StupidBackoff.PARSE_FIELD)) {
|
||||
model = StupidBackoff.PROTOTYPE.innerFromXContent(parseContext);
|
||||
} else {
|
||||
throw new IllegalArgumentException("suggester[phrase] doesn't support object field [" + fieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"[smoothing] unknown token [" + token + "] after [" + fieldName + "]");
|
||||
}
|
||||
}
|
||||
return model;
|
||||
}
|
||||
|
||||
public abstract SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException;
|
||||
|
||||
public abstract WordScorerFactory buildWordScorerFactory();
|
||||
|
||||
/**
|
||||
* subtype specific implementation of "equals".
|
||||
*/
|
||||
protected abstract boolean doEquals(SmoothingModel other);
|
||||
|
||||
protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
|
||||
}
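A short sketch of what the final equals()/hashCode() pair buys: subclasses only provide doEquals()/doHashCode(), and the class check in equals() keeps different model types from ever comparing equal. StupidBackoff (added below) is used purely for illustration.

import org.elasticsearch.search.suggest.phrase.SmoothingModel;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;

public class SmoothingModelEqualitySketch {
    public static void main(String[] args) {
        SmoothingModel a = new StupidBackoff(0.4);
        SmoothingModel b = new StupidBackoff(0.4);
        SmoothingModel c = new StupidBackoff(0.3);

        System.out.println(a.equals(b));                   // true  - same class, same discount
        System.out.println(a.hashCode() == b.hashCode());  // true  - doHashCode() hashes only the discount
        System.out.println(a.equals(c));                   // false - doEquals() compares the discount
        System.out.println(a.equals("not a model"));       // false - rejected by the class check in equals()
    }
}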
|
@ -0,0 +1,129 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.suggest.phrase;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.search.suggest.phrase.WordScorer.WordScorerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* A "stupid-backoff" smoothing model simialr to <a
|
||||
* href="http://en.wikipedia.org/wiki/Katz's_back-off_model"> Katz's
|
||||
* Backoff</a>. This model is used as the default if no model is configured.
|
||||
* <p>
|
||||
* See <a
|
||||
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
|
||||
* Smoothing</a> for details.
|
||||
* </p>
|
||||
*/
|
||||
public final class StupidBackoff extends SmoothingModel {
|
||||
/**
|
||||
* Default discount parameter for {@link StupidBackoff} smoothing
|
||||
*/
|
||||
public static final double DEFAULT_BACKOFF_DISCOUNT = 0.4;
|
||||
public static final StupidBackoff PROTOTYPE = new StupidBackoff(DEFAULT_BACKOFF_DISCOUNT);
|
||||
private double discount = DEFAULT_BACKOFF_DISCOUNT;
|
||||
private static final String NAME = "stupid_backoff";
|
||||
private static final ParseField DISCOUNT_FIELD = new ParseField("discount");
|
||||
static final ParseField PARSE_FIELD = new ParseField(NAME);
|
||||
|
||||
/**
|
||||
* Creates a Stupid-Backoff smoothing model.
|
||||
*
|
||||
* @param discount
|
||||
* the discount given to lower order ngrams if the higher order ngram doesn't exist
|
||||
*/
|
||||
public StupidBackoff(double discount) {
|
||||
this.discount = discount;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the discount parameter of the model
|
||||
*/
|
||||
public double getDiscount() {
|
||||
return this.discount;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(DISCOUNT_FIELD.getPreferredName(), discount);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeDouble(discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public StupidBackoff readFrom(StreamInput in) throws IOException {
|
||||
return new StupidBackoff(in.readDouble());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(SmoothingModel other) {
|
||||
StupidBackoff otherModel = (StupidBackoff) other;
|
||||
return Objects.equals(discount, otherModel.discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final int doHashCode() {
|
||||
return Objects.hash(discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SmoothingModel innerFromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
double discount = DEFAULT_BACKOFF_DISCOUNT;
|
||||
while ((token = parser.nextToken()) != Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
fieldName = parser.currentName();
|
||||
}
|
||||
if (token.isValue() && parseContext.parseFieldMatcher().match(fieldName, DISCOUNT_FIELD)) {
|
||||
discount = parser.doubleValue();
|
||||
}
|
||||
}
|
||||
return new StupidBackoff(discount);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WordScorerFactory buildWordScorerFactory() {
|
||||
return (IndexReader reader, Terms terms, String field, double realWordLikelyhood, BytesRef separator)
|
||||
-> new StupidBackoffScorer(reader, terms, field, realWordLikelyhood, separator, discount);
|
||||
}
|
||||
}
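A hedged round-trip sketch for the prototype-style serialization above: writeTo() emits only the discount and PROTOTYPE.readFrom() rebuilds an equal instance. BytesStreamOutput and StreamInput.wrap(...) are assumed from the common stream utilities used elsewhere in this codebase.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.suggest.phrase.StupidBackoff;

public class StupidBackoffStreamSketch {
    public static void main(String[] args) throws Exception {
        StupidBackoff original = new StupidBackoff(0.3);

        BytesStreamOutput out = new BytesStreamOutput();
        original.writeTo(out);                              // writes only the discount

        StreamInput in = StreamInput.wrap(out.bytes());     // assumed helper for reading the bytes back
        StupidBackoff copy = StupidBackoff.PROTOTYPE.readFrom(in);

        System.out.println(copy.getDiscount());             // 0.3
        System.out.println(original.equals(copy));          // true
    }
}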
|
@ -42,7 +42,7 @@ public final class TermSuggestParser implements SuggestContextParser {
|
||||
MapperService mapperService = shardContext.getMapperService();
|
||||
XContentParser.Token token;
|
||||
String fieldName = null;
|
||||
TermSuggestionContext suggestion = new TermSuggestionContext(suggester);
|
||||
TermSuggestionContext suggestion = new TermSuggestionContext(shardContext);
|
||||
DirectSpellcheckerSettings settings = suggestion.getDirectSpellCheckerSettings();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
|
@ -40,7 +40,7 @@ import java.util.List;
|
||||
|
||||
public final class TermSuggester extends Suggester<TermSuggestionContext> {
|
||||
|
||||
static final TermSuggester PROTOTYPE = new TermSuggester();
|
||||
public static final TermSuggester PROTOTYPE = new TermSuggester();
|
||||
|
||||
@Override
|
||||
public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare)
|
||||
|
@ -18,13 +18,13 @@
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.term;
|
||||
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilderString;
|
||||
import org.elasticsearch.search.suggest.SortBy;
|
||||
import org.elasticsearch.search.suggest.Suggest.Suggestion;
|
||||
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
|
||||
|
||||
@ -38,6 +38,17 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
|
||||
|
||||
public static final Comparator<Suggestion.Entry.Option> SCORE = new Score();
|
||||
public static final Comparator<Suggestion.Entry.Option> FREQUENCY = new Frequency();
|
||||
public static final int TYPE = 1;
|
||||
|
||||
private SortBy sort;
|
||||
|
||||
public TermSuggestion() {
|
||||
}
|
||||
|
||||
public TermSuggestion(String name, int size, SortBy sort) {
|
||||
super(name, size);
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
// Same behaviour as comparators in suggest module, but for SuggestedWord
|
||||
// Highest score first, then highest freq first, then lowest term first
|
||||
@ -80,17 +91,6 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
|
||||
|
||||
}
|
||||
|
||||
public static final int TYPE = 1;
|
||||
private TermSuggestionBuilder.SortBy sort;
|
||||
|
||||
public TermSuggestion() {
|
||||
}
|
||||
|
||||
public TermSuggestion(String name, int size, TermSuggestionBuilder.SortBy sort) {
|
||||
super(name, size);
|
||||
this.sort = sort;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getType() {
|
||||
return TYPE;
|
||||
@ -111,13 +111,13 @@ public class TermSuggestion extends Suggestion<TermSuggestion.Entry> {
|
||||
@Override
|
||||
protected void innerReadFrom(StreamInput in) throws IOException {
|
||||
super.innerReadFrom(in);
|
||||
sort = TermSuggestionBuilder.SortBy.fromId(in.readByte());
|
||||
sort = SortBy.PROTOTYPE.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void innerWriteTo(StreamOutput out) throws IOException {
|
||||
super.innerWriteTo(out);
|
||||
out.writeByte(sort.id());
|
||||
sort.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -19,8 +19,13 @@
|
||||
|
||||
package org.elasticsearch.search.suggest.term;
|
||||
|
||||
import org.apache.lucene.search.spell.DirectSpellChecker;
|
||||
import org.apache.lucene.search.spell.JaroWinklerDistance;
|
||||
import org.apache.lucene.search.spell.LevensteinDistance;
|
||||
import org.apache.lucene.search.spell.LuceneLevenshteinDistance;
|
||||
import org.apache.lucene.search.spell.NGramDistance;
|
||||
import org.apache.lucene.search.spell.StringDistance;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
@ -29,7 +34,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
|
||||
import org.elasticsearch.search.suggest.SuggestUtils;
|
||||
import org.elasticsearch.search.suggest.SortBy;
|
||||
import org.elasticsearch.search.suggest.SuggestionBuilder;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
|
||||
@ -388,8 +393,22 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
|
||||
|
||||
@Override
|
||||
protected SuggestionContext innerBuild(QueryShardContext context) throws IOException {
|
||||
TermSuggestionContext suggestionContext = new TermSuggestionContext(TermSuggester.PROTOTYPE);
|
||||
return fillSuggestionContext(suggestionContext);
|
||||
TermSuggestionContext suggestionContext = new TermSuggestionContext(context);
|
||||
// copy over common settings to each suggestion builder
|
||||
populateCommonFields(context.getMapperService(), suggestionContext);
|
||||
// Transfers the builder settings to the target TermSuggestionContext
|
||||
DirectSpellcheckerSettings settings = suggestionContext.getDirectSpellCheckerSettings();
|
||||
settings.accuracy(accuracy);
|
||||
settings.maxEdits(maxEdits);
|
||||
settings.maxInspections(maxInspections);
|
||||
settings.maxTermFreq(maxTermFreq);
|
||||
settings.minDocFreq(minDocFreq);
|
||||
settings.minWordLength(minWordLength);
|
||||
settings.prefixLength(prefixLength);
|
||||
settings.sort(sort);
|
||||
settings.stringDistance(stringDistance.toLucene());
|
||||
settings.suggestMode(suggestMode.toLucene());
|
||||
return suggestionContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -447,31 +466,26 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
|
||||
maxTermFreq, prefixLength, minWordLength, minDocFreq);
|
||||
}
|
||||
|
||||
// Transfers the builder settings to the target TermSuggestionContext
|
||||
private TermSuggestionContext fillSuggestionContext(TermSuggestionContext context) {
|
||||
DirectSpellcheckerSettings settings = context.getDirectSpellCheckerSettings();
|
||||
settings.accuracy(accuracy);
|
||||
settings.maxEdits(maxEdits);
|
||||
settings.maxInspections(maxInspections);
|
||||
settings.maxTermFreq(maxTermFreq);
|
||||
settings.minDocFreq(minDocFreq);
|
||||
settings.minWordLength(minWordLength);
|
||||
settings.prefixLength(prefixLength);
|
||||
settings.sort(sort);
|
||||
settings.stringDistance(SuggestUtils.resolveStringDistance(stringDistance));
|
||||
settings.suggestMode(SuggestUtils.resolveSuggestMode(suggestMode));
|
||||
return context;
|
||||
}
|
||||
|
||||
|
||||
/** An enum representing the valid suggest modes. */
|
||||
public enum SuggestMode implements Writeable<SuggestMode> {
|
||||
/** Only suggest terms in the suggest text that aren't in the index. This is the default. */
|
||||
MISSING,
|
||||
MISSING {
|
||||
public org.apache.lucene.search.spell.SuggestMode toLucene() {
|
||||
return org.apache.lucene.search.spell.SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX;
|
||||
}
|
||||
},
|
||||
/** Only suggest terms that occur in more docs than the original suggest text term. */
|
||||
POPULAR,
|
||||
POPULAR {
|
||||
public org.apache.lucene.search.spell.SuggestMode toLucene() {
|
||||
return org.apache.lucene.search.spell.SuggestMode.SUGGEST_MORE_POPULAR;
|
||||
}
|
||||
},
|
||||
/** Suggest any matching suggest terms based on tokens in the suggest text. */
|
||||
ALWAYS;
|
||||
ALWAYS {
|
||||
public org.apache.lucene.search.spell.SuggestMode toLucene() {
|
||||
return org.apache.lucene.search.spell.SuggestMode.SUGGEST_ALWAYS;
|
||||
}
|
||||
};
|
||||
|
||||
protected static SuggestMode PROTOTYPE = MISSING;
|
||||
|
||||
@ -493,70 +507,43 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
|
||||
Objects.requireNonNull(str, "Input string is null");
|
||||
return valueOf(str.toUpperCase(Locale.ROOT));
|
||||
}
|
||||
}
|
||||
|
||||
/** An enum representing the valid sorting options */
|
||||
public enum SortBy implements Writeable<SortBy> {
|
||||
/** Sort should first be based on score, then document frequency and then the term itself. */
|
||||
SCORE((byte) 0x0),
|
||||
/** Sort should first be based on document frequency, then score and then the term itself. */
|
||||
FREQUENCY((byte) 0x1);
|
||||
|
||||
protected static SortBy PROTOTYPE = SCORE;
|
||||
|
||||
private byte id;
|
||||
|
||||
SortBy(byte id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(final StreamOutput out) throws IOException {
|
||||
out.writeVInt(ordinal());
|
||||
}
|
||||
|
||||
@Override
|
||||
public SortBy readFrom(final StreamInput in) throws IOException {
|
||||
int ordinal = in.readVInt();
|
||||
if (ordinal < 0 || ordinal >= values().length) {
|
||||
throw new IOException("Unknown SortBy ordinal [" + ordinal + "]");
|
||||
}
|
||||
return values()[ordinal];
|
||||
}
|
||||
|
||||
public static SortBy resolve(final String str) {
|
||||
Objects.requireNonNull(str, "Input string is null");
|
||||
return valueOf(str.toUpperCase(Locale.ROOT));
|
||||
}
|
||||
|
||||
public byte id() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public static SortBy fromId(byte id) {
|
||||
if (id == 0) {
|
||||
return SCORE;
|
||||
} else if (id == 1) {
|
||||
return FREQUENCY;
|
||||
} else {
|
||||
throw new ElasticsearchException("Illegal suggest sort " + id);
|
||||
}
|
||||
}
|
||||
public abstract org.apache.lucene.search.spell.SuggestMode toLucene();
|
||||
}
|
||||
|
||||
/** An enum representing the valid string edit distance algorithms for determining suggestions. */
|
||||
public enum StringDistanceImpl implements Writeable<StringDistanceImpl> {
|
||||
/** This is the default and is based on <code>damerau_levenshtein</code>, but highly optimized
|
||||
* for comparing string distance for terms inside the index. */
|
||||
INTERNAL,
|
||||
INTERNAL {
|
||||
public StringDistance toLucene() {
|
||||
return DirectSpellChecker.INTERNAL_LEVENSHTEIN;
|
||||
}
|
||||
},
|
||||
/** String distance algorithm based on Damerau-Levenshtein algorithm. */
|
||||
DAMERAU_LEVENSHTEIN,
|
||||
DAMERAU_LEVENSHTEIN {
|
||||
public StringDistance toLucene() {
|
||||
return new LuceneLevenshteinDistance();
|
||||
}
|
||||
},
|
||||
/** String distance algorithm based on Levenstein edit distance algorithm. */
|
||||
LEVENSTEIN,
|
||||
LEVENSTEIN {
|
||||
public StringDistance toLucene() {
|
||||
return new LevensteinDistance();
|
||||
}
|
||||
},
|
||||
/** String distance algorithm based on Jaro-Winkler algorithm. */
|
||||
JAROWINKLER,
|
||||
JAROWINKLER {
|
||||
public StringDistance toLucene() {
|
||||
return new JaroWinklerDistance();
|
||||
}
|
||||
},
|
||||
/** String distance algorithm based on character n-grams. */
|
||||
NGRAM;
|
||||
NGRAM {
|
||||
public StringDistance toLucene() {
|
||||
return new NGramDistance();
|
||||
}
|
||||
};
|
||||
|
||||
protected static StringDistanceImpl PROTOTYPE = INTERNAL;
|
||||
|
||||
@ -592,6 +579,8 @@ public class TermSuggestionBuilder extends SuggestionBuilder<TermSuggestionBuild
|
||||
default: throw new IllegalArgumentException("Illegal distance option " + str);
|
||||
}
|
||||
}
|
||||
|
||||
public abstract StringDistance toLucene();
|
||||
}
|
||||
|
||||
}
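A brief sketch of the new per-constant mapping: resolve(String) (whose tail is visible in the hunk above) normalizes the REST-layer string, and every constant now carries its own toLucene() translation, which innerBuild() feeds straight into the DirectSpellcheckerSettings instead of going through SuggestUtils.

import org.apache.lucene.search.spell.StringDistance;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.StringDistanceImpl;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;

public class TermSuggestEnumSketch {
    public static void main(String[] args) {
        // "popular" from the request body becomes the POPULAR constant ...
        SuggestMode mode = SuggestMode.resolve("popular");
        // ... which maps itself onto the Lucene flag
        System.out.println(mode.toLucene());                        // SUGGEST_MORE_POPULAR

        StringDistance distance = StringDistanceImpl.JAROWINKLER.toLucene();
        System.out.println(distance.getClass().getSimpleName());    // JaroWinklerDistance
    }
}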
|
||||
|
@ -18,16 +18,16 @@
|
||||
*/
|
||||
package org.elasticsearch.search.suggest.term;
|
||||
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
|
||||
import org.elasticsearch.search.suggest.Suggester;
|
||||
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
|
||||
|
||||
final class TermSuggestionContext extends SuggestionContext {
|
||||
|
||||
private final DirectSpellcheckerSettings settings = new DirectSpellcheckerSettings();
|
||||
|
||||
public TermSuggestionContext(Suggester<? extends TermSuggestionContext> suggester) {
|
||||
super(suggester);
|
||||
public TermSuggestionContext(QueryShardContext shardContext) {
|
||||
super(TermSuggester.PROTOTYPE, shardContext);
|
||||
}
|
||||
|
||||
public DirectSpellcheckerSettings getDirectSpellCheckerSettings() {
|
||||
|
@ -0,0 +1,59 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.tasks;
|
||||
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
/**
|
||||
* A task that can be cancelled
|
||||
*/
|
||||
public class CancellableTask extends Task {
|
||||
|
||||
private final AtomicReference<String> reason = new AtomicReference<>();
|
||||
|
||||
public CancellableTask(long id, String type, String action, String description) {
|
||||
super(id, type, action, description);
|
||||
}
|
||||
|
||||
public CancellableTask(long id, String type, String action, String description, String parentNode, long parentId) {
|
||||
super(id, type, action, description, parentNode, parentId);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is called by the task manager when this task is cancelled.
|
||||
*/
|
||||
final void cancel(String reason) {
|
||||
assert reason != null;
|
||||
this.reason.compareAndSet(null, reason);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if this task should be automatically cancelled if the coordinating node that
|
||||
* requested this task left the cluster.
|
||||
*/
|
||||
public boolean cancelOnParentLeaving() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public boolean isCancelled() {
|
||||
return reason.get() != null;
|
||||
}
|
||||
|
||||
}
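A minimal sketch of a task that opts out of the automatic cancellation above, as a long-running task might if it can safely outlive its coordinating node; the class name is illustrative only.

import org.elasticsearch.tasks.CancellableTask;

public class DetachedCancellableTask extends CancellableTask {

    public DetachedCancellableTask(long id, String type, String action, String description) {
        super(id, type, action, description);
    }

    @Override
    public boolean cancelOnParentLeaving() {
        // keep running even if the coordinating node leaves the cluster;
        // the task can still be cancelled explicitly through the task manager
        return false;
    }
}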
|
@ -22,7 +22,6 @@ package org.elasticsearch.tasks;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.inject.Provider;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
|
||||
@ -39,18 +38,18 @@ public class Task {
|
||||
|
||||
private final String action;
|
||||
|
||||
private final Provider<String> description;
|
||||
private final String description;
|
||||
|
||||
private final String parentNode;
|
||||
|
||||
private final long parentId;
|
||||
|
||||
|
||||
public Task(long id, String type, String action, Provider<String> description) {
|
||||
public Task(long id, String type, String action, String description) {
|
||||
this(id, type, action, description, null, NO_PARENT_ID);
|
||||
}
|
||||
|
||||
public Task(long id, String type, String action, Provider<String> description, String parentNode, long parentId) {
|
||||
public Task(long id, String type, String action, String description, String parentNode, long parentId) {
|
||||
this.id = id;
|
||||
this.type = type;
|
||||
this.action = action;
|
||||
@ -104,7 +103,7 @@ public class Task {
|
||||
* Generates task description
|
||||
*/
|
||||
public String getDescription() {
|
||||
return description.get();
|
||||
return description;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -19,34 +19,50 @@
|
||||
|
||||
package org.elasticsearch.tasks;
|
||||
|
||||
import org.elasticsearch.cluster.ClusterChangedEvent;
|
||||
import org.elasticsearch.cluster.ClusterStateListener;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
|
||||
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.CopyOnWriteArrayList;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
/**
|
||||
* Task Manager service for keeping track of currently running tasks on the nodes
|
||||
*/
|
||||
public class TaskManager extends AbstractComponent {
|
||||
public class TaskManager extends AbstractComponent implements ClusterStateListener {
|
||||
|
||||
private final ConcurrentMapLong<Task> tasks = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
|
||||
|
||||
private final ConcurrentMapLong<CancellableTaskHolder> cancellableTasks = ConcurrentCollections
|
||||
.newConcurrentMapLongWithAggressiveConcurrency();
|
||||
|
||||
private final AtomicLong taskIdGenerator = new AtomicLong();
|
||||
|
||||
private final Map<Tuple<String, Long>, String> banedParents = new ConcurrentHashMap<>();
|
||||
|
||||
public TaskManager(Settings settings) {
|
||||
super(settings);
|
||||
}
|
||||
|
||||
private DiscoveryNodes lastDiscoveryNodes = DiscoveryNodes.EMPTY_NODES;
|
||||
|
||||
/**
|
||||
* Registers a task without a parent task
|
||||
* <p>
|
||||
* Returns the task as tracked by the task manager, or null if the request doesn't support the task manager
|
||||
*/
|
||||
public Task register(String type, String action, TransportRequest request) {
|
||||
Task task = request.createTask(taskIdGenerator.incrementAndGet(), type, action);
|
||||
@ -54,24 +70,291 @@ public class TaskManager extends AbstractComponent {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("register {} [{}] [{}] [{}]", task.getId(), type, action, task.getDescription());
|
||||
}
|
||||
Task previousTask = tasks.put(task.getId(), task);
|
||||
assert previousTask == null;
|
||||
|
||||
if (task instanceof CancellableTask) {
|
||||
CancellableTask cancellableTask = (CancellableTask) task;
|
||||
CancellableTaskHolder holder = new CancellableTaskHolder(cancellableTask);
|
||||
CancellableTaskHolder oldHolder = cancellableTasks.put(task.getId(), holder);
|
||||
assert oldHolder == null;
|
||||
// Check if this task was banned before we start it
|
||||
if (task.getParentNode() != null && banedParents.isEmpty() == false) {
|
||||
String reason = banedParents.get(new Tuple<>(task.getParentNode(), task.getParentId()));
|
||||
if (reason != null) {
|
||||
try {
|
||||
holder.cancel(reason);
|
||||
throw new IllegalStateException("Task cancelled before it started: " + reason);
|
||||
} finally {
|
||||
// let's clean up the registration
|
||||
unregister(task);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Task previousTask = tasks.put(task.getId(), task);
|
||||
assert previousTask == null;
|
||||
}
|
||||
|
||||
}
|
||||
return task;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancels a task
|
||||
* <p>
|
||||
* Returns a set of nodes with child tasks where this task should be cancelled if cancellation was successful, null otherwise.
|
||||
*/
|
||||
public Set<String> cancel(CancellableTask task, String reason, Consumer<Set<String>> listener) {
|
||||
CancellableTaskHolder holder = cancellableTasks.get(task.getId());
|
||||
if (holder != null) {
|
||||
logger.trace("cancelling task with id {}", task.getId());
|
||||
return holder.cancel(reason, listener);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregister the task
|
||||
*/
|
||||
public Task unregister(Task task) {
|
||||
logger.trace("unregister task for id: {}", task.getId());
|
||||
return tasks.remove(task.getId());
|
||||
if (task instanceof CancellableTask) {
|
||||
CancellableTaskHolder holder = cancellableTasks.remove(task.getId());
|
||||
if (holder != null) {
|
||||
holder.finish();
|
||||
return holder.getTask();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
return tasks.remove(task.getId());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of currently running tasks on the node
|
||||
*/
|
||||
public Map<Long, Task> getTasks() {
|
||||
return Collections.unmodifiableMap(new HashMap<>(tasks));
|
||||
HashMap<Long, Task> taskHashMap = new HashMap<>(this.tasks);
|
||||
for (CancellableTaskHolder holder : cancellableTasks.values()) {
|
||||
taskHashMap.put(holder.getTask().getId(), holder.getTask());
|
||||
}
|
||||
return Collections.unmodifiableMap(taskHashMap);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the list of currently running tasks on the node that can be cancelled
|
||||
*/
|
||||
public Map<Long, CancellableTask> getCancellableTasks() {
|
||||
HashMap<Long, CancellableTask> taskHashMap = new HashMap<>();
|
||||
for (CancellableTaskHolder holder : cancellableTasks.values()) {
|
||||
taskHashMap.put(holder.getTask().getId(), holder.getTask());
|
||||
}
|
||||
return Collections.unmodifiableMap(taskHashMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a task with given id, or null if the task is not found.
|
||||
*/
|
||||
public Task getTask(long id) {
|
||||
Task task = tasks.get(id);
|
||||
if (task != null) {
|
||||
return task;
|
||||
} else {
|
||||
return getCancellableTask(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a cancellable task with given id, or null if the task is not found.
|
||||
*/
|
||||
public CancellableTask getCancellableTask(long id) {
|
||||
CancellableTaskHolder holder = cancellableTasks.get(id);
|
||||
if (holder != null) {
|
||||
return holder.getTask();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of currently banned tasks.
|
||||
* <p>
|
||||
* Will be used in task manager stats and for debugging.
|
||||
*/
|
||||
public int getBanCount() {
|
||||
return banedParents.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Bans all tasks with the specified parent task from execution and cancels any such tasks that are already executing.
|
||||
* <p>
|
||||
* This method is called when a parent task that has children is cancelled.
|
||||
*/
|
||||
public void setBan(String parentNode, long parentId, String reason) {
|
||||
logger.trace("setting ban for the parent task {}:{} {}", parentNode, parentId, reason);
|
||||
|
||||
// Set the ban first, so the newly created tasks cannot be registered
|
||||
Tuple<String, Long> ban = new Tuple<>(parentNode, parentId);
|
||||
synchronized (banedParents) {
|
||||
if (lastDiscoveryNodes.nodeExists(parentNode)) {
|
||||
// Only set the ban if the node is part of the cluster
|
||||
banedParents.put(ban, reason);
|
||||
}
|
||||
}
|
||||
|
||||
// Now go through already running tasks and cancel them
|
||||
for (Map.Entry<Long, CancellableTaskHolder> taskEntry : cancellableTasks.entrySet()) {
|
||||
CancellableTaskHolder holder = taskEntry.getValue();
|
||||
if (holder.hasParent(parentNode, parentId)) {
|
||||
holder.cancel(reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the ban for the specified parent task.
|
||||
* <p>
|
||||
* This method is called when a previously banned task is finally cancelled
|
||||
*/
|
||||
public void removeBan(String parentNode, long parentId) {
|
||||
logger.trace("removing ban for the parent task {}:{} {}", parentNode, parentId);
|
||||
banedParents.remove(new Tuple<>(parentNode, parentId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clusterChanged(ClusterChangedEvent event) {
|
||||
if (event.nodesRemoved()) {
|
||||
synchronized (banedParents) {
|
||||
lastDiscoveryNodes = event.state().getNodes();
|
||||
// Remove all bans that were registered by nodes that are no longer in the cluster state
|
||||
Iterator<Tuple<String, Long>> banIterator = banedParents.keySet().iterator();
|
||||
while (banIterator.hasNext()) {
|
||||
Tuple<String, Long> nodeAndTaskId = banIterator.next();
|
||||
String nodeId = nodeAndTaskId.v1();
|
||||
Long taskId = nodeAndTaskId.v2();
|
||||
if (lastDiscoveryNodes.nodeExists(nodeId) == false) {
|
||||
logger.debug("Removing ban for the parent [{}:{}] on the node [{}], reason: the parent node is gone", nodeId,
|
||||
taskId, event.state().getNodes().localNode());
|
||||
banIterator.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Cancel cancellable tasks for the nodes that are gone
|
||||
for (Map.Entry<Long, CancellableTaskHolder> taskEntry : cancellableTasks.entrySet()) {
|
||||
CancellableTaskHolder holder = taskEntry.getValue();
|
||||
CancellableTask task = holder.getTask();
|
||||
String parent = task.getParentNode();
|
||||
if (parent != null && lastDiscoveryNodes.nodeExists(parent) == false) {
|
||||
if (task.cancelOnParentLeaving()) {
|
||||
holder.cancel("Coordinating node [" + parent + "] left the cluster");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void registerChildTask(Task task, String node) {
|
||||
if (task == null || task instanceof CancellableTask == false) {
|
||||
// We don't have a cancellable task - not much we can do here
|
||||
return;
|
||||
}
|
||||
CancellableTaskHolder holder = cancellableTasks.get(task.getId());
|
||||
if (holder != null) {
|
||||
holder.registerChildTaskNode(node);
|
||||
}
|
||||
}
|
||||
|
||||
private static class CancellableTaskHolder {
|
||||
|
||||
private static final String TASK_FINISHED_MARKER = "task finished";
|
||||
|
||||
private final CancellableTask task;
|
||||
|
||||
private final Set<String> nodesWithChildTasks = new HashSet<>();
|
||||
|
||||
private volatile String cancellationReason = null;
|
||||
|
||||
private volatile Consumer<Set<String>> cancellationListener = null;
|
||||
|
||||
public CancellableTaskHolder(CancellableTask task) {
|
||||
this.task = task;
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks task as cancelled.
|
||||
* <p>
|
||||
* Returns a set of nodes with child tasks where this task should be cancelled if cancellation was successful, null otherwise.
|
||||
*/
|
||||
public Set<String> cancel(String reason, Consumer<Set<String>> listener) {
|
||||
Set<String> nodes;
|
||||
synchronized (this) {
|
||||
assert reason != null;
|
||||
if (cancellationReason == null) {
|
||||
cancellationReason = reason;
|
||||
cancellationListener = listener;
|
||||
nodes = Collections.unmodifiableSet(nodesWithChildTasks);
|
||||
} else {
|
||||
// Already cancelled by somebody else
|
||||
nodes = null;
|
||||
}
|
||||
}
|
||||
if (nodes != null) {
|
||||
task.cancel(reason);
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks task as cancelled.
|
||||
* <p>
|
||||
* Returns a set of nodes with child tasks where this task should be cancelled if cancellation was successful, null otherwise.
|
||||
*/
|
||||
public Set<String> cancel(String reason) {
|
||||
return cancel(reason, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks task as finished.
|
||||
*/
|
||||
public void finish() {
|
||||
Consumer<Set<String>> listener = null;
|
||||
Set<String> nodes = null;
|
||||
synchronized (this) {
|
||||
if (cancellationReason != null) {
|
||||
// The task was cancelled, we need to notify the listener
|
||||
if (cancellationListener != null) {
|
||||
listener = cancellationListener;
|
||||
nodes = Collections.unmodifiableSet(nodesWithChildTasks);
|
||||
cancellationListener = null;
|
||||
}
|
||||
} else {
|
||||
cancellationReason = TASK_FINISHED_MARKER;
|
||||
}
|
||||
}
|
||||
// We need to call the listener outside of the synchronized section to avoid potential bottlenecks
|
||||
// in the listener synchronization
|
||||
if (listener != null) {
|
||||
listener.accept(nodes);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public boolean hasParent(String parentNode, long parentId) {
|
||||
return parentId == task.getParentId() && parentNode.equals(task.getParentNode());
|
||||
}
|
||||
|
||||
public CancellableTask getTask() {
|
||||
return task;
|
||||
}
|
||||
|
||||
public synchronized void registerChildTaskNode(String nodeId) {
|
||||
if (cancellationReason == null) {
|
||||
nodesWithChildTasks.add(nodeId);
|
||||
} else {
|
||||
throw new IllegalStateException("cannot register child task request, the task is already cancelled");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
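A condensed sketch of the register / cancel / unregister cycle the holder machinery above supports; the anonymous request is hypothetical and exists only so createTask() returns a CancellableTask.

import java.util.Set;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.transport.TransportRequest;

public class TaskManagerCancellationSketch {
    public static void main(String[] args) {
        TaskManager taskManager = new TaskManager(Settings.EMPTY);

        // hypothetical request whose task is cancellable
        TransportRequest request = new TransportRequest() {
            @Override
            public Task createTask(long id, String type, String action) {
                return new CancellableTask(id, type, action, getDescription());
            }

            @Override
            public String getDescription() {
                return "sketch request";
            }
        };

        CancellableTask task = (CancellableTask) taskManager.register("transport", "sketch/action", request);

        // cancel() returns the nodes that still run child tasks (none here), or null if already cancelled
        Set<String> childNodes = taskManager.cancel(task, "user request", nodes -> {});
        System.out.println(childNodes);           // []
        System.out.println(task.isCancelled());   // true

        taskManager.unregister(task);
    }
}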
|
||||
|
@ -33,14 +33,20 @@ public abstract class TransportRequest extends TransportMessage<TransportRequest
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the task object that should be used to keep track of the processing of the request.
|
||||
*
|
||||
* A request can override this method and return null to avoid being tracked by the task manager.
|
||||
*/
|
||||
public Task createTask(long id, String type, String action) {
|
||||
return new Task(id, type, action, this::getDescription);
|
||||
return new Task(id, type, action, getDescription());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns optional description of the request to be displayed by the task manager
|
||||
*/
|
||||
public String getDescription() {
|
||||
return this.toString();
|
||||
return "";
|
||||
}
|
||||
|
||||
}
|
||||
|
384
core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
Normal file
@ -0,0 +1,384 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.RandomizedContext;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomInts;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
|
||||
import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.CancellableTask;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
|
||||
public class CancellableTasksTests extends TaskManagerTestCase {
|
||||
|
||||
public static class CancellableNodeRequest extends BaseNodeRequest {
|
||||
protected String requestName;
|
||||
protected String nodeId;
|
||||
|
||||
public CancellableNodeRequest() {
|
||||
super();
|
||||
}
|
||||
|
||||
public CancellableNodeRequest(CancellableNodesRequest request, String nodeId) {
|
||||
super(nodeId);
|
||||
requestName = request.requestName;
|
||||
this.nodeId = nodeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
requestName = in.readString();
|
||||
nodeId = in.readString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(requestName);
|
||||
out.writeString(nodeId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "CancellableNodeRequest[" + requestName + ", " + nodeId + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) {
|
||||
return new CancellableTask(id, type, action, getDescription(), parentTaskNode, parentTaskId);
|
||||
}
|
||||
}
|
||||
|
||||
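/**
* Nodes-level request whose main task is cancellable.
*/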
public static class CancellableNodesRequest extends BaseNodesRequest<CancellableNodesRequest> {
|
||||
private String requestName;
|
||||
|
||||
private CancellableNodesRequest() {
|
||||
super();
|
||||
}
|
||||
|
||||
public CancellableNodesRequest(String requestName, String... nodesIds) {
|
||||
super(nodesIds);
|
||||
this.requestName = requestName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
requestName = in.readString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(requestName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "CancellableNodesRequest[" + requestName + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action) {
|
||||
return new CancellableTask(id, type, action, getDescription());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simulates a cancellable node-based task that can be used to block node tasks so they are guaranteed to be registered by the task manager
|
||||
*/
|
||||
class CancellableTestNodesAction extends AbstractTestNodesAction<CancellableNodesRequest, CancellableNodeRequest> {
|
||||
|
||||
// True if the node operation should get stuck until it is cancelled
|
||||
final boolean shouldBlock;
|
||||
|
||||
final CountDownLatch actionStartedLatch;
|
||||
|
||||
CancellableTestNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool,
|
||||
ClusterService clusterService, TransportService transportService, boolean shouldBlock, CountDownLatch
|
||||
actionStartedLatch) {
|
||||
super(settings, actionName, clusterName, threadPool, clusterService, transportService, CancellableNodesRequest::new,
|
||||
CancellableNodeRequest::new);
|
||||
this.shouldBlock = shouldBlock;
|
||||
this.actionStartedLatch = actionStartedLatch;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CancellableNodeRequest newNodeRequest(String nodeId, CancellableNodesRequest request) {
|
||||
return new CancellableNodeRequest(request, nodeId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeResponse nodeOperation(CancellableNodeRequest request, Task task) {
|
||||
assert task instanceof CancellableTask;
|
||||
debugDelay(request.nodeId, "op1");
|
||||
if (actionStartedLatch != null) {
|
||||
actionStartedLatch.countDown();
|
||||
}
|
||||
|
||||
debugDelay(request.nodeId, "op2");
|
||||
if (shouldBlock) {
|
||||
// Simulate a job that takes forever to finish
|
||||
// Use periodic checks to detect that the task was cancelled
|
||||
try {
|
||||
awaitBusy(() -> {
|
||||
if (((CancellableTask) task).isCancelled()) {
|
||||
throw new RuntimeException("Cancelled");
|
||||
}
|
||||
return false;
|
||||
});
|
||||
fail("It should have thrown an exception");
|
||||
} catch (InterruptedException ex) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
debugDelay(request.nodeId, "op4");
|
||||
|
||||
return new NodeResponse(clusterService.localNode());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeResponse nodeOperation(CancellableNodeRequest request) {
|
||||
throw new UnsupportedOperationException("the task parameter is required");
|
||||
}
|
||||
}
|
||||
|
||||
private Task startCancellableTestNodesAction(boolean waitForActionToStart, int blockedNodesCount, ActionListener<NodesResponse>
|
||||
listener) throws InterruptedException {
|
||||
return startCancellableTestNodesAction(waitForActionToStart, randomSubsetOf(blockedNodesCount, testNodes), new
|
||||
CancellableNodesRequest("Test Request"), listener);
|
||||
}
|
||||
|
||||
private Task startCancellableTestNodesAction(boolean waitForActionToStart, Collection<TestNode> blockOnNodes, CancellableNodesRequest
|
||||
request, ActionListener<NodesResponse> listener) throws InterruptedException {
|
||||
CountDownLatch actionLatch = waitForActionToStart ? new CountDownLatch(nodesCount) : null;
|
||||
CancellableTestNodesAction[] actions = new CancellableTestNodesAction[nodesCount];
|
||||
for (int i = 0; i < testNodes.length; i++) {
|
||||
boolean shouldBlock = blockOnNodes.contains(testNodes[i]);
|
||||
logger.info("The action in the node [{}] should block: [{}]", testNodes[i].discoveryNode.getId(), shouldBlock);
|
||||
actions[i] = new CancellableTestNodesAction(Settings.EMPTY, "testAction", clusterName, threadPool, testNodes[i]
|
||||
.clusterService, testNodes[i].transportService, shouldBlock, actionLatch);
|
||||
}
|
||||
Task task = actions[0].execute(request, listener);
|
||||
if (waitForActionToStart) {
|
||||
logger.info("Awaiting for all actions to start");
|
||||
actionLatch.await();
|
||||
logger.info("Done waiting for all actions to start");
|
||||
}
|
||||
return task;
|
||||
}
|
||||
|
||||
public void testBasicTaskCancellation() throws Exception {
|
||||
setupTestNodes(Settings.EMPTY);
|
||||
connectNodes(testNodes);
|
||||
CountDownLatch responseLatch = new CountDownLatch(1);
|
||||
boolean waitForActionToStart = randomBoolean();
|
||||
logger.info("waitForActionToStart is set to {}", waitForActionToStart);
|
||||
final AtomicReference<NodesResponse> responseReference = new AtomicReference<>();
|
||||
final AtomicReference<Throwable> throwableReference = new AtomicReference<>();
|
||||
int blockedNodesCount = randomIntBetween(0, nodesCount);
|
||||
Task mainTask = startCancellableTestNodesAction(waitForActionToStart, blockedNodesCount, new ActionListener<NodesResponse>() {
|
||||
@Override
|
||||
public void onResponse(NodesResponse listTasksResponse) {
|
||||
responseReference.set(listTasksResponse);
|
||||
responseLatch.countDown();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
throwableReference.set(e);
|
||||
responseLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
// Cancel main task
|
||||
CancelTasksRequest request = new CancelTasksRequest(testNodes[0].discoveryNode.getId());
|
||||
request.reason("Testing Cancellation");
|
||||
request.taskId(mainTask.getId());
|
||||
// And send the cancellation request to a random node
|
||||
CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
|
||||
.get();
|
||||
|
||||
// Wait for the main task to finish
|
||||
responseLatch.await();
|
||||
|
||||
if (response.getTasks().size() == 0) {
|
||||
// We didn't cancel the request and it finished successfully
|
||||
// That should be rare and can only happen if we didn't block on a single node
|
||||
assertEquals(0, blockedNodesCount);
|
||||
// Make sure that the request was successful
|
||||
assertNull(throwableReference.get());
|
||||
assertNotNull(responseReference.get());
|
||||
assertEquals(nodesCount, responseReference.get().getNodes().length);
|
||||
assertEquals(0, responseReference.get().failureCount());
|
||||
} else {
|
||||
// We cancelled the request, so in this case it should have failed, but we should still get a partial response
|
||||
assertNull(throwableReference.get());
|
||||
assertEquals(nodesCount, responseReference.get().failureCount() + responseReference.get().getNodes().length);
|
||||
// and we should have at least as many failures as the number of blocked operations
|
||||
// (we might have cancelled some non-blocked operations before they even started and that's ok)
|
||||
assertThat(responseReference.get().failureCount(), greaterThanOrEqualTo(blockedNodesCount));
|
||||
|
||||
// We should have the information about the cancelled task in the cancel operation response
|
||||
assertEquals(1, response.getTasks().size());
|
||||
assertEquals(mainTask.getId(), response.getTasks().get(0).getId());
|
||||
}
|
||||
|
||||
// Make sure that tasks are no longer running
|
||||
ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)]
|
||||
.transportListTasksAction.execute(new ListTasksRequest(testNodes[0].discoveryNode.getId()).taskId(mainTask.getId())).get();
|
||||
assertEquals(0, listTasksResponse.getTasks().size());
|
||||
|
||||
// Make sure that there are no leftover bans, the ban removal is async, so we might return from the cancellation
|
||||
// while the ban is still there, but it should disappear shortly
|
||||
assertBusy(() -> {
|
||||
for (int i = 0; i < testNodes.length; i++) {
|
||||
assertEquals("No bans on the node " + i, 0, testNodes[i].transportService.getTaskManager().getBanCount());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void testTaskCancellationOnCoordinatingNodeLeavingTheCluster() throws Exception {
|
||||
setupTestNodes(Settings.EMPTY);
|
||||
connectNodes(testNodes);
|
||||
CountDownLatch responseLatch = new CountDownLatch(1);
|
||||
boolean simulateBanBeforeLeaving = randomBoolean();
|
||||
final AtomicReference<NodesResponse> responseReference = new AtomicReference<>();
|
||||
final AtomicReference<Throwable> throwableReference = new AtomicReference<>();
|
||||
int blockedNodesCount = randomIntBetween(0, nodesCount - 1);
|
||||
|
||||
// We shouldn't block on the first node since it's leaving the cluster anyway so it doesn't matter
|
||||
List<TestNode> blockOnNodes = randomSubsetOf(blockedNodesCount, Arrays.copyOfRange(testNodes, 1, nodesCount));
|
||||
Task mainTask = startCancellableTestNodesAction(true, blockOnNodes, new CancellableNodesRequest("Test Request"), new
|
||||
ActionListener<NodesResponse>() {
|
||||
@Override
|
||||
public void onResponse(NodesResponse listTasksResponse) {
|
||||
responseReference.set(listTasksResponse);
|
||||
responseLatch.countDown();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
throwableReference.set(e);
|
||||
responseLatch.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
String mainNode = testNodes[0].discoveryNode.getId();
|
||||
|
||||
// Make sure that tasks are running
|
||||
ListTasksResponse listTasksResponse = testNodes[randomIntBetween(0, testNodes.length - 1)]
|
||||
.transportListTasksAction.execute(new ListTasksRequest().parentNode(mainNode).taskId(mainTask.getId())).get();
|
||||
assertThat(listTasksResponse.getTasks().size(), greaterThanOrEqualTo(blockOnNodes.size()));
|
||||
|
||||
// Simulate the coordinating node leaving the cluster
|
||||
DiscoveryNode[] discoveryNodes = new DiscoveryNode[testNodes.length - 1];
|
||||
for (int i = 1; i < testNodes.length; i++) {
|
||||
discoveryNodes[i - 1] = testNodes[i].discoveryNode;
|
||||
}
|
||||
DiscoveryNode master = discoveryNodes[0];
|
||||
for (int i = 1; i < testNodes.length; i++) {
|
||||
// Notify only nodes that should remain in the cluster
|
||||
testNodes[i].clusterService.setState(ClusterStateCreationUtils.state(testNodes[i].discoveryNode, master, discoveryNodes));
|
||||
}
|
||||
|
||||
if (simulateBanBeforeLeaving) {
|
||||
logger.info("--> Simulate issuing cancel request on the node that is about to leave the cluster");
|
||||
// Simulate issuing cancel request on the node that is about to leave the cluster
|
||||
CancelTasksRequest request = new CancelTasksRequest(testNodes[0].discoveryNode.getId());
|
||||
request.reason("Testing Cancellation");
|
||||
request.taskId(mainTask.getId());
|
||||
// And send the cancellation request to a random node
|
||||
CancelTasksResponse response = testNodes[0].transportCancelTasksAction.execute(request).get();
|
||||
logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster");
|
||||
// This node still thinks it's part of the cluster, so cancelling should look successful
|
||||
if (response.getTasks().size() == 0) {
|
||||
logger.error("!!!!");
|
||||
}
|
||||
assertThat(response.getTasks().size(), lessThanOrEqualTo(1));
|
||||
assertThat(response.getTaskFailures().size(), lessThanOrEqualTo(1));
|
||||
assertThat(response.getTaskFailures().size() + response.getTasks().size(), lessThanOrEqualTo(1));
|
||||
}
|
||||
|
||||
for (int i = 1; i < testNodes.length; i++) {
|
||||
assertEquals("No bans on the node " + i, 0, testNodes[i].transportService.getTaskManager().getBanCount());
|
||||
}
|
||||
|
||||
// Close the first node
|
||||
testNodes[0].close();
|
||||
|
||||
assertBusy(() -> {
|
||||
// Make sure that tasks are no longer running
|
||||
try {
|
||||
ListTasksResponse listTasksResponse1 = testNodes[randomIntBetween(1, testNodes.length - 1)]
|
||||
.transportListTasksAction.execute(new ListTasksRequest().parentNode(mainNode).taskId(mainTask.getId())).get();
|
||||
assertEquals(0, listTasksResponse1.getTasks().size());
|
||||
} catch (InterruptedException ex) {
|
||||
Thread.currentThread().interrupt();
|
||||
} catch (ExecutionException ex2) {
|
||||
fail("shouldn't be here");
|
||||
}
|
||||
});
|
||||
|
||||
// Wait for clean up
|
||||
responseLatch.await();
|
||||
|
||||
}
|
||||
|
||||
private static void debugDelay(String nodeId, String name) {
|
||||
// Introduce additional pseudo-random but repeatable race conditions
|
||||
String delayName = RandomizedContext.current().getRunnerSeedAsString() + ":" + nodeId + ":" + name;
|
||||
Random random = new Random(delayName.hashCode());
|
||||
if (RandomInts.randomIntBetween(random, 0, 10) < 1) {
|
||||
try {
|
||||
Thread.sleep(RandomInts.randomIntBetween(random, 20, 50));
|
||||
} catch (InterruptedException ex) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
245
core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
Normal file
@ -0,0 +1,245 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
|
||||
import org.elasticsearch.action.support.nodes.TransportNodesAction;
|
||||
import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lease.Releasable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.TaskManager;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.cluster.TestClusterService;
|
||||
import org.elasticsearch.test.tasks.MockTaskManager;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
import org.elasticsearch.transport.local.LocalTransport;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
/**
|
||||
* The base test case for unit testing the task manager and related transport actions
|
||||
*/
|
||||
public abstract class TaskManagerTestCase extends ESTestCase {
|
||||
|
||||
protected static ThreadPool threadPool;
|
||||
public static final ClusterName clusterName = new ClusterName("test-cluster");
|
||||
protected TestNode[] testNodes;
|
||||
protected int nodesCount;
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeClass() {
|
||||
threadPool = new ThreadPool(TransportTasksActionTests.class.getSimpleName());
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClass() {
|
||||
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
|
||||
threadPool = null;
|
||||
}
|
||||
|
||||
public void setupTestNodes(Settings settings) {
|
||||
nodesCount = randomIntBetween(2, 10);
|
||||
testNodes = new TestNode[nodesCount];
|
||||
for (int i = 0; i < testNodes.length; i++) {
|
||||
testNodes[i] = new TestNode("node" + i, threadPool, settings);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public final void shutdownTestNodes() throws Exception {
|
||||
for (TestNode testNode : testNodes) {
|
||||
testNode.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static class NodeResponse extends BaseNodeResponse {
|
||||
|
||||
protected NodeResponse() {
|
||||
super();
|
||||
}
|
||||
|
||||
protected NodeResponse(DiscoveryNode node) {
|
||||
super(node);
|
||||
}
|
||||
}
|
||||
|
||||
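/**
* Nodes-level response that also records how many node-level operations failed.
*/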
static class NodesResponse extends BaseNodesResponse<NodeResponse> {
|
||||
|
||||
private int failureCount;
|
||||
|
||||
protected NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) {
|
||||
super(clusterName, nodes);
|
||||
this.failureCount = failureCount;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
failureCount = in.readVInt();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeVInt(failureCount);
|
||||
}
|
||||
|
||||
public int failureCount() {
|
||||
return failureCount;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simulates a node-based task that can be used to block node tasks so they are guaranteed to be registered by the task manager
|
||||
*/
|
||||
abstract class AbstractTestNodesAction<NodesRequest extends BaseNodesRequest<NodesRequest>, NodeRequest extends BaseNodeRequest>
|
||||
extends TransportNodesAction<NodesRequest, NodesResponse, NodeRequest, NodeResponse> {
|
||||
|
||||
AbstractTestNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool,
|
||||
ClusterService clusterService, TransportService transportService, Supplier<NodesRequest> request,
|
||||
Supplier<NodeRequest> nodeRequest) {
|
||||
super(settings, actionName, clusterName, threadPool, clusterService, transportService,
|
||||
new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
|
||||
request, nodeRequest, ThreadPool.Names.GENERIC);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) {
|
||||
final List<NodeResponse> nodesList = new ArrayList<>();
|
||||
int failureCount = 0;
|
||||
for (int i = 0; i < responses.length(); i++) {
|
||||
Object resp = responses.get(i);
|
||||
if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones
|
||||
nodesList.add((NodeResponse) resp);
|
||||
} else if (resp instanceof FailedNodeException) {
|
||||
failureCount++;
|
||||
} else {
|
||||
logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp);
|
||||
}
|
||||
}
|
||||
return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeResponse newNodeResponse() {
|
||||
return new NodeResponse();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected abstract NodeResponse nodeOperation(NodeRequest request);
|
||||
|
||||
@Override
|
||||
protected boolean accumulateExceptions() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
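/**
* A single in-process test node backed by a local transport, with its own cluster service and
* list/cancel tasks transport actions.
*/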
public static class TestNode implements Releasable {
|
||||
public TestNode(String name, ThreadPool threadPool, Settings settings) {
|
||||
transportService = new TransportService(settings,
|
||||
new LocalTransport(settings, threadPool, Version.CURRENT, new NamedWriteableRegistry()),
|
||||
threadPool, new NamedWriteableRegistry()) {
|
||||
@Override
|
||||
protected TaskManager createTaskManager() {
|
||||
if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) {
|
||||
return new MockTaskManager(settings);
|
||||
} else {
|
||||
return super.createTaskManager();
|
||||
}
|
||||
}
|
||||
};
|
||||
transportService.start();
|
||||
clusterService = new TestClusterService(threadPool, transportService);
|
||||
clusterService.add(transportService.getTaskManager());
|
||||
discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT);
|
||||
IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(settings);
|
||||
ActionFilters actionFilters = new ActionFilters(Collections.emptySet());
|
||||
transportListTasksAction = new TransportListTasksAction(settings, clusterName, threadPool, clusterService, transportService,
|
||||
actionFilters, indexNameExpressionResolver);
|
||||
transportCancelTasksAction = new TransportCancelTasksAction(settings, clusterName, threadPool, clusterService, transportService,
|
||||
actionFilters, indexNameExpressionResolver);
|
||||
}
|
||||
|
||||
public final TestClusterService clusterService;
|
||||
public final TransportService transportService;
|
||||
public final DiscoveryNode discoveryNode;
|
||||
public final TransportListTasksAction transportListTasksAction;
|
||||
public final TransportCancelTasksAction transportCancelTasksAction;
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
transportService.close();
|
||||
}
|
||||
}
|
||||
|
||||
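/**
* Publishes a shared cluster state (with the first node as master) and connects every test node to every other node.
*/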
public static void connectNodes(TestNode... nodes) {
|
||||
DiscoveryNode[] discoveryNodes = new DiscoveryNode[nodes.length];
|
||||
for (int i = 0; i < nodes.length; i++) {
|
||||
discoveryNodes[i] = nodes[i].discoveryNode;
|
||||
}
|
||||
DiscoveryNode master = discoveryNodes[0];
|
||||
for (TestNode node : nodes) {
|
||||
node.clusterService.setState(ClusterStateCreationUtils.state(node.discoveryNode, master, discoveryNodes));
|
||||
}
|
||||
for (TestNode nodeA : nodes) {
|
||||
for (TestNode nodeB : nodes) {
|
||||
nodeA.transportService.connectToNode(nodeB.discoveryNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
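/**
* Registers a {@link RecordingTaskManagerListener} on each node's {@link MockTaskManager} for the given action masks.
*/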
public static RecordingTaskManagerListener[] setupListeners(TestNode[] nodes, String... actionMasks) {
|
||||
RecordingTaskManagerListener[] listeners = new RecordingTaskManagerListener[nodes.length];
|
||||
for (int i = 0; i < nodes.length; i++) {
|
||||
listeners[i] = new RecordingTaskManagerListener(nodes[i].discoveryNode, actionMasks);
|
||||
((MockTaskManager) (nodes[i].clusterService.getTaskManager())).addListener(listeners[i]);
|
||||
}
|
||||
return listeners;
|
||||
}
|
||||
|
||||
}
|
@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.node.tasks;
|
||||
|
||||
import org.elasticsearch.action.ListenableActionFuture;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
|
||||
@ -68,7 +69,12 @@ public class TasksIT extends ESIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return pluginList(MockTransportService.TestPlugin.class);
|
||||
return pluginList(MockTransportService.TestPlugin.class, TestTaskPlugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
|
||||
return nodePlugins();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -279,6 +285,39 @@ public class TasksIT extends ESIntegTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
public void testTasksCancellation() throws Exception {
|
||||
// Start blocking test task
|
||||
// Get real client (the plugin is not registered on transport nodes)
|
||||
ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client()).execute();
|
||||
logger.info("--> started test tasks");
|
||||
|
||||
// Wait for the task to start on all nodes
|
||||
assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(),
|
||||
client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
|
||||
|
||||
logger.info("--> cancelling the main test task");
|
||||
CancelTasksResponse cancelTasksResponse = client().admin().cluster().prepareCancelTasks().setActions(TestTaskPlugin.TestTaskAction.NAME).get();
|
||||
assertEquals(1, cancelTasksResponse.getTasks().size());
|
||||
|
||||
future.get();
|
||||
|
||||
logger.info("--> checking that test tasks are not running");
|
||||
assertEquals(0, client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "*").get().getTasks().size());
|
||||
|
||||
}
|
||||
|
||||
public void testTasksUnblocking() throws Exception {
|
||||
// Start blocking test task
|
||||
ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client()).execute();
|
||||
// Wait for the task to start on all nodes
|
||||
assertBusy(() -> assertEquals(internalCluster().numDataAndMasterNodes(),
|
||||
client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
|
||||
|
||||
TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
|
||||
|
||||
future.get();
|
||||
assertEquals(0, client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
|
454
core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
Normal file
@ -0,0 +1,454 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks;
|
||||
|
||||
import org.elasticsearch.action.Action;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionModule;
|
||||
import org.elasticsearch.action.ActionRequestBuilder;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
|
||||
import org.elasticsearch.action.support.nodes.TransportNodesAction;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
|
||||
import org.elasticsearch.action.support.tasks.TransportTasksAction;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNodes;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.tasks.CancellableTask;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
import static org.elasticsearch.test.ESTestCase.awaitBusy;
|
||||
|
||||
/**
|
||||
* A plugin that adds a cancellable blocking test task for integration testing of the task manager.
|
||||
*/
|
||||
public class TestTaskPlugin extends Plugin {
|
||||
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return "test-task-plugin";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Test plugin for testing task management";
|
||||
}
|
||||
|
||||
public void onModule(ActionModule module) {
|
||||
module.registerAction(TestTaskAction.INSTANCE, TransportTestTaskAction.class);
|
||||
module.registerAction(UnblockTestTasksAction.INSTANCE, TransportUnblockTestTasksAction.class);
|
||||
}
|
||||
|
||||
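/**
* A cancellable task that reports itself as blocked until {@link #unblock()} is called.
*/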
static class TestTask extends CancellableTask {
|
||||
|
||||
private volatile boolean blocked = true;
|
||||
|
||||
public TestTask(long id, String type, String action, String description, String parentNode, long parentId) {
|
||||
super(id, type, action, description, parentNode, parentId);
|
||||
}
|
||||
|
||||
public boolean isBlocked() {
|
||||
return blocked;
|
||||
}
|
||||
|
||||
public void unblock() {
|
||||
blocked = false;
|
||||
}
|
||||
}
|
||||
|
||||
public static class NodeResponse extends BaseNodeResponse {
|
||||
|
||||
protected NodeResponse() {
|
||||
super();
|
||||
}
|
||||
|
||||
public NodeResponse(DiscoveryNode node) {
|
||||
super(node);
|
||||
}
|
||||
}
|
||||
|
||||
public static class NodesResponse extends BaseNodesResponse<NodeResponse> {
|
||||
|
||||
private int failureCount;
|
||||
|
||||
NodesResponse() {
|
||||
|
||||
}
|
||||
|
||||
public NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) {
|
||||
super(clusterName, nodes);
|
||||
this.failureCount = failureCount;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
failureCount = in.readVInt();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeVInt(failureCount);
|
||||
}
|
||||
|
||||
public int failureCount() {
|
||||
return failureCount;
|
||||
}
|
||||
}
|
||||
|
||||
public static class NodeRequest extends BaseNodeRequest {
|
||||
protected String requestName;
|
||||
protected String nodeId;
|
||||
|
||||
public NodeRequest() {
|
||||
super();
|
||||
}
|
||||
|
||||
public NodeRequest(NodesRequest request, String nodeId) {
|
||||
super(nodeId);
|
||||
requestName = request.requestName;
|
||||
this.nodeId = nodeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
requestName = in.readString();
|
||||
nodeId = in.readString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(requestName);
|
||||
out.writeString(nodeId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "NodeRequest[" + requestName + ", " + nodeId + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) {
|
||||
return new TestTask(id, type, action, this.getDescription(), parentTaskNode, parentTaskId);
|
||||
}
|
||||
}
|
||||
|
||||
public static class NodesRequest extends BaseNodesRequest<NodesRequest> {
|
||||
private String requestName;
|
||||
|
||||
NodesRequest() {
|
||||
super();
|
||||
}
|
||||
|
||||
public NodesRequest(String requestName, String... nodesIds) {
|
||||
super(nodesIds);
|
||||
this.requestName = requestName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
requestName = in.readString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeString(requestName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "NodesRequest[" + requestName + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action) {
|
||||
return new CancellableTask(id, type, action, getDescription());
|
||||
}
|
||||
}
|
||||
|
||||
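/**
* Transport action that runs the blocking test task on the data nodes; each node operation spins until its task is
* unblocked or cancelled.
*/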
public static class TransportTestTaskAction extends TransportNodesAction<NodesRequest, NodesResponse, NodeRequest, NodeResponse> {
|
||||
|
||||
@Inject
|
||||
public TransportTestTaskAction(Settings settings, ClusterName clusterName, ThreadPool threadPool,
|
||||
ClusterService clusterService, TransportService transportService) {
|
||||
super(settings, TestTaskAction.NAME, clusterName, threadPool, clusterService, transportService,
|
||||
new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
|
||||
NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) {
|
||||
final List<NodeResponse> nodesList = new ArrayList<>();
|
||||
int failureCount = 0;
|
||||
for (int i = 0; i < responses.length(); i++) {
|
||||
Object resp = responses.get(i);
|
||||
if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones
|
||||
nodesList.add((NodeResponse) resp);
|
||||
} else if (resp instanceof FailedNodeException) {
|
||||
failureCount++;
|
||||
} else {
|
||||
logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp);
|
||||
}
|
||||
}
|
||||
return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
|
||||
List<String> list = new ArrayList<>();
|
||||
for (String node : nodesIds) {
|
||||
if (nodes.getDataNodes().containsKey(node)) {
|
||||
list.add(node);
|
||||
}
|
||||
}
|
||||
return list.toArray(new String[list.size()]);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) {
|
||||
return new NodeRequest(request, nodeId);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeResponse newNodeResponse() {
|
||||
return new NodeResponse();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doExecute(Task task, NodesRequest request, ActionListener<NodesResponse> listener) {
|
||||
super.doExecute(task, request, listener);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeResponse nodeOperation(NodeRequest request, Task task) {
|
||||
logger.info("Test task started on the node {}", clusterService.localNode());
|
||||
try {
|
||||
awaitBusy(() -> {
|
||||
if (((CancellableTask) task).isCancelled()) {
|
||||
throw new RuntimeException("Cancelled!");
|
||||
}
|
||||
return ((TestTask) task).isBlocked() == false;
|
||||
});
|
||||
} catch (InterruptedException ex) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
logger.info("Test task finished on the node {}", clusterService.localNode());
|
||||
return new NodeResponse(clusterService.localNode());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected NodeResponse nodeOperation(NodeRequest request) {
|
||||
throw new UnsupportedOperationException("the task parameter is required");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean accumulateExceptions() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
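/**
* Action definition for starting the blocking test task.
*/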
public static class TestTaskAction extends Action<NodesRequest, NodesResponse, NodesRequestBuilder> {
|
||||
|
||||
public static final TestTaskAction INSTANCE = new TestTaskAction();
|
||||
public static final String NAME = "cluster:admin/tasks/test";
|
||||
|
||||
private TestTaskAction() {
|
||||
super(NAME);
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodesResponse newResponse() {
|
||||
return new NodesResponse();
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodesRequestBuilder newRequestBuilder(ElasticsearchClient client) {
|
||||
return new NodesRequestBuilder(client, this);
|
||||
}
|
||||
}
|
||||
|
||||
public static class NodesRequestBuilder extends ActionRequestBuilder<NodesRequest, NodesResponse, NodesRequestBuilder> {
|
||||
|
||||
protected NodesRequestBuilder(ElasticsearchClient client, Action<NodesRequest, NodesResponse, NodesRequestBuilder> action) {
|
||||
super(client, action, new NodesRequest("test"));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static class UnblockTestTaskResponse implements Writeable<UnblockTestTaskResponse> {
|
||||
|
||||
public UnblockTestTaskResponse() {
|
||||
|
||||
}
|
||||
|
||||
public UnblockTestTaskResponse(StreamInput in) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnblockTestTaskResponse readFrom(StreamInput in) throws IOException {
|
||||
return new UnblockTestTaskResponse(in);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static class UnblockTestTasksRequest extends BaseTasksRequest<UnblockTestTasksRequest> {
|
||||
|
||||
}
|
||||
|
||||
public static class UnblockTestTasksResponse extends BaseTasksResponse {
|
||||
|
||||
private List<UnblockTestTaskResponse> tasks;
|
||||
|
||||
public UnblockTestTasksResponse() {
|
||||
|
||||
}
|
||||
|
||||
public UnblockTestTasksResponse(List<UnblockTestTaskResponse> tasks, List<TaskOperationFailure> taskFailures, List<? extends
|
||||
FailedNodeException> nodeFailures) {
|
||||
super(taskFailures, nodeFailures);
|
||||
this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
int taskCount = in.readVInt();
|
||||
List<UnblockTestTaskResponse> builder = new ArrayList<>();
|
||||
for (int i = 0; i < taskCount; i++) {
|
||||
builder.add(new UnblockTestTaskResponse(in));
|
||||
}
|
||||
tasks = Collections.unmodifiableList(builder);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeVInt(tasks.size());
|
||||
for (UnblockTestTaskResponse task : tasks) {
|
||||
task.writeTo(out);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transport action that unblocks the running test tasks
|
||||
*/
|
||||
public static class TransportUnblockTestTasksAction extends TransportTasksAction<Task, UnblockTestTasksRequest,
|
||||
UnblockTestTasksResponse, UnblockTestTaskResponse> {
|
||||
|
||||
@Inject
|
||||
public TransportUnblockTestTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService
|
||||
clusterService,
|
||||
TransportService transportService) {
|
||||
super(settings, UnblockTestTasksAction.NAME, clusterName, threadPool, clusterService, transportService, new ActionFilters(new
|
||||
HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
|
||||
UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UnblockTestTasksResponse newResponse(UnblockTestTasksRequest request, List<UnblockTestTaskResponse> tasks,
|
||||
List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException>
|
||||
failedNodeExceptions) {
|
||||
return new UnblockTestTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UnblockTestTaskResponse readTaskResponse(StreamInput in) throws IOException {
|
||||
return new UnblockTestTaskResponse(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected UnblockTestTaskResponse taskOperation(UnblockTestTasksRequest request, Task task) {
|
||||
((TestTask) task).unblock();
|
||||
return new UnblockTestTaskResponse();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean accumulateExceptions() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
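/**
* Action definition for unblocking the currently running test tasks.
*/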
public static class UnblockTestTasksAction extends Action<UnblockTestTasksRequest, UnblockTestTasksResponse,
|
||||
UnblockTestTasksRequestBuilder> {
|
||||
|
||||
public static final UnblockTestTasksAction INSTANCE = new UnblockTestTasksAction();
|
||||
public static final String NAME = "cluster:admin/tasks/testunblock";
|
||||
|
||||
private UnblockTestTasksAction() {
|
||||
super(NAME);
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnblockTestTasksResponse newResponse() {
|
||||
return new UnblockTestTasksResponse();
|
||||
}
|
||||
|
||||
@Override
|
||||
public UnblockTestTasksRequestBuilder newRequestBuilder(ElasticsearchClient client) {
|
||||
return new UnblockTestTasksRequestBuilder(client, this);
|
||||
}
|
||||
}
|
||||
|
||||
public static class UnblockTestTasksRequestBuilder extends ActionRequestBuilder<UnblockTestTasksRequest, UnblockTestTasksResponse,
|
||||
UnblockTestTasksRequestBuilder> {
|
||||
|
||||
protected UnblockTestTasksRequestBuilder(ElasticsearchClient client, Action<UnblockTestTasksRequest, UnblockTestTasksResponse,
|
||||
UnblockTestTasksRequestBuilder> action) {
|
||||
super(client, action, new UnblockTestTasksRequest());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
243
core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@ -18,23 +18,19 @@
|
||||
*/
|
||||
package org.elasticsearch.action.admin.cluster.node.tasks;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionFuture;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.FailedNodeException;
|
||||
import org.elasticsearch.action.TaskOperationFailure;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
|
||||
import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
|
||||
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
|
||||
import org.elasticsearch.action.support.nodes.TransportNodesAction;
|
||||
import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
|
||||
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
|
||||
import org.elasticsearch.action.support.tasks.TransportTasksAction;
|
||||
@ -42,16 +38,11 @@ import org.elasticsearch.cluster.ClusterName;
|
||||
import org.elasticsearch.cluster.ClusterService;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lease.Releasable;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.tasks.TaskManager;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.elasticsearch.test.cluster.TestClusterService;
|
||||
import org.elasticsearch.test.tasks.MockTaskManager;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
@ -70,102 +61,13 @@ import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
import static org.elasticsearch.action.support.PlainActionFuture.newFuture;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.endsWith;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
|
||||
public class TransportTasksActionTests extends ESTestCase {
|
||||
|
||||
private static ThreadPool threadPool;
|
||||
private static final ClusterName clusterName = new ClusterName("test-cluster");
|
||||
private TestNode[] testNodes;
|
||||
private int nodesCount;
|
||||
|
||||
@BeforeClass
|
||||
public static void beforeClass() {
|
||||
threadPool = new ThreadPool(TransportTasksActionTests.class.getSimpleName());
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClass() {
|
||||
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
|
||||
threadPool = null;
|
||||
}
|
||||
|
||||
public void setupTestNodes(Settings settings) {
|
||||
nodesCount = randomIntBetween(2, 10);
|
||||
testNodes = new TestNode[nodesCount];
|
||||
for (int i = 0; i < testNodes.length; i++) {
|
||||
testNodes[i] = new TestNode("node" + i, threadPool, settings);
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public final void shutdownTestNodes() throws Exception {
|
||||
for (TestNode testNode : testNodes) {
|
||||
testNode.close();
|
||||
}
|
||||
}
|
||||
|
||||
private static class TestNode implements Releasable {
|
||||
public TestNode(String name, ThreadPool threadPool, Settings settings) {
|
||||
transportService = new TransportService(settings,
|
||||
new LocalTransport(settings, threadPool, Version.CURRENT, new NamedWriteableRegistry()),
|
||||
threadPool, new NamedWriteableRegistry()) {
|
||||
@Override
|
||||
protected TaskManager createTaskManager() {
|
||||
if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) {
|
||||
return new MockTaskManager(settings);
|
||||
} else {
|
||||
return super.createTaskManager();
|
||||
}
|
||||
}
|
||||
};
|
||||
transportService.start();
|
||||
clusterService = new TestClusterService(threadPool, transportService);
|
||||
discoveryNode = new DiscoveryNode(name, transportService.boundAddress().publishAddress(), Version.CURRENT);
|
||||
transportListTasksAction = new TransportListTasksAction(settings, clusterName, threadPool, clusterService, transportService,
|
||||
new ActionFilters(Collections.emptySet()), new IndexNameExpressionResolver(settings));
|
||||
}
|
||||
|
||||
public final TestClusterService clusterService;
|
||||
public final TransportService transportService;
|
||||
public final DiscoveryNode discoveryNode;
|
||||
public final TransportListTasksAction transportListTasksAction;
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
transportService.close();
|
||||
}
|
||||
}
|
||||
|
||||
public static void connectNodes(TestNode... nodes) {
|
||||
DiscoveryNode[] discoveryNodes = new DiscoveryNode[nodes.length];
|
||||
for (int i = 0; i < nodes.length; i++) {
|
||||
discoveryNodes[i] = nodes[i].discoveryNode;
|
||||
}
|
||||
DiscoveryNode master = discoveryNodes[0];
|
||||
for (TestNode node : nodes) {
|
||||
node.clusterService.setState(ClusterStateCreationUtils.state(node.discoveryNode, master, discoveryNodes));
|
||||
}
|
||||
for (TestNode nodeA : nodes) {
|
||||
for (TestNode nodeB : nodes) {
|
||||
nodeA.transportService.connectToNode(nodeB.discoveryNode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static RecordingTaskManagerListener[] setupListeners(TestNode[] nodes, String... actionMasks) {
|
||||
RecordingTaskManagerListener[] listeners = new RecordingTaskManagerListener[nodes.length];
|
||||
for (int i = 0; i < nodes.length; i++) {
|
||||
listeners[i] = new RecordingTaskManagerListener(nodes[i].discoveryNode, actionMasks);
|
||||
((MockTaskManager)(nodes[i].clusterService.getTaskManager())).addListener(listeners[i]);
|
||||
}
|
||||
return listeners;
|
||||
}
|
||||
public class TransportTasksActionTests extends TaskManagerTestCase {
|
||||
|
||||
public static class NodeRequest extends BaseNodeRequest {
|
||||
protected String requestName;
|
||||
@ -197,13 +99,13 @@ public class TransportTasksActionTests extends ESTestCase {
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "NodeRequest[" + requestName + ", " + enableTaskManager + "]";
|
||||
return "CancellableNodeRequest[" + requestName + ", " + enableTaskManager + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public Task createTask(long id, String type, String action) {
|
||||
public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) {
|
||||
if (enableTaskManager) {
|
||||
return super.createTask(id, type, action);
|
||||
return super.createTask(id, type, action, parentTaskNode, parentTaskId);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
@ -214,7 +116,7 @@ public class TransportTasksActionTests extends ESTestCase {
|
||||
private String requestName;
|
||||
private boolean enableTaskManager;
|
||||
|
||||
private NodesRequest() {
|
||||
NodesRequest() {
|
||||
super();
|
||||
}
|
||||
|
||||
@ -244,7 +146,7 @@ public class TransportTasksActionTests extends ESTestCase {
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "NodesRequest[" + requestName + ", " + enableTaskManager + "]";
|
||||
return "CancellableNodesRequest[" + requestName + ", " + enableTaskManager + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -257,70 +159,14 @@ public class TransportTasksActionTests extends ESTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
static class NodeResponse extends BaseNodeResponse {
|
||||
|
||||
protected NodeResponse() {
|
||||
super();
|
||||
}
|
||||
|
||||
protected NodeResponse(DiscoveryNode node) {
|
||||
super(node);
|
||||
}
|
||||
}
|
||||
|
||||
static class NodesResponse extends BaseNodesResponse<NodeResponse> {
|
||||
|
||||
private int failureCount;
|
||||
|
||||
protected NodesResponse(ClusterName clusterName, NodeResponse[] nodes, int failureCount) {
|
||||
super(clusterName, nodes);
|
||||
this.failureCount = failureCount;
|
||||
}
|
||||
|
||||
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
failureCount = in.readVInt();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(failureCount);
}

public int failureCount() {
return failureCount;
}
}

/**
 * Simulates node-based task that can be used to block node tasks so they are guaranteed to be registered by task manager
 */
abstract class TestNodesAction extends TransportNodesAction<NodesRequest, NodesResponse, NodeRequest, NodeResponse> {
abstract class TestNodesAction extends AbstractTestNodesAction<NodesRequest, NodeRequest> {

TestNodesAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool,
ClusterService clusterService, TransportService transportService) {
super(settings, actionName, clusterName, threadPool, clusterService, transportService,
new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
NodesRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC);
}

@Override
protected NodesResponse newResponse(NodesRequest request, AtomicReferenceArray responses) {
final List<NodeResponse> nodesList = new ArrayList<>();
int failureCount = 0;
for (int i = 0; i < responses.length(); i++) {
Object resp = responses.get(i);
if (resp instanceof NodeResponse) { // will also filter out null response for unallocated ones
nodesList.add((NodeResponse) resp);
} else if (resp instanceof FailedNodeException) {
failureCount++;
} else {
logger.warn("unknown response type [{}], expected NodeLocalGatewayMetaState or FailedNodeException", resp);
}
}
return new NodesResponse(clusterName, nodesList.toArray(new NodeResponse[nodesList.size()]), failureCount);
super(settings, actionName, clusterName, threadPool, clusterService, transportService, NodesRequest::new, NodeRequest::new);
}

@Override
@ -332,14 +178,6 @@ public class TransportTasksActionTests extends ESTestCase {
protected NodeResponse newNodeResponse() {
return new NodeResponse();
}

@Override
protected abstract NodeResponse nodeOperation(NodeRequest request);

@Override
protected boolean accumulateExceptions() {
return true;
}
}

static class TestTaskResponse implements Writeable<TestTaskResponse> {
@ -411,7 +249,7 @@ public class TransportTasksActionTests extends ESTestCase {
/**
 * Test class for testing task operations
 */
static abstract class TestTasksAction extends TransportTasksAction<TestTasksRequest, TestTasksResponse, TestTaskResponse> {
static abstract class TestTasksAction extends TransportTasksAction<Task, TestTasksRequest, TestTasksResponse, TestTaskResponse> {

protected TestTasksAction(Settings settings, String actionName, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService) {
@ -548,7 +386,7 @@ public class TransportTasksActionTests extends ESTestCase {
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription());
assertEquals("CancellableNodeRequest[Test Request, true]", entry.getValue().get(0).getDescription());
}

// Make sure that the main task on coordinating node is the task that was returned to us by execute()
@ -648,7 +486,7 @@ public class TransportTasksActionTests extends ESTestCase {
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertEquals("NodeRequest[Test Request, true]", entry.getValue().get(0).getDescription());
assertEquals("CancellableNodeRequest[Test Request, true]", entry.getValue().get(0).getDescription());
}

// Release all tasks and wait for response
@ -657,6 +495,61 @@ public class TransportTasksActionTests extends ESTestCase {
assertEquals(0, responses.failureCount());
}

public void testCancellingTasksThatDontSupportCancellation() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
CountDownLatch responseLatch = new CountDownLatch(1);
Task task = startBlockingTestNodesAction(checkLatch, new ActionListener<NodesResponse>() {
@Override
public void onResponse(NodesResponse nodeResponses) {
responseLatch.countDown();
}

@Override
public void onFailure(Throwable e) {
responseLatch.countDown();
}
});
String actionName = "testAction"; // only pick the main action

// Try to cancel main task using action name
CancelTasksRequest request = new CancelTasksRequest(testNodes[0].discoveryNode.getId());
request.reason("Testing Cancellation");
request.actions(actionName);
CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
.get();

// Shouldn't match any tasks since testAction doesn't support cancellation
assertEquals(0, response.getTasks().size());
assertEquals(0, response.getTaskFailures().size());
assertEquals(0, response.getNodeFailures().size());

// Try to cancel main task using id
request = new CancelTasksRequest(testNodes[0].discoveryNode.getId());
request.reason("Testing Cancellation");
request.taskId(task.getId());
response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request).get();

// Shouldn't match any tasks since testAction doesn't support cancellation
assertEquals(0, response.getTasks().size());
assertEquals(0, response.getTaskFailures().size());
assertEquals(1, response.getNodeFailures().size());
assertThat(response.getNodeFailures().get(0).getDetailedMessage(), containsString("doesn't support cancellation"));

// Make sure that task is still running
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.actions(actionName);
ListTasksResponse listResponse = testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction.execute
(listTasksRequest).get();
assertEquals(1, listResponse.getPerNodeTasks().size());

// Release all tasks and wait for response
checkLatch.countDown();
responseLatch.await(10, TimeUnit.SECONDS);
}
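For illustration only, not part of the commit: a stripped-down sketch of the two ways the test above addresses a task when cancelling; nodeId and task stand in for the values the test obtains from its fixture.

// Hypothetical usage sketch; mirrors the request setup exercised in the test above.
// cancel by action name: matches every task registered under that action
CancelTasksRequest byActionName = new CancelTasksRequest(nodeId);
byActionName.reason("Testing Cancellation");
byActionName.actions("testAction");

// cancel by task id: matches exactly one task and reports a node failure
// if that task does not support cancellation
CancelTasksRequest byTaskId = new CancelTasksRequest(nodeId);
byTaskId.reason("Testing Cancellation");
byTaskId.taskId(task.getId());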

public void testFailedTasksCount() throws ExecutionException, InterruptedException, IOException {
Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build();
setupTestNodes(settings);

@ -21,6 +21,8 @@ package org.elasticsearch.aliases;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
@ -54,6 +56,8 @@ import java.util.concurrent.TimeUnit;

import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.cluster.metadata.AliasAction.Type.ADD;
import static org.elasticsearch.cluster.metadata.AliasAction.Type.REMOVE;
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_METADATA_BLOCK;
import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_READ_ONLY_BLOCK;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA;
@ -588,7 +592,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
.addAlias("foobar", "foo"));

assertAcked(admin().indices().prepareAliases()
.addAliasAction(new AliasAction(AliasAction.Type.ADD, "foobar", "bac").routing("bla")));
.addAliasAction(new AliasAction(ADD, "foobar", "bac").routing("bla")));

logger.info("--> getting bar and baz for index bazbar");
getResponse = admin().indices().prepareGetAliases("bar", "bac").addIndices("bazbar").get();
@ -724,8 +728,8 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertAcked(admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction(null, "alias1")));
fail("create alias should have failed due to null index");
} catch (IllegalArgumentException e) {
assertThat("Exception text does not contain \"Alias action [add]: [index] may not be empty string\"",
e.getMessage(), containsString("Alias action [add]: [index] may not be empty string"));
assertThat("Exception text does not contain \"Alias action [add]: [index/indices] may not be empty string\"",
e.getMessage(), containsString("Alias action [add]: [index/indices] may not be empty string"));
}
}

@ -740,8 +744,8 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertAcked(admin().indices().prepareAliases().addAlias((String) null, "empty-alias"));
fail("create alias should have failed due to null index");
} catch (IllegalArgumentException e) {
assertThat("Exception text does not contain \"Alias action [add]: [index] may not be empty string\"",
e.getMessage(), containsString("Alias action [add]: [index] may not be empty string"));
assertThat("Exception text does not contain \"Alias action [add]: [index/indices] may not be empty string\"",
e.getMessage(), containsString("Alias action [add]: [index/indices] may not be empty string"));
}
}

@ -750,7 +754,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("", "alias1")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[index] may not be empty string"));
assertThat(e.getMessage(), containsString("[index/indices] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(ADD, "", "alias1")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[index/indices] may not be empty string"));
}
}

@ -759,7 +769,19 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias] may not be empty string"));
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(ADD, "index1", (String)null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(ADD, "index1", (String[])null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias/aliases] is either missing or null"));
}
}

@ -768,7 +790,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newAddAliasAction("index1", "")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias] may not be empty string"));
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(ADD, "index1", "")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
}

@ -780,6 +808,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(ADD, null, (String)null)).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}

public void testAddAliasEmptyAliasEmptyIndex() {
@ -790,6 +825,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(ADD, "", "")).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}

public void testRemoveAliasNullIndex() {
@ -797,7 +839,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction(null, "alias1")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[index] may not be empty string"));
assertThat(e.getMessage(), containsString("[index/indices] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, null, "alias1")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[index/indices] may not be empty string"));
}
}

@ -806,7 +854,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("", "alias1")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[index] may not be empty string"));
assertThat(e.getMessage(), containsString("[index/indices] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, "", "alias1")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[index/indices] may not be empty string"));
}
}

@ -815,7 +869,19 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias] may not be empty string"));
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, "index1", (String)null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, "index1", (String[])null)).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias/aliases] is either missing or null"));
}
}

@ -824,7 +890,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
admin().indices().prepareAliases().addAliasAction(AliasAction.newRemoveAliasAction("index1", "")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias] may not be empty string"));
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, "index1", "")).get();
fail("Expected ActionRequestValidationException");
} catch (ActionRequestValidationException e) {
assertThat(e.getMessage(), containsString("[alias/aliases] may not be empty string"));
}
}

@ -836,6 +908,20 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, null, (String)null)).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, (String[])null, (String[])null)).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}

public void testRemoveAliasEmptyAliasEmptyIndex() {
@ -846,6 +932,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
try {
admin().indices().prepareAliases().addAliasAction(new AliasActions(REMOVE, "", "")).get();
fail("Should throw " + ActionRequestValidationException.class.getSimpleName());
} catch (ActionRequestValidationException e) {
assertThat(e.validationErrors(), notNullValue());
assertThat(e.validationErrors().size(), equalTo(2));
}
}
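For illustration only, not part of the commit: the AliasActions overloads these tests exercise, in single-name and array form. The index and alias names below are made up, and the array constructor is assumed to accept real values the same way it accepts the (String[]) null casts above.

// Single index/alias pair, as used throughout the tests above.
AliasActions addOne = new AliasActions(AliasAction.Type.ADD, "index1", "alias1");
// Array form; validation now reports on "[index/indices]" and "[alias/aliases]" accordingly.
AliasActions addMany = new AliasActions(AliasAction.Type.ADD,
        new String[] {"index1", "index2"}, new String[] {"alias1"});
admin().indices().prepareAliases().addAliasAction(addOne).get();
admin().indices().prepareAliases().addAliasAction(addMany).get();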

public void testGetAllAliasesWorks() {

@ -396,7 +396,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
assertThat(query, instanceOf(BooleanQuery.class));
BooleanQuery boolQuery = (BooleanQuery) query;
int expectedMinimumShouldMatch = numberOfTerms * percent / 100;
if (simpleQueryStringBuilder.defaultOperator().equals(Operator.AND) && numberOfTerms > 1) {
if (numberOfTerms == 1 || simpleQueryStringBuilder.defaultOperator().equals(Operator.AND)) {
expectedMinimumShouldMatch = 0;
}
assertEquals(expectedMinimumShouldMatch, boolQuery.getMinimumNumberShouldMatch());
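Illustrative numbers for the expectation above, not from the commit: the percentage only applies when the default operator is OR and there is more than one term.

int numberOfTerms = 5, percent = 60;
int expectedMinimumShouldMatch = numberOfTerms * percent / 100;   // 5 * 60 / 100 = 3
// with Operator.AND as the default operator, or with a single term, every clause is
// already required, so the expected minimum-should-match drops to 0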

@ -459,6 +459,11 @@ public class FunctionScoreTests extends ESTestCase {
protected boolean doEquals(ScoreFunction other) {
return false;
}

@Override
protected int doHashCode() {
return 0;
}
}

public void testSimpleWeightedFunction() throws IOException, ExecutionException, InterruptedException {
@ -615,21 +620,7 @@ public class FunctionScoreTests extends ESTestCase {
Float minScore = randomBoolean() ? null : 1.0f;
CombineFunction combineFunction = randomFrom(CombineFunction.values());
float maxBoost = randomBoolean() ? Float.POSITIVE_INFINITY : randomFloat();
ScoreFunction function = randomBoolean() ? null : new ScoreFunction(combineFunction) {
@Override
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
return null;
}

@Override
public boolean needsScores() {
return false;
}
@Override
protected boolean doEquals(ScoreFunction other) {
return other == this;
}
};
ScoreFunction function = randomBoolean() ? null : new DummyScoreFunction(combineFunction);

FunctionScoreQuery q = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost);
FunctionScoreQuery q1 = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost);
@ -640,23 +631,7 @@ public class FunctionScoreTests extends ESTestCase {

FunctionScoreQuery diffQuery = new FunctionScoreQuery(new TermQuery(new Term("foo", "baz")), function, minScore, combineFunction, maxBoost);
FunctionScoreQuery diffMinScore = new FunctionScoreQuery(q.getSubQuery(), function, minScore == null ? 1.0f : null, combineFunction, maxBoost);
ScoreFunction otherFunciton = function == null ? new ScoreFunction(combineFunction) {
@Override
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
return null;
}

@Override
public boolean needsScores() {
return false;
}

@Override
protected boolean doEquals(ScoreFunction other) {
return other == this;
}

} : null;
ScoreFunction otherFunciton = function == null ? new DummyScoreFunction(combineFunction) : null;
FunctionScoreQuery diffFunction = new FunctionScoreQuery(q.getSubQuery(), otherFunciton, minScore, combineFunction, maxBoost);
FunctionScoreQuery diffMaxBoost = new FunctionScoreQuery(new TermQuery(new Term("foo", "bar")), function, minScore, combineFunction, maxBoost == 1.0f ? 0.9f : 1.0f);
q1.setBoost(3.0f);
@ -685,22 +660,7 @@ public class FunctionScoreTests extends ESTestCase {
public void testFilterFunctionScoreHashCodeAndEquals() {
ScoreMode mode = randomFrom(ScoreMode.values());
CombineFunction combineFunction = randomFrom(CombineFunction.values());
ScoreFunction scoreFunction = new ScoreFunction(combineFunction) {
@Override
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
return null;
}

@Override
public boolean needsScores() {
return false;
}

@Override
protected boolean doEquals(ScoreFunction other) {
return other == this;
}
};
ScoreFunction scoreFunction = new DummyScoreFunction(combineFunction);
Float minScore = randomBoolean() ? null : 1.0f;
Float maxBoost = randomBoolean() ? Float.POSITIVE_INFINITY : randomFloat();

@ -742,4 +702,30 @@ public class FunctionScoreTests extends ESTestCase {
}
}
}

private static class DummyScoreFunction extends ScoreFunction {
protected DummyScoreFunction(CombineFunction scoreCombiner) {
super(scoreCombiner);
}

@Override
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
return null;
}

@Override
public boolean needsScores() {
return false;
}

@Override
protected boolean doEquals(ScoreFunction other) {
return other == this;
}

@Override
protected int doHashCode() {
return 0;
};
}
}

@ -228,7 +228,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
fail("Expected MappingParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), equalTo("Unknown Similarity type [unknown_similarity] for [field1]"));
assertThat(e.getMessage(), equalTo("Unknown Similarity type [unknown_similarity] for field [field1]"));
}
}

@ -255,7 +255,7 @@ public class SimilarityTests extends ESSingleNodeTestCase {
parser.parse("type", new CompressedXContent(mapping));
fail("Expected MappingParsingException");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), equalTo("Unknown Similarity type [default] for [field1]"));
assertThat(e.getMessage(), equalTo("Unknown Similarity type [default] for field [field1]"));
}
}
}

@ -35,6 +35,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.hamcrest.Matchers;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -337,4 +338,20 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
}
}
}

public void testUpdateMappingOnAllTypes() throws IOException {
assertAcked(prepareCreate("index").addMapping("type1", "f", "type=string").addMapping("type2", "f", "type=string"));

assertAcked(client().admin().indices().preparePutMapping("index")
.setType("type1")
.setUpdateAllTypes(true)
.setSource("f", "type=string,analyzer=default,null_value=n/a")
.get());

GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type2").get();
MappingMetaData type2Mapping = mappings.getMappings().get("index").get("type2").get();
Map<String, Object> properties = (Map<String, Object>) type2Mapping.sourceAsMap().get("properties");
Map<String, Object> f = (Map<String, Object>) properties.get("f");
assertEquals("n/a", f.get("null_value"));
}
}

@ -245,8 +245,8 @@ public class SearchSourceBuilderTests extends ESTestCase {
builder.sort(SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values())));
break;
case 1:
builder.sort(SortBuilders.geoDistanceSort(randomAsciiOfLengthBetween(5, 20))
.geohashes(AbstractQueryTestCase.randomGeohash(1, 12)).order(randomFrom(SortOrder.values())));
builder.sort(SortBuilders.geoDistanceSort(randomAsciiOfLengthBetween(5, 20),
AbstractQueryTestCase.randomGeohash(1, 12)).order(randomFrom(SortOrder.values())));
break;
case 2:
builder.sort(SortBuilders.scoreSort().order(randomFrom(SortOrder.values())));
@ -318,7 +318,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
}
if (randomBoolean()) {
// NORELEASE need a random suggest builder method
builder.suggest(new SuggestBuilder().setText(randomAsciiOfLengthBetween(1, 5)).addSuggestion(
builder.suggest(new SuggestBuilder().setGlobalText(randomAsciiOfLengthBetween(1, 5)).addSuggestion(
SuggestBuilders.termSuggestion(randomAsciiOfLengthBetween(1, 5))));
}
if (randomBoolean()) {
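For illustration only, not part of the commit: the global suggest text is now set through setGlobalText, shown here with a made-up suggestion name and text; the calls used are the ones visible in the hunk above.

builder.suggest(new SuggestBuilder()
        .setGlobalText("some text")                       // was setText(...) before this change
        .addSuggestion(SuggestBuilders.termSuggestion("my-term-suggestion")));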

@ -0,0 +1,162 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.sort;

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public abstract class AbstractSortTestCase<T extends NamedWriteable<T> & ToXContent & SortElementParserTemp<T>> extends ESTestCase {

protected static NamedWriteableRegistry namedWriteableRegistry;

private static final int NUMBER_OF_TESTBUILDERS = 20;
static IndicesQueriesRegistry indicesQueriesRegistry;

@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
namedWriteableRegistry.registerPrototype(GeoDistanceSortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE);
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry();
}

@AfterClass
public static void afterClass() throws Exception {
namedWriteableRegistry = null;
}

/** Returns random sort that is put under test */
protected abstract T createTestItem();

/** Returns mutated version of original so the returned sort is different in terms of equals/hashcode */
protected abstract T mutate(T original) throws IOException;

/**
 * Test that creates new sort from a random test sort and checks both for equality
 */
public void testFromXContent() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
T testItem = createTestItem();

XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
builder.startObject();
testItem.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();

XContentParser itemParser = XContentHelper.createParser(builder.bytes());
itemParser.nextToken();

/*
 * filter out name of sort, or field name to sort on for element fieldSort
 */
itemParser.nextToken();
String elementName = itemParser.currentName();
itemParser.nextToken();

QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
context.reset(itemParser);
NamedWriteable<T> parsedItem = testItem.fromXContent(context, elementName);
assertNotSame(testItem, parsedItem);
assertEquals(testItem, parsedItem);
assertEquals(testItem.hashCode(), parsedItem.hashCode());
}
}

/**
 * Test serialization and deserialization of the test sort.
 */
public void testSerialization() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
T testsort = createTestItem();
T deserializedsort = copyItem(testsort);
assertEquals(testsort, deserializedsort);
assertEquals(testsort.hashCode(), deserializedsort.hashCode());
assertNotSame(testsort, deserializedsort);
}
}

/**
 * Test equality and hashCode properties
 */
public void testEqualsAndHashcode() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
T firstsort = createTestItem();
assertFalse("sort is equal to null", firstsort.equals(null));
assertFalse("sort is equal to incompatible type", firstsort.equals(""));
assertTrue("sort is not equal to self", firstsort.equals(firstsort));
assertThat("same sort's hashcode returns different values if called multiple times", firstsort.hashCode(),
equalTo(firstsort.hashCode()));
assertThat("different sorts should not be equal", mutate(firstsort), not(equalTo(firstsort)));
assertThat("different sorts should have different hashcode", mutate(firstsort).hashCode(), not(equalTo(firstsort.hashCode())));

T secondsort = copyItem(firstsort);
assertTrue("sort is not equal to self", secondsort.equals(secondsort));
assertTrue("sort is not equal to its copy", firstsort.equals(secondsort));
assertTrue("equals is not symmetric", secondsort.equals(firstsort));
assertThat("sort copy's hashcode is different from original hashcode", secondsort.hashCode(), equalTo(firstsort.hashCode()));

T thirdsort = copyItem(secondsort);
assertTrue("sort is not equal to self", thirdsort.equals(thirdsort));
assertTrue("sort is not equal to its copy", secondsort.equals(thirdsort));
assertThat("sort copy's hashcode is different from original hashcode", secondsort.hashCode(), equalTo(thirdsort.hashCode()));
assertTrue("equals is not transitive", firstsort.equals(thirdsort));
assertThat("sort copy's hashcode is different from original hashcode", firstsort.hashCode(), equalTo(thirdsort.hashCode()));
assertTrue("equals is not symmetric", thirdsort.equals(secondsort));
assertTrue("equals is not symmetric", thirdsort.equals(firstsort));
}
}

protected T copyItem(T original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
@SuppressWarnings("unchecked")
T prototype = (T) namedWriteableRegistry.getPrototype(getPrototype(), original.getWriteableName());
T copy = (T) prototype.readFrom(in);
return copy;
}
}
}

protected abstract Class<T> getPrototype();
}
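For illustration only, not part of the commit: a hypothetical concrete subclass, just to show what implementors of AbstractSortTestCase provide. It only uses calls visible in this diff (the SortBuilders factory, order(SortOrder), and the GeoDistanceSortBuilder prototype registered in init()), and it assumes the field name takes part in equals/hashCode.

public class GeoDistanceSortBuilderIllustrationTests extends AbstractSortTestCase<GeoDistanceSortBuilder> {

    @Override
    protected GeoDistanceSortBuilder createTestItem() {
        // random point and order on a fixed field
        GeoDistanceSortBuilder item = SortBuilders.geoDistanceSort("location", randomDouble(), randomDouble());
        item.order(randomFrom(SortOrder.values()));
        return item;
    }

    @Override
    protected GeoDistanceSortBuilder mutate(GeoDistanceSortBuilder original) throws IOException {
        // a sort on a different field is never equal to the original
        GeoDistanceSortBuilder mutated = SortBuilders.geoDistanceSort("other_location", randomDouble(), randomDouble());
        mutated.order(randomFrom(SortOrder.values()));
        return mutated;
    }

    @Override
    protected Class<GeoDistanceSortBuilder> getPrototype() {
        return GeoDistanceSortBuilder.class;
    }
}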

@ -33,7 +33,6 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
@ -53,7 +52,6 @@ import static org.elasticsearch.index.query.QueryBuilders.geoDistanceRangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
@ -62,7 +60,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

@ -201,14 +198,14 @@ public class GeoDistanceIT extends ESIntegTestCase {
// SORTING

searchResponse = client().prepareSearch().setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("location").point(40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
.addSort(SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
.actionGet();

assertHitCount(searchResponse, 7);
assertOrderedSearchHits(searchResponse, "1", "3", "4", "5", "6", "2", "7");

searchResponse = client().prepareSearch().setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("location").point(40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
.addSort(SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
.actionGet();

assertHitCount(searchResponse, 7);
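For illustration only, not part of the commit: the API change applied throughout this file, shown side by side with the coordinates the tests use; the point now goes into the factory call instead of a separate point(...) setter.

// before this change
SortBuilders.geoDistanceSort("location").point(40.7143528, -74.0059731).order(SortOrder.ASC);
// after this change
SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.ASC);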

@ -262,7 +259,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Asc
SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
.actionGet();

assertHitCount(searchResponse, 5);
@ -275,7 +272,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Asc, Mode: max
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max"))
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max"))
.execute().actionGet();

assertHitCount(searchResponse, 5);
@ -288,7 +285,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Desc
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
.actionGet();

assertHitCount(searchResponse, 5);
@ -301,7 +298,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Desc, Mode: min
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min"))
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min"))
.execute().actionGet();

assertHitCount(searchResponse, 5);
@ -313,7 +310,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
.execute().actionGet();

assertHitCount(searchResponse, 5);
@ -325,7 +322,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));

searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC))
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC))
.execute().actionGet();

assertHitCount(searchResponse, 5);
@ -336,10 +333,13 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

assertFailures(
try {
client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("sum")),
RestStatus.BAD_REQUEST, containsString("sort_mode [sum] isn't supported for sorting by geo distance"));
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode("sum"));
fail("sum should not be supported for sorting by geo distance");
} catch (IllegalArgumentException e) {
// expected
}
}

// Regression bug:
@ -371,7 +371,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Asc
SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC)).execute()
.actionGet();

assertHitCount(searchResponse, 2);
@ -381,7 +381,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Desc
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
.addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC)).execute()
.actionGet();

// Doc with missing geo point is first, is consistent with 0.20.x
@ -444,7 +444,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Asc
SearchResponse searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders
.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).order(SortOrder.ASC).setNestedPath("branches"))
.geoDistanceSort("branches.location", 40.7143528, -74.0059731).order(SortOrder.ASC).setNestedPath("branches"))
.execute().actionGet();

assertHitCount(searchResponse, 4);
@ -456,8 +456,8 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Asc, Mode: max
searchResponse = client()
.prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location")
.point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max").setNestedPath("branches"))
.prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location",
40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max").setNestedPath("branches"))
.execute().actionGet();

assertHitCount(searchResponse, 4);
@ -469,7 +469,7 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Desc
searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders
.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).order(SortOrder.DESC).setNestedPath("branches"))
.geoDistanceSort("branches.location", 40.7143528, -74.0059731).order(SortOrder.DESC).setNestedPath("branches"))
.execute().actionGet();

assertHitCount(searchResponse, 4);
@ -481,8 +481,8 @@ public class GeoDistanceIT extends ESIntegTestCase {

// Order: Desc, Mode: min
searchResponse = client()
.prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location")
.point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min").setNestedPath("branches"))
.prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location",
40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min").setNestedPath("branches"))
.execute().actionGet();

assertHitCount(searchResponse, 4);
@ -493,8 +493,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

searchResponse = client()
.prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location")
.point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC).setNestedPath("branches"))
.prepareSearch("companies").setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location",
40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC).setNestedPath("branches"))
.execute().actionGet();

assertHitCount(searchResponse, 4);
@ -505,8 +505,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d));

searchResponse = client().prepareSearch("companies")
.setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location").setNestedPath("branches")
.point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC).setNestedPath("branches"))
.setQuery(matchAllQuery()).addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
.setNestedPath("branches").sortMode("avg").order(SortOrder.DESC).setNestedPath("branches"))
.execute().actionGet();

assertHitCount(searchResponse, 4);
@ -517,8 +517,9 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

searchResponse = client().prepareSearch("companies").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("branches.location").setNestedFilter(termQuery("branches.name", "brooklyn"))
.point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC).setNestedPath("branches"))
.addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
.setNestedFilter(termQuery("branches.name", "brooklyn"))
.sortMode("avg").order(SortOrder.ASC).setNestedPath("branches"))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertFirstHit(searchResponse, hasId("4"));
@ -528,11 +529,14 @@ public class GeoDistanceIT extends ESIntegTestCase {
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), equalTo(Double.MAX_VALUE));

assertFailures(
try {
client().prepareSearch("companies").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("branches.location").point(40.7143528, -74.0059731).sortMode("sum")
.setNestedPath("branches")),
RestStatus.BAD_REQUEST, containsString("sort_mode [sum] isn't supported for sorting by geo distance"));
.addSort(SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731).sortMode("sum")
.setNestedPath("branches"));
fail("Sum should not be allowed as sort mode");
} catch (IllegalArgumentException e) {
//expected
}
}

/**
Some files were not shown because too many files have changed in this diff