Rest High Level client: Add List Tasks (#29546)
This change adds a `listTasks` method to the high-level Java `ClusterClient`, which allows listing running tasks through the Task Management API. Related to #27205.
commit 4478f10a2a (parent a75b8adce5)
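
For orientation, a minimal sketch of how the new methods are meant to be called, mirroring the documentation test added in this change; the configured `RestHighLevelClient` and the node IDs are assumptions for illustration only:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
    import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.tasks.TaskInfo;

    public class ListTasksUsageSketch {
        public static void listRunningTasks(RestHighLevelClient client) throws Exception {
            // Build the request; all filters are optional.
            ListTasksRequest request = new ListTasksRequest();
            request.setActions("cluster:*");        // only cluster-level actions
            request.setNodes("nodeId1", "nodeId2"); // hypothetical node ids
            request.setDetailed(true);              // include detailed task info

            // Synchronous execution via the new ClusterClient#listTasks
            ListTasksResponse response = client.cluster().listTasks(request);
            for (TaskInfo task : response.getTasks()) {
                System.out.println(task.getTaskId() + " -> " + task.getAction());
            }

            // Asynchronous variant added by the same change
            client.cluster().listTasksAsync(request, new ActionListener<ListTasksResponse>() {
                @Override
                public void onResponse(ListTasksResponse listTasksResponse) {
                    // handle the listed tasks
                }

                @Override
                public void onFailure(Exception e) {
                    // handle the failure
                }
            });
        }
    }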

ClusterClient.java

@@ -21,6 +21,8 @@ package org.elasticsearch.client;
 
 import org.apache.http.Header;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 

@@ -63,4 +65,26 @@ public final class ClusterClient {
         restHighLevelClient.performRequestAsyncAndParseEntity(clusterUpdateSettingsRequest, RequestConverters::clusterPutSettings,
                 ClusterUpdateSettingsResponse::fromXContent, listener, emptySet(), headers);
     }
+
+    /**
+     * Get current tasks using the Task Management API
+     * <p>
+     * See
+     * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html"> Task Management API on elastic.co</a>
+     */
+    public ListTasksResponse listTasks(ListTasksRequest request, Header... headers) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
+                emptySet(), headers);
+    }
+
+    /**
+     * Asynchronously get current tasks using the Task Management API
+     * <p>
+     * See
+     * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html"> Task Management API on elastic.co</a>
+     */
+    public void listTasksAsync(ListTasksRequest request, ActionListener<ListTasksResponse> listener, Header... headers) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
+                listener, emptySet(), headers);
+    }
 }

RequestConverters.java

@@ -29,6 +29,7 @@ import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.entity.ContentType;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;

@@ -45,8 +46,8 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
 import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
 import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
 import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
 import org.elasticsearch.action.admin.indices.shrink.ResizeType;
 import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;

@@ -83,6 +84,7 @@ import org.elasticsearch.index.rankeval.RankEvalRequest;
 import org.elasticsearch.rest.action.search.RestSearchAction;
 import org.elasticsearch.script.mustache.SearchTemplateRequest;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.tasks.TaskId;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;

@@ -606,6 +608,22 @@ final class RequestConverters {
         return request;
     }
 
+    static Request listTasks(ListTasksRequest listTaskRequest) {
+        if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) {
+            throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
+        }
+        Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
+        Params params = new Params(request);
+        params.withTimeout(listTaskRequest.getTimeout())
+            .withDetailed(listTaskRequest.getDetailed())
+            .withWaitForCompletion(listTaskRequest.getWaitForCompletion())
+            .withParentTaskId(listTaskRequest.getParentTaskId())
+            .withNodes(listTaskRequest.getNodes())
+            .withActions(listTaskRequest.getActions())
+            .putParam("group_by", "none");
+        return request;
+    }
+
     static Request rollover(RolloverRequest rolloverRequest) throws IOException {
         String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
                 .addPathPart(rolloverRequest.getNewIndexName()).build();

@@ -932,6 +950,41 @@ final class RequestConverters {
             return this;
         }
 
+        Params withDetailed(boolean detailed) {
+            if (detailed) {
+                return putParam("detailed", Boolean.TRUE.toString());
+            }
+            return this;
+        }
+
+        Params withWaitForCompletion(boolean waitForCompletion) {
+            if (waitForCompletion) {
+                return putParam("wait_for_completion", Boolean.TRUE.toString());
+            }
+            return this;
+        }
+
+        Params withNodes(String[] nodes) {
+            if (nodes != null && nodes.length > 0) {
+                return putParam("nodes", String.join(",", nodes));
+            }
+            return this;
+        }
+
+        Params withActions(String[] actions) {
+            if (actions != null && actions.length > 0) {
+                return putParam("actions", String.join(",", actions));
+            }
+            return this;
+        }
+
+        Params withParentTaskId(TaskId parentTaskId) {
+            if (parentTaskId != null && parentTaskId.isSet()) {
+                return putParam("parent_task_id", parentTaskId.toString());
+            }
+            return this;
+        }
+
         Params withVerify(boolean verify) {
             if (verify) {
                 return putParam("verify", Boolean.TRUE.toString());

ClusterClientIT.java

@@ -20,6 +20,9 @@
 package org.elasticsearch.client;
 
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;

@@ -29,13 +32,16 @@ import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.TaskInfo;
 
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
+import static java.util.Collections.emptyList;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 

@@ -105,4 +111,29 @@ public class ClusterClientIT extends ESRestHighLevelClientTestCase {
         assertThat(exception.getMessage(), equalTo(
                 "Elasticsearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]"));
     }
+
+    public void testListTasks() throws IOException {
+        ListTasksRequest request = new ListTasksRequest();
+        ListTasksResponse response = execute(request, highLevelClient().cluster()::listTasks, highLevelClient().cluster()::listTasksAsync);
+
+        assertThat(response, notNullValue());
+        assertThat(response.getNodeFailures(), equalTo(emptyList()));
+        assertThat(response.getTaskFailures(), equalTo(emptyList()));
+        // It's possible that there are other tasks except 'cluster:monitor/tasks/lists[n]' and 'action":"cluster:monitor/tasks/lists'
+        assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
+        boolean listTasksFound = false;
+        for (TaskGroup taskGroup : response.getTaskGroups()) {
+            TaskInfo parent = taskGroup.getTaskInfo();
+            if ("cluster:monitor/tasks/lists".equals(parent.getAction())) {
+                assertThat(taskGroup.getChildTasks().size(), equalTo(1));
+                TaskGroup childGroup = taskGroup.getChildTasks().iterator().next();
+                assertThat(childGroup.getChildTasks().isEmpty(), equalTo(true));
+                TaskInfo child = childGroup.getTaskInfo();
+                assertThat(child.getAction(), equalTo("cluster:monitor/tasks/lists[n]"));
+                assertThat(child.getParentTaskId(), equalTo(parent.getTaskId()));
+                listTasksFound = true;
+            }
+        }
+        assertTrue("List tasks were not found", listTasksFound);
+    }
 }

RequestConvertersTests.java

@@ -29,6 +29,7 @@ import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;

@@ -111,6 +112,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.rescore.QueryRescorerBuilder;
 import org.elasticsearch.search.suggest.SuggestBuilder;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
+import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.RandomObjects;
 

@@ -142,6 +144,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
 import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 
 public class RequestConvertersTests extends ESTestCase {

@@ -188,8 +191,7 @@ public class RequestConvertersTests extends ESTestCase {
 
         int numberOfRequests = randomIntBetween(0, 32);
         for (int i = 0; i < numberOfRequests; i++) {
-            MultiGetRequest.Item item =
-                    new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4));
+            MultiGetRequest.Item item = new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4));
             if (randomBoolean()) {
                 item.routing(randomAlphaOfLength(4));
             }

@@ -422,7 +424,8 @@ public class RequestConvertersTests extends ESTestCase {
         setRandomLocal(getSettingsRequest, expectedParams);
 
         if (randomBoolean()) {
-            //the request object will not have include_defaults present unless it is set to true
+            // the request object will not have include_defaults present unless it is set to
+            // true
            getSettingsRequest.includeDefaults(randomBoolean());
            if (getSettingsRequest.includeDefaults()) {
                expectedParams.put("include_defaults", Boolean.toString(true));

@@ -966,22 +969,21 @@ public class RequestConvertersTests extends ESTestCase {
             bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.SMILE));
             bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON));
             IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest));
-            assertEquals("Mismatching content-type found for request with content-type [JSON], " +
-                    "previous requests have content-type [SMILE]", exception.getMessage());
+            assertEquals(
+                    "Mismatching content-type found for request with content-type [JSON], " + "previous requests have content-type [SMILE]",
+                    exception.getMessage());
         }
         {
             BulkRequest bulkRequest = new BulkRequest();
-            bulkRequest.add(new IndexRequest("index", "type", "0")
-                    .source(singletonMap("field", "value"), XContentType.JSON));
-            bulkRequest.add(new IndexRequest("index", "type", "1")
-                    .source(singletonMap("field", "value"), XContentType.JSON));
+            bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.JSON));
+            bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON));
             bulkRequest.add(new UpdateRequest("index", "type", "2")
                     .doc(new IndexRequest().source(singletonMap("field", "value"), XContentType.JSON))
-                    .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE))
-            );
+                    .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE)));
             IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest));
-            assertEquals("Mismatching content-type found for request with content-type [SMILE], " +
-                    "previous requests have content-type [JSON]", exception.getMessage());
+            assertEquals(
+                    "Mismatching content-type found for request with content-type [SMILE], " + "previous requests have content-type [JSON]",
+                    exception.getMessage());
         }
         {
             XContentType xContentType = randomFrom(XContentType.CBOR, XContentType.YAML);

@@ -1024,7 +1026,8 @@ public class RequestConvertersTests extends ESTestCase {
         SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
         // rarely skip setting the search source completely
         if (frequently()) {
-            //frequently set the search source to have some content, otherwise leave it empty but still set it
+            // frequently set the search source to have some content, otherwise leave it
+            // empty but still set it
             if (frequently()) {
                 if (randomBoolean()) {
                     searchSourceBuilder.size(randomIntBetween(0, Integer.MAX_VALUE));

@@ -1094,7 +1097,8 @@ public class RequestConvertersTests extends ESTestCase {
         MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
         for (int i = 0; i < numberOfSearchRequests; i++) {
             SearchRequest searchRequest = randomSearchRequest(() -> {
-                // No need to return a very complex SearchSourceBuilder here, that is tested elsewhere
+                // No need to return a very complex SearchSourceBuilder here, that is tested
+                // elsewhere
                 SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
                 searchSourceBuilder.from(randomInt(10));
                 searchSourceBuilder.size(randomIntBetween(20, 100));

@@ -1102,14 +1106,13 @@ public class RequestConvertersTests extends ESTestCase {
             });
             // scroll is not supported in the current msearch api, so unset it:
             searchRequest.scroll((Scroll) null);
-            // only expand_wildcards, ignore_unavailable and allow_no_indices can be specified from msearch api, so unset other options:
+            // only expand_wildcards, ignore_unavailable and allow_no_indices can be
+            // specified from msearch api, so unset other options:
             IndicesOptions randomlyGenerated = searchRequest.indicesOptions();
             IndicesOptions msearchDefault = new MultiSearchRequest().indicesOptions();
-            searchRequest.indicesOptions(IndicesOptions.fromOptions(
-                    randomlyGenerated.ignoreUnavailable(), randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(),
-                    randomlyGenerated.expandWildcardsClosed(), msearchDefault.allowAliasesToMultipleIndices(),
-                    msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases()
-            ));
+            searchRequest.indicesOptions(IndicesOptions.fromOptions(randomlyGenerated.ignoreUnavailable(),
+                    randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), randomlyGenerated.expandWildcardsClosed(),
+                    msearchDefault.allowAliasesToMultipleIndices(), msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases()));
             multiSearchRequest.add(searchRequest);
         }
 

@@ -1134,8 +1137,8 @@ public class RequestConvertersTests extends ESTestCase {
             requests.add(searchRequest);
         };
         MultiSearchRequest.readMultiLineFormat(new BytesArray(EntityUtils.toByteArray(request.getEntity())),
-                REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null,
-                null, xContentRegistry(), true);
+                REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null, null,
+                xContentRegistry(), true);
         assertEquals(requests, multiSearchRequest.requests());
     }
 

@@ -1261,15 +1264,15 @@ public class RequestConvertersTests extends ESTestCase {
     public void testExistsAliasNoAliasNoIndex() {
         {
             GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
-            IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
-                    RequestConverters.existsAlias(getAliasesRequest));
+            IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
+                    () -> RequestConverters.existsAlias(getAliasesRequest));
             assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
         }
         {
             GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null);
             getAliasesRequest.indices((String[]) null);
-            IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
-                    RequestConverters.existsAlias(getAliasesRequest));
+            IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
+                    () -> RequestConverters.existsAlias(getAliasesRequest));
             assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
         }
     }

@@ -1279,14 +1282,10 @@ public class RequestConvertersTests extends ESTestCase {
         String[] indices = randomIndicesNames(0, 5);
         String[] fields = generateRandomStringArray(5, 10, false, false);
 
-        FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest()
-                .indices(indices)
-                .fields(fields);
+        FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest().indices(indices).fields(fields);
 
         Map<String, String> indicesOptionsParams = new HashMap<>();
-        setRandomIndicesOptions(fieldCapabilitiesRequest::indicesOptions,
-                fieldCapabilitiesRequest::indicesOptions,
-                indicesOptionsParams);
+        setRandomIndicesOptions(fieldCapabilitiesRequest::indicesOptions, fieldCapabilitiesRequest::indicesOptions, indicesOptionsParams);
 
         Request request = RequestConverters.fieldCaps(fieldCapabilitiesRequest);
 

@@ -1301,12 +1300,13 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(endpoint.toString(), request.getEndpoint());
         assertEquals(4, request.getParameters().size());
 
-        // Note that we don't check the field param value explicitly, as field names are passed through
-        // a hash set before being added to the request, and can appear in a non-deterministic order.
+        // Note that we don't check the field param value explicitly, as field names are
+        // passed through
+        // a hash set before being added to the request, and can appear in a
+        // non-deterministic order.
         assertThat(request.getParameters(), hasKey("fields"));
         String[] requestFields = Strings.splitStringByCommaToArray(request.getParameters().get("fields"));
-        assertEquals(new HashSet<>(Arrays.asList(fields)),
-                new HashSet<>(Arrays.asList(requestFields)));
+        assertEquals(new HashSet<>(Arrays.asList(fields)), new HashSet<>(Arrays.asList(requestFields)));
 
         for (Map.Entry<String, String> param : indicesOptionsParams.entrySet()) {
             assertThat(request.getParameters(), hasEntry(param.getKey(), param.getValue()));

@@ -1465,6 +1465,66 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals(expectedParams, request.getParameters());
     }
 
+    public void testListTasks() {
+        {
+            ListTasksRequest request = new ListTasksRequest();
+            Map<String, String> expectedParams = new HashMap<>();
+            if (randomBoolean()) {
+                request.setDetailed(randomBoolean());
+                if (request.getDetailed()) {
+                    expectedParams.put("detailed", "true");
+                }
+            }
+            if (randomBoolean()) {
+                request.setWaitForCompletion(randomBoolean());
+                if (request.getWaitForCompletion()) {
+                    expectedParams.put("wait_for_completion", "true");
+                }
+            }
+            if (randomBoolean()) {
+                String timeout = randomTimeValue();
+                request.setTimeout(timeout);
+                expectedParams.put("timeout", timeout);
+            }
+            if (randomBoolean()) {
+                if (randomBoolean()) {
+                    TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong());
+                    request.setParentTaskId(taskId);
+                    expectedParams.put("parent_task_id", taskId.toString());
+                } else {
+                    request.setParentTask(TaskId.EMPTY_TASK_ID);
+                }
+            }
+            if (randomBoolean()) {
+                String[] nodes = generateRandomStringArray(10, 8, false);
+                request.setNodes(nodes);
+                if (nodes.length > 0) {
+                    expectedParams.put("nodes", String.join(",", nodes));
+                }
+            }
+            if (randomBoolean()) {
+                String[] actions = generateRandomStringArray(10, 8, false);
+                request.setActions(actions);
+                if (actions.length > 0) {
+                    expectedParams.put("actions", String.join(",", actions));
+                }
+            }
+            expectedParams.put("group_by", "none");
+            Request httpRequest = RequestConverters.listTasks(request);
+            assertThat(httpRequest, notNullValue());
+            assertThat(httpRequest.getMethod(), equalTo(HttpGet.METHOD_NAME));
+            assertThat(httpRequest.getEntity(), nullValue());
+            assertThat(httpRequest.getEndpoint(), equalTo("/_tasks"));
+            assertThat(httpRequest.getParameters(), equalTo(expectedParams));
+        }
+        {
+            ListTasksRequest request = new ListTasksRequest();
+            request.setTaskId(new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()));
+            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.listTasks(request));
+            assertEquals("TaskId cannot be used for list tasks request", exception.getMessage());
+        }
+    }
+
     public void testGetRepositories() {
         Map<String, String> expectedParams = new HashMap<>();
         StringBuilder endpoint = new StringBuilder("/_snapshot");

@@ -1513,8 +1573,7 @@ public class RequestConvertersTests extends ESTestCase {
         names.put("-#template", "-%23template");
         names.put("foo^bar", "foo%5Ebar");
 
-        PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest()
-                .name(randomFrom(names.keySet()))
+        PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(randomFrom(names.keySet()))
                 .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
         if (randomBoolean()) {
             putTemplateRequest.order(randomInt());

@@ -1572,14 +1631,12 @@ public class RequestConvertersTests extends ESTestCase {
             assertEquals("/a/b", endpointBuilder.build());
         }
         {
-            EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a").addPathPart("b")
-                    .addPathPartAsIs("_create");
+            EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a").addPathPart("b").addPathPartAsIs("_create");
             assertEquals("/a/b/_create", endpointBuilder.build());
         }
 
         {
-            EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a", "b", "c")
-                    .addPathPartAsIs("_create");
+            EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a", "b", "c").addPathPartAsIs("_create");
             assertEquals("/a/b/c/_create", endpointBuilder.build());
         }
         {

@@ -1638,13 +1695,12 @@ public class RequestConvertersTests extends ESTestCase {
             assertEquals("/foo%5Ebar", endpointBuilder.build());
         }
         {
-            EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("cluster1:index1,index2")
-                    .addPathPartAsIs("_search");
+            EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("cluster1:index1,index2").addPathPartAsIs("_search");
             assertEquals("/cluster1:index1,index2/_search", endpointBuilder.build());
         }
         {
-            EndpointBuilder endpointBuilder = new EndpointBuilder()
-                    .addCommaSeparatedPathParts(new String[]{"index1", "index2"}).addPathPartAsIs("cache/clear");
+            EndpointBuilder endpointBuilder = new EndpointBuilder().addCommaSeparatedPathParts(new String[] { "index1", "index2" })
+                    .addPathPartAsIs("cache/clear");
             assertEquals("/index1,index2/cache/clear", endpointBuilder.build());
         }
     }

@@ -1654,10 +1710,10 @@ public class RequestConvertersTests extends ESTestCase {
         assertEquals("/index/type/id/_endpoint", RequestConverters.endpoint("index", "type", "id", "_endpoint"));
         assertEquals("/index1,index2", RequestConverters.endpoint(new String[] { "index1", "index2" }));
         assertEquals("/index1,index2/_endpoint", RequestConverters.endpoint(new String[] { "index1", "index2" }, "_endpoint"));
-        assertEquals("/index1,index2/type1,type2/_endpoint", RequestConverters.endpoint(new String[]{"index1", "index2"},
-                new String[]{"type1", "type2"}, "_endpoint"));
-        assertEquals("/index1,index2/_endpoint/suffix1,suffix2", RequestConverters.endpoint(new String[]{"index1", "index2"},
-                "_endpoint", new String[]{"suffix1", "suffix2"}));
+        assertEquals("/index1,index2/type1,type2/_endpoint",
+                RequestConverters.endpoint(new String[] { "index1", "index2" }, new String[] { "type1", "type2" }, "_endpoint"));
+        assertEquals("/index1,index2/_endpoint/suffix1,suffix2",
+                RequestConverters.endpoint(new String[] { "index1", "index2" }, "_endpoint", new String[] { "suffix1", "suffix2" }));
     }
 
     public void testCreateContentType() {

@@ -1673,20 +1729,22 @@ public class RequestConvertersTests extends ESTestCase {
 
         XContentType bulkContentType = randomBoolean() ? xContentType : null;
 
-        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
-                enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR), bulkContentType));
+        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
+                () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR),
+                        bulkContentType));
         assertEquals("Unsupported content-type found for request with content-type [CBOR], only JSON and SMILE are supported",
                 exception.getMessage());
 
-        exception = expectThrows(IllegalArgumentException.class, () ->
-                enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML), bulkContentType));
+        exception = expectThrows(IllegalArgumentException.class,
+                () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML),
+                        bulkContentType));
         assertEquals("Unsupported content-type found for request with content-type [YAML], only JSON and SMILE are supported",
                 exception.getMessage());
 
         XContentType requestContentType = xContentType == XContentType.JSON ? XContentType.SMILE : XContentType.JSON;
 
-        exception = expectThrows(IllegalArgumentException.class, () ->
-                enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType));
+        exception = expectThrows(IllegalArgumentException.class,
+                () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType));
         assertEquals("Mismatching content-type found for request with content-type [" + requestContentType + "], "
                 + "previous requests have content-type [" + xContentType + "]", exception.getMessage());
     }

@@ -1757,8 +1815,7 @@ public class RequestConvertersTests extends ESTestCase {
             Map<String, String> expectedParams) {
 
         if (randomBoolean()) {
-            setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(),
-                    randomBoolean()));
+            setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
         }
         expectedParams.put("ignore_unavailable", Boolean.toString(getter.get().ignoreUnavailable()));
         expectedParams.put("allow_no_indices", Boolean.toString(getter.get().allowNoIndices()));

ClusterClientDocumentationIT.java

@@ -19,8 +19,14 @@
 
 package org.elasticsearch.client.documentation;
 
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.LatchedActionListener;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
 import org.elasticsearch.client.ESRestHighLevelClientTestCase;

@@ -31,14 +37,20 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.indices.recovery.RecoverySettings;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.tasks.TaskInfo;
 
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
+import static java.util.Collections.emptyList;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.notNullValue;
 
 /**
  * This class is used to generate the Java Cluster API documentation.

@@ -177,4 +189,87 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
             assertTrue(latch.await(30L, TimeUnit.SECONDS));
         }
     }
+
+    public void testListTasks() throws IOException {
+        RestHighLevelClient client = highLevelClient();
+        {
+            // tag::list-tasks-request
+            ListTasksRequest request = new ListTasksRequest();
+            // end::list-tasks-request
+
+            // tag::list-tasks-request-filter
+            request.setActions("cluster:*"); // <1>
+            request.setNodes("nodeId1", "nodeId2"); // <2>
+            request.setParentTaskId(new TaskId("parentTaskId", 42)); // <3>
+            // end::list-tasks-request-filter
+
+            // tag::list-tasks-request-detailed
+            request.setDetailed(true); // <1>
+            // end::list-tasks-request-detailed
+
+            // tag::list-tasks-request-wait-completion
+            request.setWaitForCompletion(true); // <1>
+            request.setTimeout(TimeValue.timeValueSeconds(50)); // <2>
+            request.setTimeout("50s"); // <3>
+            // end::list-tasks-request-wait-completion
+        }
+
+        ListTasksRequest request = new ListTasksRequest();
+
+        // tag::list-tasks-execute
+        ListTasksResponse response = client.cluster().listTasks(request);
+        // end::list-tasks-execute
+
+        assertThat(response, notNullValue());
+
+        // tag::list-tasks-response-tasks
+        List<TaskInfo> tasks = response.getTasks(); // <1>
+        // end::list-tasks-response-tasks
+
+        // tag::list-tasks-response-calc
+        Map<String, List<TaskInfo>> perNodeTasks = response.getPerNodeTasks(); // <1>
+        List<TaskGroup> groups = response.getTaskGroups(); // <2>
+        // end::list-tasks-response-calc
+
+        // tag::list-tasks-response-failures
+        List<ElasticsearchException> nodeFailures = response.getNodeFailures(); // <1>
+        List<TaskOperationFailure> taskFailures = response.getTaskFailures(); // <2>
+        // end::list-tasks-response-failures
+
+        assertThat(response.getNodeFailures(), equalTo(emptyList()));
+        assertThat(response.getTaskFailures(), equalTo(emptyList()));
+        assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
+    }
+
+    public void testListTasksAsync() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+        {
+            ListTasksRequest request = new ListTasksRequest();
+
+            // tag::list-tasks-execute-listener
+            ActionListener<ListTasksResponse> listener =
+                new ActionListener<ListTasksResponse>() {
+                    @Override
+                    public void onResponse(ListTasksResponse response) {
+                        // <1>
+                    }
+
+                    @Override
+                    public void onFailure(Exception e) {
+                        // <2>
+                    }
+                };
+            // end::list-tasks-execute-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::list-tasks-execute-async
+            client.cluster().listTasksAsync(request, listener); // <1>
+            // end::list-tasks-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
 }

cluster/list_tasks.asciidoc (new file)

@@ -0,0 +1,101 @@
+[[java-rest-high-cluster-list-tasks]]
+=== List Tasks API
+
+The List Tasks API allows you to get information about the tasks currently executing in the cluster.
+
+[[java-rest-high-cluster-list-tasks-request]]
+==== List Tasks Request
+
+A `ListTasksRequest`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request]
+--------------------------------------------------
+There are no required parameters. By default the client will list all tasks and will not wait
+for task completion.
+
+==== Parameters
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-filter]
+--------------------------------------------------
+<1> Request only cluster-related tasks
+<2> Request all tasks running on nodes nodeId1 and nodeId2
+<3> Request only children of a particular task
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-detailed]
+--------------------------------------------------
+<1> Should the information include detailed, potentially slow to generate data. Defaults to `false`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-wait-completion]
+--------------------------------------------------
+<1> Should this request wait for all found tasks to complete. Defaults to `false`
+<2> Timeout for the request as a `TimeValue`. Applicable only if `setWaitForCompletion` is `true`.
+Defaults to 30 seconds
+<3> Timeout as a `String`
+
+[[java-rest-high-cluster-list-tasks-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute]
+--------------------------------------------------
+
+[[java-rest-high-cluster-list-tasks-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a list tasks request requires both the
+`ListTasksRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-async]
+--------------------------------------------------
+<1> The `ListTasksRequest` to execute and the `ActionListener` to use
+when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `ListTasksResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of a failure. The raised exception is provided as an argument
+
+[[java-rest-high-cluster-list-tasks-response]]
+==== List Tasks Response
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-tasks]
+--------------------------------------------------
+<1> List of currently running tasks
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-calc]
+--------------------------------------------------
+<1> List of tasks grouped by a node
+<2> List of tasks grouped by a parent task
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-failures]
+--------------------------------------------------
+<1> List of node failures
+<2> List of task failures

Supported APIs index (asciidoc)

@@ -104,8 +104,10 @@ include::indices/put_template.asciidoc[]
 The Java High Level REST Client supports the following Cluster APIs:
 
 * <<java-rest-high-cluster-put-settings>>
+* <<java-rest-high-cluster-list-tasks>>
 
 include::cluster/put_settings.asciidoc[]
+include::cluster/list_tasks.asciidoc[]
 
 == Snapshot APIs
 
@@ -21,17 +21,20 @@ package org.elasticsearch.action;

 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.RestStatus;

 import java.io.IOException;

 import static org.elasticsearch.ExceptionsHelper.detailedMessage;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

 /**
  * Information about task operation failures
@@ -39,7 +42,10 @@ import static org.elasticsearch.ExceptionsHelper.detailedMessage;
  * The class is final due to serialization limitations
  */
 public final class TaskOperationFailure implements Writeable, ToXContentFragment {
+    private static final String TASK_ID = "task_id";
+    private static final String NODE_ID = "node_id";
+    private static final String STATUS = "status";
+    private static final String REASON = "reason";
     private final String nodeId;

     private final long taskId;
@@ -48,6 +54,21 @@ public final class TaskOperationFailure implements Writeable, ToXContentFragment
     private final RestStatus status;

+    private static final ConstructingObjectParser<TaskOperationFailure, Void> PARSER =
+            new ConstructingObjectParser<>("task_info", true, constructorObjects -> {
+                int i = 0;
+                String nodeId = (String) constructorObjects[i++];
+                long taskId = (long) constructorObjects[i++];
+                ElasticsearchException reason = (ElasticsearchException) constructorObjects[i];
+                return new TaskOperationFailure(nodeId, taskId, reason);
+            });
+
+    static {
+        PARSER.declareString(constructorArg(), new ParseField(NODE_ID));
+        PARSER.declareLong(constructorArg(), new ParseField(TASK_ID));
+        PARSER.declareObject(constructorArg(), (parser, c) -> ElasticsearchException.fromXContent(parser), new ParseField(REASON));
+    }
+
     public TaskOperationFailure(String nodeId, long taskId, Exception e) {
         this.nodeId = nodeId;
         this.taskId = taskId;
@@ -98,13 +119,17 @@ public final class TaskOperationFailure implements Writeable, ToXContentFragment
         return "[" + nodeId + "][" + taskId + "] failed, reason [" + getReason() + "]";
     }

+    public static TaskOperationFailure fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.field("task_id", getTaskId());
-        builder.field("node_id", getNodeId());
-        builder.field("status", status.name());
+        builder.field(TASK_ID, getTaskId());
+        builder.field(NODE_ID, getNodeId());
+        builder.field(STATUS, status.name());
         if (reason != null) {
-            builder.field("reason");
+            builder.field(REASON);
             builder.startObject();
             ElasticsearchException.generateThrowableXContent(builder, params, reason);
             builder.endObject();
@@ -112,5 +137,4 @@ public final class TaskOperationFailure implements Writeable, ToXContentFragment
         return builder;
     }

 }
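A rough parsing sketch, not part of the diff, of what the parser added above accepts. The JSON mirrors what toXContent now emits; the XContent plumbing (NamedXContentRegistry.EMPTY plus DeprecationHandler.THROW_UNSUPPORTED_OPERATION) is an assumption based on the XContent helpers of this era. node_id, task_id and reason are the declared constructor arguments; status is ignored by the lenient parser and recomputed from the parsed exception.

import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class TaskOperationFailureParsingSketch {
    static TaskOperationFailure parse() throws IOException {
        String json = "{\"task_id\":42,\"node_id\":\"node-1\",\"status\":\"INTERNAL_SERVER_ERROR\","
                + "\"reason\":{\"type\":\"illegal_state_exception\",\"reason\":\"message\"}}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            // The reason object is rebuilt as a generic ElasticsearchException, so the original
            // exception type only survives inside the message text.
            return TaskOperationFailure.fromXContent(parser);
        }
    }
}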
@@ -19,16 +19,19 @@

 package org.elasticsearch.action.admin.cluster.node.tasks.list;

-import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.tasks.BaseTasksResponse;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.tasks.TaskInfo;

@@ -40,10 +43,16 @@ import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;

+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
 /**
  * Returns the list of tasks currently running on the nodes
  */
 public class ListTasksResponse extends BaseTasksResponse implements ToXContentObject {
+    private static final String TASKS = "tasks";
+    private static final String TASK_FAILURES = "task_failures";
+    private static final String NODE_FAILURES = "node_failures";

     private List<TaskInfo> tasks;

@@ -56,11 +65,31 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
     }

     public ListTasksResponse(List<TaskInfo> tasks, List<TaskOperationFailure> taskFailures,
-            List<? extends FailedNodeException> nodeFailures) {
+            List<? extends ElasticsearchException> nodeFailures) {
         super(taskFailures, nodeFailures);
         this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks));
     }

+    private static final ConstructingObjectParser<ListTasksResponse, Void> PARSER =
+            new ConstructingObjectParser<>("list_tasks_response", true,
+                constructingObjects -> {
+                    int i = 0;
+                    @SuppressWarnings("unchecked")
+                    List<TaskInfo> tasks = (List<TaskInfo>) constructingObjects[i++];
+                    @SuppressWarnings("unchecked")
+                    List<TaskOperationFailure> tasksFailures = (List<TaskOperationFailure>) constructingObjects[i++];
+                    @SuppressWarnings("unchecked")
+                    List<ElasticsearchException> nodeFailures = (List<ElasticsearchException>) constructingObjects[i];
+                    return new ListTasksResponse(tasks, tasksFailures, nodeFailures);
+                });
+
+    static {
+        PARSER.declareObjectArray(constructorArg(), TaskInfo.PARSER, new ParseField(TASKS));
+        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TaskOperationFailure.fromXContent(p), new ParseField(TASK_FAILURES));
+        PARSER.declareObjectArray(optionalConstructorArg(),
+            (parser, c) -> ElasticsearchException.fromXContent(parser), new ParseField(NODE_FAILURES));
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
@@ -159,7 +188,7 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
                     builder.endObject();
                 }
             }
-            builder.startObject("tasks");
+            builder.startObject(TASKS);
             for(TaskInfo task : entry.getValue()) {
                 builder.startObject(task.getTaskId().toString());
                 task.toXContent(builder, params);
@@ -177,7 +206,7 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
     */
    public XContentBuilder toXContentGroupedByParents(XContentBuilder builder, Params params) throws IOException {
        toXContentCommon(builder, params);
-       builder.startObject("tasks");
+       builder.startObject(TASKS);
        for (TaskGroup group : getTaskGroups()) {
            builder.field(group.getTaskInfo().getTaskId().toString());
            group.toXContent(builder, params);
@@ -191,7 +220,7 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
     */
    public XContentBuilder toXContentGroupedByNone(XContentBuilder builder, Params params) throws IOException {
        toXContentCommon(builder, params);
-       builder.startArray("tasks");
+       builder.startArray(TASKS);
        for (TaskInfo taskInfo : getTasks()) {
            builder.startObject();
            taskInfo.toXContent(builder, params);
@@ -204,14 +233,14 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
-       toXContentGroupedByParents(builder, params);
+       toXContentGroupedByNone(builder, params);
        builder.endObject();
        return builder;
    }

    private void toXContentCommon(XContentBuilder builder, Params params) throws IOException {
        if (getTaskFailures() != null && getTaskFailures().size() > 0) {
-           builder.startArray("task_failures");
+           builder.startArray(TASK_FAILURES);
            for (TaskOperationFailure ex : getTaskFailures()){
                builder.startObject();
                builder.value(ex);
@@ -221,8 +250,8 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
        }

        if (getNodeFailures() != null && getNodeFailures().size() > 0) {
-           builder.startArray("node_failures");
-           for (FailedNodeException ex : getNodeFailures()) {
+           builder.startArray(NODE_FAILURES);
+           for (ElasticsearchException ex : getNodeFailures()) {
                builder.startObject();
                ex.toXContent(builder, params);
                builder.endObject();
@@ -231,6 +260,10 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContentOb
        }
    }

+   public static ListTasksResponse fromXContent(XContentParser parser) {
+       return PARSER.apply(parser, null);
+   }
+
    @Override
    public String toString() {
        return Strings.toString(this);
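A hedged round-trip sketch, not part of the diff: the JSON below is the flat tasks array that the updated ListTasksResponseTests expectation later in this change spells out, fed back through the new fromXContent. The parser construction is assumed from the same XContent helpers as above.

import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class ListTasksResponseParsingSketch {
    static ListTasksResponse parse() throws IOException {
        String json = "{\"tasks\":[{\"node\":\"node1\",\"id\":1,\"type\":\"dummy-type\",\"action\":\"dummy-action\","
                + "\"description\":\"dummy-description\",\"start_time_in_millis\":0,\"running_time_in_nanos\":1,"
                + "\"cancellable\":true,\"parent_task_id\":\"node1:0\",\"headers\":{\"foo\":\"bar\"}}]}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            ListTasksResponse response = ListTasksResponse.fromXContent(parser);
            assert response.getTasks().size() == 1;  // one TaskInfo, no task or node failures
            return response;
        }
    }
}

This flat shape also appears to be why toXContent now delegates to toXContentGroupedByNone: declareObjectArray can only consume an array of task objects, not the object keyed by task id that the grouped-by-parents rendering produces.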
@@ -42,9 +42,9 @@ import static org.elasticsearch.ExceptionsHelper.rethrowAndSuppress;
  */
 public class BaseTasksResponse extends ActionResponse {
     private List<TaskOperationFailure> taskFailures;
-    private List<FailedNodeException> nodeFailures;
+    private List<ElasticsearchException> nodeFailures;

-    public BaseTasksResponse(List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException> nodeFailures) {
+    public BaseTasksResponse(List<TaskOperationFailure> taskFailures, List<? extends ElasticsearchException> nodeFailures) {
         this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(taskFailures));
         this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(nodeFailures));
     }
@@ -59,7 +59,7 @@ public class BaseTasksResponse extends ActionResponse {
     /**
      * The list of node failures exception.
      */
-    public List<FailedNodeException> getNodeFailures() {
+    public List<ElasticsearchException> getNodeFailures() {
         return nodeFailures;
     }

@@ -99,7 +99,7 @@ public class BaseTasksResponse extends ActionResponse {
             exp.writeTo(out);
         }
         out.writeVInt(nodeFailures.size());
-        for (FailedNodeException exp : nodeFailures) {
+        for (ElasticsearchException exp : nodeFailures) {
             exp.writeTo(out);
         }
     }
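A short sketch of what the widened signatures accept; the values are illustrative. FailedNodeException still fits because it extends ElasticsearchException, while a response rebuilt by fromXContent can only supply the generic type, which is what the updated ListTasksResponseTests later in this diff asserts (the failed_node_exception type only survives in the message).

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;

import java.util.ArrayList;
import java.util.List;

import static java.util.Collections.emptyList;

public class NodeFailuresSketch {
    static ListTasksResponse withMixedNodeFailures() {
        List<ElasticsearchException> nodeFailures = new ArrayList<>();
        // Transport-level responses still carry the concrete exception type...
        nodeFailures.add(new FailedNodeException("node-1", "failed on node-1", null));
        // ...while responses parsed back from XContent only have the generic type.
        nodeFailures.add(new ElasticsearchException("failed on node-2"));
        return new ListTasksResponse(emptyList(), emptyList(), nodeFailures);
    }
}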
@@ -103,18 +103,17 @@ public class RestListTasksAction extends BaseRestHandler {
                     return new BytesRestResponse(RestStatus.OK, builder);
                 }
             };
-        } else if ("none".equals(groupBy)) {
+        } else if ("parents".equals(groupBy)) {
             return new RestBuilderListener<T>(channel) {
                 @Override
                 public RestResponse buildResponse(T response, XContentBuilder builder) throws Exception {
                     builder.startObject();
-                    response.toXContentGroupedByNone(builder, channel.request());
+                    response.toXContentGroupedByParents(builder, channel.request());
                     builder.endObject();
                     return new BytesRestResponse(RestStatus.OK, builder);
                 }
             };
-        } else if ("parents".equals(groupBy)) {
+        } else if ("none".equals(groupBy)) {
             return new RestToXContentListener<>(channel);
         } else {
             throw new IllegalArgumentException("[group_by] must be one of [nodes], [parents] or [none] but was [" + groupBy + "]");
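A sketch of the two renderings this branch swap is about, using an empty response so the expected strings line up with the updated tests later in the diff: group_by=none is now what the stock RestToXContentListener sends because plain toXContent emits the flat form, while group_by=parents needs the explicit builder shown above. BytesReference.bytes is used here only to turn the builder into a string.

import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;

import static java.util.Collections.emptyList;

public class GroupBySketch {
    static void render() throws IOException {
        ListTasksResponse response = new ListTasksResponse(emptyList(), emptyList(), emptyList());

        // group_by=none: the default rendering after this change.
        String flat = response.toString();                 // {"tasks":[]}

        // group_by=parents: built explicitly by the swapped branch.
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        response.toXContentGroupedByParents(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        String byParents = BytesReference.bytes(builder).utf8ToString();  // {"tasks":{}}

        System.out.println(flat);
        System.out.println(byParents);
    }
}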
@@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.ObjectParserHelper;
 import org.elasticsearch.common.xcontent.ToXContent.Params;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;

 import java.io.IOException;
 import java.util.Collections;
@@ -214,6 +215,10 @@ public final class TaskInfo implements Writeable, ToXContentFragment {
         return builder;
     }

+    public static TaskInfo fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
+    }
+
     public static final ConstructingObjectParser<TaskInfo, Void> PARSER = new ConstructingObjectParser<>(
             "task_info", true, a -> {
                 int i = 0;
@@ -0,0 +1,63 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class TaskOperationFailureTests extends AbstractXContentTestCase<TaskOperationFailure> {
+
+    @Override
+    protected TaskOperationFailure createTestInstance() {
+        return new TaskOperationFailure(randomAlphaOfLength(5), randomNonNegativeLong(), new IllegalStateException("message"));
+    }
+
+    @Override
+    protected TaskOperationFailure doParseInstance(XContentParser parser) throws IOException {
+        return TaskOperationFailure.fromXContent(parser);
+    }
+
+    @Override
+    protected void assertEqualInstances(TaskOperationFailure expectedInstance, TaskOperationFailure newInstance) {
+        assertNotSame(expectedInstance, newInstance);
+        assertThat(newInstance.getNodeId(), equalTo(expectedInstance.getNodeId()));
+        assertThat(newInstance.getTaskId(), equalTo(expectedInstance.getTaskId()));
+        assertThat(newInstance.getStatus(), equalTo(expectedInstance.getStatus()));
+        // XContent loses the original exception and wraps it as a message in Elasticsearch exception
+        assertThat(newInstance.getCause().getMessage(), equalTo("Elasticsearch exception [type=illegal_state_exception, reason=message]"));
+        // getReason returns Exception class and the message
+        assertThat(newInstance.getReason(),
+            equalTo("ElasticsearchException[Elasticsearch exception [type=illegal_state_exception, reason=message]]"));
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+
+    @Override
+    protected boolean assertToXContentEquivalence() {
+        return false;
+    }
+}
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.action.admin.cluster.node.tasks;

+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
@@ -716,7 +717,7 @@ public class TasksIT extends ESIntegTestCase {
             .setTimeout(timeValueSeconds(10)).get();

         // It should finish quickly and without complaint and list the list tasks themselves
-        assertThat(response.getNodeFailures(), emptyCollectionOf(FailedNodeException.class));
+        assertThat(response.getNodeFailures(), emptyCollectionOf(ElasticsearchException.class));
         assertThat(response.getTaskFailures(), emptyCollectionOf(TaskOperationFailure.class));
         assertThat(response.getTasks().size(), greaterThanOrEqualTo(1));
     }
@@ -19,18 +19,33 @@

 package org.elasticsearch.tasks;

+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.AbstractXContentTestCase;

+import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.List;
+import java.util.Objects;

 import static java.util.Collections.emptyList;
 import static java.util.Collections.singletonList;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.notNullValue;

-public class ListTasksResponseTests extends ESTestCase {
+public class ListTasksResponseTests extends AbstractXContentTestCase<ListTasksResponse> {

     public void testEmptyToString() {
-        assertEquals("{\"tasks\":{}}", new ListTasksResponse().toString());
+        assertEquals("{\"tasks\":[]}", new ListTasksResponse().toString());
     }

     public void testNonEmptyToString() {
@@ -38,8 +53,48 @@ public class ListTasksResponseTests extends ESTestCase {
             new TaskId("node1", 1), "dummy-type", "dummy-action", "dummy-description", null, 0, 1, true, new TaskId("node1", 0),
             Collections.singletonMap("foo", "bar"));
         ListTasksResponse tasksResponse = new ListTasksResponse(singletonList(info), emptyList(), emptyList());
-        assertEquals("{\"tasks\":{\"node1:1\":{\"node\":\"node1\",\"id\":1,\"type\":\"dummy-type\",\"action\":\"dummy-action\","
+        assertEquals("{\"tasks\":[{\"node\":\"node1\",\"id\":1,\"type\":\"dummy-type\",\"action\":\"dummy-action\","
             + "\"description\":\"dummy-description\",\"start_time_in_millis\":0,\"running_time_in_nanos\":1,\"cancellable\":true,"
-            + "\"parent_task_id\":\"node1:0\",\"headers\":{\"foo\":\"bar\"}}}}", tasksResponse.toString());
+            + "\"parent_task_id\":\"node1:0\",\"headers\":{\"foo\":\"bar\"}}]}", tasksResponse.toString());
+    }
+
+    @Override
+    protected ListTasksResponse createTestInstance() {
+        List<TaskInfo> tasks = new ArrayList<>();
+        for (int i = 0; i < randomInt(10); i++) {
+            tasks.add(TaskInfoTests.randomTaskInfo());
+        }
+        List<TaskOperationFailure> taskFailures = new ArrayList<>();
+        for (int i = 0; i < randomInt(5); i++) {
+            taskFailures.add(new TaskOperationFailure(
+                randomAlphaOfLength(5), randomNonNegativeLong(), new IllegalStateException("message")));
+        }
+        return new ListTasksResponse(tasks, taskFailures, Collections.singletonList(new FailedNodeException("", "message", null)));
+    }
+
+    @Override
+    protected ListTasksResponse doParseInstance(XContentParser parser) throws IOException {
+        return ListTasksResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+
+    @Override
+    protected void assertEqualInstances(ListTasksResponse expectedInstance, ListTasksResponse newInstance) {
+        assertNotSame(expectedInstance, newInstance);
+        assertThat(newInstance.getTasks(), equalTo(expectedInstance.getTasks()));
+        assertThat(newInstance.getNodeFailures().size(), equalTo(1));
+        for (ElasticsearchException failure : newInstance.getNodeFailures()) {
+            assertThat(failure, notNullValue());
+            assertThat(failure.getMessage(), equalTo("Elasticsearch exception [type=failed_node_exception, reason=message]"));
+        }
+    }
+
+    @Override
+    protected boolean assertToXContentEquivalence() {
+        return false;
     }
 }
@@ -0,0 +1,156 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.tasks;
+
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class TaskInfoTests extends AbstractSerializingTestCase<TaskInfo> {
+
+    @Override
+    protected TaskInfo doParseInstance(XContentParser parser) {
+        return TaskInfo.fromXContent(parser);
+    }
+
+    @Override
+    protected TaskInfo createTestInstance() {
+        return randomTaskInfo();
+    }
+
+    @Override
+    protected Writeable.Reader<TaskInfo> instanceReader() {
+        return TaskInfo::new;
+    }
+
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(Collections.singletonList(
+                new NamedWriteableRegistry.Entry(Task.Status.class, RawTaskStatus.NAME, RawTaskStatus::new)));
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return field -> "status".equals(field) || "headers".equals(field);
+    }
+
+    @Override
+    protected TaskInfo mutateInstance(TaskInfo info) throws IOException {
+        switch (between(0, 9)) {
+            case 0:
+                TaskId taskId = new TaskId(info.getTaskId().getNodeId() + randomAlphaOfLength(5), info.getTaskId().getId());
+                return new TaskInfo(taskId, info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(), info.getHeaders());
+            case 1:
+                return new TaskInfo(info.getTaskId(), info.getType() + randomAlphaOfLength(5), info.getAction(), info.getDescription(),
+                    info.getStatus(), info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 2:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction() + randomAlphaOfLength(5), info.getDescription(),
+                    info.getStatus(), info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 3:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription() + randomAlphaOfLength(5),
+                    info.getStatus(), info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 4:
+                Task.Status newStatus = randomValueOtherThan(info.getStatus(), TaskInfoTests::randomRawTaskStatus);
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), newStatus,
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(), info.getHeaders());
+            case 5:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime() + between(1, 100), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 6:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos() + between(1, 100), info.isCancellable(), info.getParentTaskId(),
+                    info.getHeaders());
+            case 7:
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable() == false, info.getParentTaskId(),
+                    info.getHeaders());
+            case 8:
+                TaskId parentId = new TaskId(info.getParentTaskId().getNodeId() + randomAlphaOfLength(5), info.getParentTaskId().getId());
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), parentId, info.getHeaders());
+            case 9:
+                Map<String, String> headers = info.getHeaders();
+                if (headers == null) {
+                    headers = new HashMap<>(1);
+                } else {
+                    headers = new HashMap<>(info.getHeaders());
+                }
+                headers.put(randomAlphaOfLength(15), randomAlphaOfLength(15));
+                return new TaskInfo(info.getTaskId(), info.getType(), info.getAction(), info.getDescription(), info.getStatus(),
+                    info.getStartTime(), info.getRunningTimeNanos(), info.isCancellable(), info.getParentTaskId(), headers);
+            default:
+                throw new IllegalStateException();
+        }
+    }
+
+    static TaskInfo randomTaskInfo() {
+        TaskId taskId = randomTaskId();
+        String type = randomAlphaOfLength(5);
+        String action = randomAlphaOfLength(5);
+        Task.Status status = randomBoolean() ? randomRawTaskStatus() : null;
+        String description = randomBoolean() ? randomAlphaOfLength(5) : null;
+        long startTime = randomLong();
+        long runningTimeNanos = randomLong();
+        boolean cancellable = randomBoolean();
+        TaskId parentTaskId = randomBoolean() ? TaskId.EMPTY_TASK_ID : randomTaskId();
+        Map<String, String> headers = randomBoolean() ?
+                Collections.emptyMap() :
+                Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5));
+        return new TaskInfo(taskId, type, action, description, status, startTime, runningTimeNanos, cancellable, parentTaskId, headers);
+    }
+
+    private static TaskId randomTaskId() {
+        return new TaskId(randomAlphaOfLength(5), randomLong());
+    }
+
+    private static RawTaskStatus randomRawTaskStatus() {
+        try (XContentBuilder builder = XContentBuilder.builder(Requests.INDEX_CONTENT_TYPE.xContent())) {
+            builder.startObject();
+            int fields = between(0, 10);
+            for (int f = 0; f < fields; f++) {
+                builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5));
+            }
+            builder.endObject();
+            return new RawTaskStatus(BytesReference.bytes(builder));
+        } catch (IOException e) {
+            throw new IllegalStateException(e);
+        }
+    }
+}
@@ -19,8 +19,6 @@

 package org.elasticsearch.tasks;

-import org.elasticsearch.client.Requests;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -37,6 +35,8 @@ import java.util.Collections;
 import java.util.Map;
 import java.util.TreeMap;

+import static org.elasticsearch.tasks.TaskInfoTests.randomTaskInfo;
+
 /**
  * Round trip tests for {@link TaskResult} and those classes that it includes like {@link TaskInfo} and {@link RawTaskStatus}.
  */
@@ -125,37 +125,6 @@ public class TaskResultTests extends ESTestCase {
         }
     }

-    private static TaskInfo randomTaskInfo() throws IOException {
-        TaskId taskId = randomTaskId();
-        String type = randomAlphaOfLength(5);
-        String action = randomAlphaOfLength(5);
-        Task.Status status = randomBoolean() ? randomRawTaskStatus() : null;
-        String description = randomBoolean() ? randomAlphaOfLength(5) : null;
-        long startTime = randomLong();
-        long runningTimeNanos = randomLong();
-        boolean cancellable = randomBoolean();
-        TaskId parentTaskId = randomBoolean() ? TaskId.EMPTY_TASK_ID : randomTaskId();
-        Map<String, String> headers =
-                randomBoolean() ? Collections.emptyMap() : Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5));
-        return new TaskInfo(taskId, type, action, description, status, startTime, runningTimeNanos, cancellable, parentTaskId, headers);
-    }
-
-    private static TaskId randomTaskId() {
-        return new TaskId(randomAlphaOfLength(5), randomLong());
-    }
-
-    private static RawTaskStatus randomRawTaskStatus() throws IOException {
-        try (XContentBuilder builder = XContentBuilder.builder(Requests.INDEX_CONTENT_TYPE.xContent())) {
-            builder.startObject();
-            int fields = between(0, 10);
-            for (int f = 0; f < fields; f++) {
-                builder.field(randomAlphaOfLength(5), randomAlphaOfLength(5));
-            }
-            builder.endObject();
-            return new RawTaskStatus(BytesReference.bytes(builder));
-        }
-    }
-
     private static ToXContent randomTaskResponse() {
         Map<String, String> result = new TreeMap<>();
         int fields = between(0, 10);
@@ -659,20 +659,20 @@ public abstract class ESTestCase extends LuceneTestCase {
         return RandomizedTest.randomRealisticUnicodeOfCodepointLength(codePoints);
     }

-    public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull, boolean allowEmpty) {
+    public static String[] generateRandomStringArray(int maxArraySize, int stringSize, boolean allowNull, boolean allowEmpty) {
         if (allowNull && random().nextBoolean()) {
             return null;
         }
         int arraySize = randomIntBetween(allowEmpty ? 0 : 1, maxArraySize);
         String[] array = new String[arraySize];
         for (int i = 0; i < arraySize; i++) {
-            array[i] = RandomStrings.randomAsciiOfLength(random(), maxStringSize);
+            array[i] = RandomStrings.randomAsciiOfLength(random(), stringSize);
         }
         return array;
     }

-    public static String[] generateRandomStringArray(int maxArraySize, int maxStringSize, boolean allowNull) {
-        return generateRandomStringArray(maxArraySize, maxStringSize, allowNull, true);
+    public static String[] generateRandomStringArray(int maxArraySize, int stringSize, boolean allowNull) {
+        return generateRandomStringArray(maxArraySize, stringSize, allowNull, true);
     }

     private static final String[] TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m", "micros", "nanos"};
@@ -5,11 +5,11 @@
  */
 package org.elasticsearch.xpack.core.ml.action;

+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.TaskOperationFailure;
 import org.elasticsearch.action.support.tasks.BaseTasksRequest;
 import org.elasticsearch.action.support.tasks.BaseTasksResponse;
@@ -297,7 +297,7 @@ public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJo
             this.jobsStats = jobsStats;
         }

-        public Response(List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException> nodeFailures,
+        public Response(List<TaskOperationFailure> taskFailures, List<? extends ElasticsearchException> nodeFailures,
                         QueryPage<JobStats> jobsStats) {
             super(taskFailures, nodeFailures);
             this.jobsStats = jobsStats;