Merge branch '7.x' into enrich-7.x

This commit is contained in:
James Baiera 2019-05-30 16:13:06 -04:00
commit 215170b6c3
96 changed files with 768 additions and 477 deletions

View File

@@ -48,9 +48,10 @@ final class CcrRequestConverters {
.addPathPartAsIs("_ccr", "follow") .addPathPartAsIs("_ccr", "follow")
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withWaitForActiveShards(putFollowRequest.waitForActiveShards()); parameters.withWaitForActiveShards(putFollowRequest.waitForActiveShards());
request.setEntity(createEntity(putFollowRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(putFollowRequest, REQUEST_BODY_CONTENT_TYPE));
request.addParameters(parameters.asMap());
return request; return request;
} }

View File

@@ -36,22 +36,21 @@ final class ClusterRequestConverters {
static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException { static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings"); Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings");
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(clusterUpdateSettingsRequest.timeout()); parameters.withTimeout(clusterUpdateSettingsRequest.timeout());
parameters.withMasterTimeout(clusterUpdateSettingsRequest.masterNodeTimeout()); parameters.withMasterTimeout(clusterUpdateSettingsRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(clusterUpdateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(clusterUpdateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException { static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException {
Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings"); Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings");
RequestConverters.Params parameters = new RequestConverters.Params();
RequestConverters.Params parameters = new RequestConverters.Params(request);
parameters.withLocal(clusterGetSettingsRequest.local()); parameters.withLocal(clusterGetSettingsRequest.local());
parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults()); parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults());
parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout()); parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -63,7 +62,7 @@ final class ClusterRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
new RequestConverters.Params(request) RequestConverters.Params params = new RequestConverters.Params()
.withWaitForStatus(healthRequest.waitForStatus()) .withWaitForStatus(healthRequest.waitForStatus())
.withWaitForNoRelocatingShards(healthRequest.waitForNoRelocatingShards()) .withWaitForNoRelocatingShards(healthRequest.waitForNoRelocatingShards())
.withWaitForNoInitializingShards(healthRequest.waitForNoInitializingShards()) .withWaitForNoInitializingShards(healthRequest.waitForNoInitializingShards())
@@ -74,6 +73,7 @@ final class ClusterRequestConverters {
.withMasterTimeout(healthRequest.masterNodeTimeout()) .withMasterTimeout(healthRequest.masterNodeTimeout())
.withLocal(healthRequest.local()) .withLocal(healthRequest.local())
.withLevel(healthRequest.level()); .withLevel(healthRequest.level());
request.addParameters(params.asMap());
return request; return request;
} }
} }

View File

@@ -82,10 +82,11 @@ final class DataFrameRequestConverters {
.addPathPartAsIs("_start") .addPathPartAsIs("_start")
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (startRequest.getTimeout() != null) { if (startRequest.getTimeout() != null) {
params.withTimeout(startRequest.getTimeout()); params.withTimeout(startRequest.getTimeout());
} }
request.addParameters(params.asMap());
return request; return request;
} }
@@ -96,13 +97,14 @@ final class DataFrameRequestConverters {
.addPathPartAsIs("_stop") .addPathPartAsIs("_stop")
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (stopRequest.getWaitForCompletion() != null) { if (stopRequest.getWaitForCompletion() != null) {
params.withWaitForCompletion(stopRequest.getWaitForCompletion()); params.withWaitForCompletion(stopRequest.getWaitForCompletion());
} }
if (stopRequest.getTimeout() != null) { if (stopRequest.getTimeout() != null) {
params.withTimeout(stopRequest.getTimeout()); params.withTimeout(stopRequest.getTimeout());
} }
request.addParameters(params.asMap());
return request; return request;
} }

View File

@@ -44,9 +44,10 @@ final class IndexLifecycleRequestConverters {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ilm/policy") String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ilm/policy")
.addCommaSeparatedPathParts(getLifecyclePolicyRequest.getPolicyNames()).build(); .addCommaSeparatedPathParts(getLifecyclePolicyRequest.getPolicyNames()).build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(getLifecyclePolicyRequest.masterNodeTimeout()); params.withMasterTimeout(getLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(getLifecyclePolicyRequest.timeout()); params.withTimeout(getLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -56,9 +57,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs(putLifecycleRequest.getName()) .addPathPartAsIs(putLifecycleRequest.getName())
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putLifecycleRequest.masterNodeTimeout()); params.withMasterTimeout(putLifecycleRequest.masterNodeTimeout());
params.withTimeout(putLifecycleRequest.timeout()); params.withTimeout(putLifecycleRequest.timeout());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(putLifecycleRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putLifecycleRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@@ -69,9 +71,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs("_ilm/policy") .addPathPartAsIs("_ilm/policy")
.addPathPartAsIs(deleteLifecyclePolicyRequest.getLifecyclePolicy()) .addPathPartAsIs(deleteLifecyclePolicyRequest.getLifecyclePolicy())
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(deleteLifecyclePolicyRequest.masterNodeTimeout()); params.withMasterTimeout(deleteLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(deleteLifecyclePolicyRequest.timeout()); params.withTimeout(deleteLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -83,9 +86,10 @@ final class IndexLifecycleRequestConverters {
.addCommaSeparatedPathParts(indices) .addCommaSeparatedPathParts(indices)
.addPathPartAsIs("_ilm", "remove") .addPathPartAsIs("_ilm", "remove")
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(removePolicyRequest.indicesOptions()); params.withIndicesOptions(removePolicyRequest.indicesOptions());
params.withMasterTimeout(removePolicyRequest.masterNodeTimeout()); params.withMasterTimeout(removePolicyRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -95,9 +99,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs("_ilm") .addPathPartAsIs("_ilm")
.addPathPartAsIs("start") .addPathPartAsIs("start")
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(startILMRequest.masterNodeTimeout()); params.withMasterTimeout(startILMRequest.masterNodeTimeout());
params.withTimeout(startILMRequest.timeout()); params.withTimeout(startILMRequest.timeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -107,9 +112,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs("_ilm") .addPathPartAsIs("_ilm")
.addPathPartAsIs("stop") .addPathPartAsIs("stop")
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(stopILMRequest.masterNodeTimeout()); params.withMasterTimeout(stopILMRequest.masterNodeTimeout());
params.withTimeout(stopILMRequest.timeout()); params.withTimeout(stopILMRequest.timeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -119,9 +125,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs("_ilm") .addPathPartAsIs("_ilm")
.addPathPartAsIs("status") .addPathPartAsIs("status")
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(lifecycleManagementStatusRequest.masterNodeTimeout()); params.withMasterTimeout(lifecycleManagementStatusRequest.masterNodeTimeout());
params.withTimeout(lifecycleManagementStatusRequest.timeout()); params.withTimeout(lifecycleManagementStatusRequest.timeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -132,9 +139,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs("_ilm") .addPathPartAsIs("_ilm")
.addPathPartAsIs("explain") .addPathPartAsIs("explain")
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(explainLifecycleRequest.indicesOptions()); params.withIndicesOptions(explainLifecycleRequest.indicesOptions());
params.withMasterTimeout(explainLifecycleRequest.masterNodeTimeout()); params.withMasterTimeout(explainLifecycleRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
@@ -145,9 +153,10 @@ final class IndexLifecycleRequestConverters {
.addPathPartAsIs("_ilm") .addPathPartAsIs("_ilm")
.addPathPartAsIs("retry") .addPathPartAsIs("retry")
.build()); .build());
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(retryLifecyclePolicyRequest.masterNodeTimeout()); params.withMasterTimeout(retryLifecyclePolicyRequest.masterNodeTimeout());
params.withTimeout(retryLifecyclePolicyRequest.timeout()); params.withTimeout(retryLifecyclePolicyRequest.timeout());
request.addParameters(params.asMap());
return request; return request;
} }
} }

View File

@@ -67,10 +67,11 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices()); String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices());
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(deleteIndexRequest.timeout()); parameters.withTimeout(deleteIndexRequest.timeout());
parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(deleteIndexRequest.indicesOptions()); parameters.withIndicesOptions(deleteIndexRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -78,11 +79,12 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(openIndexRequest.indices(), "_open"); String endpoint = RequestConverters.endpoint(openIndexRequest.indices(), "_open");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(openIndexRequest.timeout()); parameters.withTimeout(openIndexRequest.timeout());
parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout());
parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards()); parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards());
parameters.withIndicesOptions(openIndexRequest.indicesOptions()); parameters.withIndicesOptions(openIndexRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -90,10 +92,11 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close"); String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(closeIndexRequest.timeout()); parameters.withTimeout(closeIndexRequest.timeout());
parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(closeIndexRequest.indicesOptions()); parameters.withIndicesOptions(closeIndexRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -102,11 +105,11 @@ final class IndicesRequestConverters {
.addPathPart(createIndexRequest.index()).build(); .addPathPart(createIndexRequest.index()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(createIndexRequest.timeout()); parameters.withTimeout(createIndexRequest.timeout());
parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@@ -116,12 +119,12 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(createIndexRequest.indices()); String endpoint = RequestConverters.endpoint(createIndexRequest.indices());
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(createIndexRequest.timeout()); parameters.withTimeout(createIndexRequest.timeout());
parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@@ -129,10 +132,10 @@ final class IndicesRequestConverters {
static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); Request request = new Request(HttpPost.METHOD_NAME, "/_aliases");
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(indicesAliasesRequest.timeout()); parameters.withTimeout(indicesAliasesRequest.timeout());
parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout()); parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@@ -141,9 +144,10 @@ final class IndicesRequestConverters {
static Request putMapping(PutMappingRequest putMappingRequest) throws IOException { static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping")); Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(putMappingRequest.timeout()); parameters.withTimeout(putMappingRequest.timeout());
parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@@ -162,11 +166,11 @@ final class IndicesRequestConverters {
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(),
"_mapping", putMappingRequest.type())); "_mapping", putMappingRequest.type()));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(putMappingRequest.timeout()); parameters.withTimeout(putMappingRequest.timeout());
parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@@ -176,11 +180,11 @@ final class IndicesRequestConverters {
Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping")); Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
parameters.withLocal(getMappingsRequest.local()); parameters.withLocal(getMappingsRequest.local());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -191,12 +195,12 @@ final class IndicesRequestConverters {
Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types)); Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
parameters.withLocal(getMappingsRequest.local()); parameters.withLocal(getMappingsRequest.local());
parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -213,11 +217,11 @@ final class IndicesRequestConverters {
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
parameters.withLocal(getFieldMappingsRequest.local()); parameters.withLocal(getFieldMappingsRequest.local());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -234,12 +238,12 @@ final class IndicesRequestConverters {
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
parameters.withLocal(getFieldMappingsRequest.local()); parameters.withLocal(getFieldMappingsRequest.local());
parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -247,8 +251,9 @@ final class IndicesRequestConverters {
String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh")); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(refreshRequest.indicesOptions()); parameters.withIndicesOptions(refreshRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -256,18 +261,20 @@ final class IndicesRequestConverters {
String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices(); String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush")); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(flushRequest.indicesOptions()); parameters.withIndicesOptions(flushRequest.indicesOptions());
parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing())); parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
parameters.putParam("force", Boolean.toString(flushRequest.force())); parameters.putParam("force", Boolean.toString(flushRequest.force()));
request.addParameters(parameters.asMap());
return request; return request;
} }
static Request flushSynced(SyncedFlushRequest syncedFlushRequest) { static Request flushSynced(SyncedFlushRequest syncedFlushRequest) {
String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices(); String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush/synced")); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush/synced"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(syncedFlushRequest.indicesOptions()); parameters.withIndicesOptions(syncedFlushRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -275,11 +282,12 @@ final class IndicesRequestConverters {
String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices(); String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge")); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(forceMergeRequest.indicesOptions()); parameters.withIndicesOptions(forceMergeRequest.indicesOptions());
parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments())); parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes())); parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush())); parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush()));
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -287,12 +295,13 @@ final class IndicesRequestConverters {
String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices(); String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear")); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions()); parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions());
parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache())); parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache())); parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache())); parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields())); parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields()));
request.addParameters(parameters.asMap());
return request; return request;
} }
@@ -306,9 +315,10 @@ final class IndicesRequestConverters {
Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases)); Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases));
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getAliasesRequest.indicesOptions()); params.withIndicesOptions(getAliasesRequest.indicesOptions());
params.withLocal(getAliasesRequest.local()); params.withLocal(getAliasesRequest.local());
request.addParameters(params.asMap());
return request; return request;
} }
@ -332,11 +342,11 @@ final class IndicesRequestConverters {
.addPathPart(resizeRequest.getTargetIndexRequest().index()).build(); .addPathPart(resizeRequest.getTargetIndexRequest().index()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(resizeRequest.timeout()); params.withTimeout(resizeRequest.timeout());
params.withMasterTimeout(resizeRequest.masterNodeTimeout()); params.withMasterTimeout(resizeRequest.masterNodeTimeout());
params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards()); params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -346,14 +356,14 @@ final class IndicesRequestConverters {
.addPathPart(rolloverRequest.getNewIndexName()).build(); .addPathPart(rolloverRequest.getNewIndexName()).build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(rolloverRequest.timeout()); params.withTimeout(rolloverRequest.timeout());
params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
if (rolloverRequest.isDryRun()) { if (rolloverRequest.isDryRun()) {
params.putParam("dry_run", Boolean.TRUE.toString()); params.putParam("dry_run", Boolean.TRUE.toString());
} }
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -364,7 +374,7 @@ final class IndicesRequestConverters {
.addPathPart(rolloverRequest.getNewIndexName()).build(); .addPathPart(rolloverRequest.getNewIndexName()).build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(rolloverRequest.timeout()); params.withTimeout(rolloverRequest.timeout());
params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
@ -373,7 +383,7 @@ final class IndicesRequestConverters {
} }
params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
request.addParameters(params.asMap());
return request; return request;
} }
@ -384,12 +394,12 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(indices, "_settings", names); String endpoint = RequestConverters.endpoint(indices, "_settings", names);
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getSettingsRequest.indicesOptions()); params.withIndicesOptions(getSettingsRequest.indicesOptions());
params.withLocal(getSettingsRequest.local()); params.withLocal(getSettingsRequest.local());
params.withIncludeDefaults(getSettingsRequest.includeDefaults()); params.withIncludeDefaults(getSettingsRequest.includeDefaults());
params.withMasterTimeout(getSettingsRequest.masterNodeTimeout()); params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
@ -404,14 +414,14 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(indices); String endpoint = RequestConverters.endpoint(indices);
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getIndexRequest.indicesOptions()); params.withIndicesOptions(getIndexRequest.indicesOptions());
params.withLocal(getIndexRequest.local()); params.withLocal(getIndexRequest.local());
params.withIncludeDefaults(getIndexRequest.includeDefaults()); params.withIncludeDefaults(getIndexRequest.includeDefaults());
params.withHuman(getIndexRequest.humanReadable()); params.withHuman(getIndexRequest.humanReadable());
params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(params.asMap());
return request; return request;
} }
@ -421,13 +431,13 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(indices); String endpoint = RequestConverters.endpoint(indices);
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getIndexRequest.indicesOptions()); params.withIndicesOptions(getIndexRequest.indicesOptions());
params.withLocal(getIndexRequest.local()); params.withLocal(getIndexRequest.local());
params.withIncludeDefaults(getIndexRequest.includeDefaults()); params.withIncludeDefaults(getIndexRequest.includeDefaults());
params.withHuman(getIndexRequest.humanReadable()); params.withHuman(getIndexRequest.humanReadable());
params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
@ -444,12 +454,13 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), "");
Request request = new Request(HttpHead.METHOD_NAME, endpoint); Request request = new Request(HttpHead.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(getIndexRequest.local()); params.withLocal(getIndexRequest.local());
params.withHuman(getIndexRequest.humanReadable()); params.withHuman(getIndexRequest.humanReadable());
params.withIndicesOptions(getIndexRequest.indicesOptions()); params.withIndicesOptions(getIndexRequest.indicesOptions());
params.withIncludeDefaults(getIndexRequest.includeDefaults()); params.withIncludeDefaults(getIndexRequest.includeDefaults());
params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(params.asMap());
return request; return request;
} }
@ -461,11 +472,12 @@ final class IndicesRequestConverters {
String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), "");
Request request = new Request(HttpHead.METHOD_NAME, endpoint); Request request = new Request(HttpHead.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(getIndexRequest.local()); params.withLocal(getIndexRequest.local());
params.withHuman(getIndexRequest.humanReadable()); params.withHuman(getIndexRequest.humanReadable());
params.withIndicesOptions(getIndexRequest.indicesOptions()); params.withIndicesOptions(getIndexRequest.indicesOptions());
params.withIncludeDefaults(getIndexRequest.includeDefaults()); params.withIncludeDefaults(getIndexRequest.includeDefaults());
request.addParameters(params.asMap());
return request; return request;
} }
@ -473,12 +485,12 @@ final class IndicesRequestConverters {
String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices(); String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices();
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings")); Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings"));
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(updateSettingsRequest.timeout()); parameters.withTimeout(updateSettingsRequest.timeout());
parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout()); parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout());
parameters.withIndicesOptions(updateSettingsRequest.indicesOptions()); parameters.withIndicesOptions(updateSettingsRequest.indicesOptions());
parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting()); parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -493,7 +505,7 @@ final class IndicesRequestConverters {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addPathPart(putIndexTemplateRequest.name()).build(); .addPathPart(putIndexTemplateRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
if (putIndexTemplateRequest.create()) { if (putIndexTemplateRequest.create()) {
params.putParam("create", Boolean.TRUE.toString()); params.putParam("create", Boolean.TRUE.toString());
@ -502,6 +514,7 @@ final class IndicesRequestConverters {
params.putParam("cause", putIndexTemplateRequest.cause()); params.putParam("cause", putIndexTemplateRequest.cause());
} }
params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -510,7 +523,7 @@ final class IndicesRequestConverters {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addPathPart(putIndexTemplateRequest.name()).build(); .addPathPart(putIndexTemplateRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
if (putIndexTemplateRequest.create()) { if (putIndexTemplateRequest.create()) {
params.putParam("create", Boolean.TRUE.toString()); params.putParam("create", Boolean.TRUE.toString());
@ -518,6 +531,7 @@ final class IndicesRequestConverters {
if (Strings.hasText(putIndexTemplateRequest.cause())) { if (Strings.hasText(putIndexTemplateRequest.cause())) {
params.putParam("cause", putIndexTemplateRequest.cause()); params.putParam("cause", putIndexTemplateRequest.cause());
} }
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -527,11 +541,12 @@ final class IndicesRequestConverters {
String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types();
String endpoint = RequestConverters.endpoint(indices, types, "_validate/query"); String endpoint = RequestConverters.endpoint(indices, types, "_validate/query");
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(validateQueryRequest.indicesOptions()); params.withIndicesOptions(validateQueryRequest.indicesOptions());
params.putParam("explain", Boolean.toString(validateQueryRequest.explain())); params.putParam("explain", Boolean.toString(validateQueryRequest.explain()));
params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards())); params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards()));
params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite())); params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -541,9 +556,10 @@ final class IndicesRequestConverters {
String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
String endpoint = RequestConverters.endpoint(indices, "_alias", aliases); String endpoint = RequestConverters.endpoint(indices, "_alias", aliases);
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getAliasesRequest.indicesOptions()); params.withIndicesOptions(getAliasesRequest.indicesOptions());
params.withLocal(getAliasesRequest.local()); params.withLocal(getAliasesRequest.local());
request.addParameters(params.asMap());
return request; return request;
} }
@ -562,12 +578,13 @@ final class IndicesRequestConverters {
.addCommaSeparatedPathParts(getIndexTemplatesRequest.names()) .addCommaSeparatedPathParts(getIndexTemplatesRequest.names())
.build(); .build();
final Request request = new Request(HttpGet.METHOD_NAME, endpoint); final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params(request); final RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(getIndexTemplatesRequest.isLocal()); params.withLocal(getIndexTemplatesRequest.isLocal());
params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout());
if (includeTypeName) { if (includeTypeName) {
params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -577,9 +594,10 @@ final class IndicesRequestConverters {
.addCommaSeparatedPathParts(indexTemplatesExistRequest.names()) .addCommaSeparatedPathParts(indexTemplatesExistRequest.names())
.build(); .build();
final Request request = new Request(HttpHead.METHOD_NAME, endpoint); final Request request = new Request(HttpHead.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params(request); final RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(indexTemplatesExistRequest.isLocal()); params.withLocal(indexTemplatesExistRequest.isLocal());
params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
@ -598,22 +616,24 @@ final class IndicesRequestConverters {
static Request freezeIndex(FreezeIndexRequest freezeIndexRequest) { static Request freezeIndex(FreezeIndexRequest freezeIndexRequest) {
String endpoint = RequestConverters.endpoint(freezeIndexRequest.getIndices(), "_freeze"); String endpoint = RequestConverters.endpoint(freezeIndexRequest.getIndices(), "_freeze");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(freezeIndexRequest.timeout()); parameters.withTimeout(freezeIndexRequest.timeout());
parameters.withMasterTimeout(freezeIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(freezeIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(freezeIndexRequest.indicesOptions()); parameters.withIndicesOptions(freezeIndexRequest.indicesOptions());
parameters.withWaitForActiveShards(freezeIndexRequest.getWaitForActiveShards()); parameters.withWaitForActiveShards(freezeIndexRequest.getWaitForActiveShards());
request.addParameters(parameters.asMap());
return request; return request;
} }
static Request unfreezeIndex(UnfreezeIndexRequest unfreezeIndexRequest) { static Request unfreezeIndex(UnfreezeIndexRequest unfreezeIndexRequest) {
String endpoint = RequestConverters.endpoint(unfreezeIndexRequest.getIndices(), "_unfreeze"); String endpoint = RequestConverters.endpoint(unfreezeIndexRequest.getIndices(), "_unfreeze");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(unfreezeIndexRequest.timeout()); parameters.withTimeout(unfreezeIndexRequest.timeout());
parameters.withMasterTimeout(unfreezeIndexRequest.masterNodeTimeout()); parameters.withMasterTimeout(unfreezeIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(unfreezeIndexRequest.indicesOptions()); parameters.withIndicesOptions(unfreezeIndexRequest.indicesOptions());
parameters.withWaitForActiveShards(unfreezeIndexRequest.getWaitForActiveShards()); parameters.withWaitForActiveShards(unfreezeIndexRequest.getWaitForActiveShards());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -621,8 +641,9 @@ final class IndicesRequestConverters {
String name = deleteIndexTemplateRequest.name(); String name = deleteIndexTemplateRequest.name();
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addPathPart(name).build(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addPathPart(name).build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
} }

View File

@ -41,8 +41,9 @@ final class IngestRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout()); parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -53,10 +54,10 @@ final class IngestRequestConverters {
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(putPipelineRequest.timeout()); parameters.withTimeout(putPipelineRequest.timeout());
parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout()); parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -68,10 +69,10 @@ final class IngestRequestConverters {
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(deletePipelineRequest.timeout()); parameters.withTimeout(deletePipelineRequest.timeout());
parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout()); parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -83,8 +84,9 @@ final class IngestRequestConverters {
builder.addPathPartAsIs("_simulate"); builder.addPathPartAsIs("_simulate");
String endpoint = builder.build(); String endpoint = builder.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose())); params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }

View File

@ -36,12 +36,13 @@ final class LicenseRequestConverters {
static Request putLicense(PutLicenseRequest putLicenseRequest) { static Request putLicense(PutLicenseRequest putLicenseRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(putLicenseRequest.timeout()); parameters.withTimeout(putLicenseRequest.timeout());
parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout()); parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
if (putLicenseRequest.isAcknowledge()) { if (putLicenseRequest.isAcknowledge()) {
parameters.putParam("acknowledge", "true"); parameters.putParam("acknowledge", "true");
} }
request.addParameters(parameters.asMap());
request.setJsonEntity(putLicenseRequest.getLicenseDefinition()); request.setJsonEntity(putLicenseRequest.getLicenseDefinition());
return request; return request;
} }
@ -49,17 +50,19 @@ final class LicenseRequestConverters {
static Request getLicense(GetLicenseRequest getLicenseRequest) { static Request getLicense(GetLicenseRequest getLicenseRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withLocal(getLicenseRequest.isLocal()); parameters.withLocal(getLicenseRequest.isLocal());
request.addParameters(parameters.asMap());
return request; return request;
} }
static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) { static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build(); String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(deleteLicenseRequest.timeout()); parameters.withTimeout(deleteLicenseRequest.timeout());
parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout()); parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -67,11 +70,12 @@ final class LicenseRequestConverters {
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license", "start_trial").build(); final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license", "start_trial").build();
final Request request = new Request(HttpPost.METHOD_NAME, endpoint); final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.putParam("acknowledge", Boolean.toString(startTrialRequest.isAcknowledge())); parameters.putParam("acknowledge", Boolean.toString(startTrialRequest.isAcknowledge()));
if (startTrialRequest.getLicenseType() != null) { if (startTrialRequest.getLicenseType() != null) {
parameters.putParam("type", startTrialRequest.getLicenseType()); parameters.putParam("type", startTrialRequest.getLicenseType());
} }
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -80,12 +84,13 @@ final class LicenseRequestConverters {
.addPathPartAsIs("_license", "start_basic") .addPathPartAsIs("_license", "start_basic")
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(startBasicRequest.timeout()); parameters.withTimeout(startBasicRequest.timeout());
parameters.withMasterTimeout(startBasicRequest.masterNodeTimeout()); parameters.withMasterTimeout(startBasicRequest.masterNodeTimeout());
if (startBasicRequest.isAcknowledge()) { if (startBasicRequest.isAcknowledge()) {
parameters.putParam("acknowledge", "true"); parameters.putParam("acknowledge", "true");
} }
request.addParameters(parameters.asMap());
return request; return request;
} }

View File

@ -105,11 +105,11 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (getJobRequest.getAllowNoJobs() != null) { if (getJobRequest.getAllowNoJobs() != null) {
params.putParam("allow_no_jobs", Boolean.toString(getJobRequest.getAllowNoJobs())); params.putParam("allow_no_jobs", Boolean.toString(getJobRequest.getAllowNoJobs()));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -122,10 +122,11 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (getJobStatsRequest.getAllowNoJobs() != null) { if (getJobStatsRequest.getAllowNoJobs() != null) {
params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.getAllowNoJobs())); params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.getAllowNoJobs()));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -171,14 +172,14 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (deleteJobRequest.getForce() != null) { if (deleteJobRequest.getForce() != null) {
params.putParam("force", Boolean.toString(deleteJobRequest.getForce())); params.putParam("force", Boolean.toString(deleteJobRequest.getForce()));
} }
if (deleteJobRequest.getWaitForCompletion() != null) { if (deleteJobRequest.getWaitForCompletion() != null) {
params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion())); params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion()));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -249,12 +250,12 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (getDatafeedRequest.getAllowNoDatafeeds() != null) { if (getDatafeedRequest.getAllowNoDatafeeds() != null) {
params.putParam(GetDatafeedRequest.ALLOW_NO_DATAFEEDS.getPreferredName(), params.putParam(GetDatafeedRequest.ALLOW_NO_DATAFEEDS.getPreferredName(),
Boolean.toString(getDatafeedRequest.getAllowNoDatafeeds())); Boolean.toString(getDatafeedRequest.getAllowNoDatafeeds()));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -265,10 +266,11 @@ final class MLRequestConverters {
.addPathPart(deleteDatafeedRequest.getDatafeedId()) .addPathPart(deleteDatafeedRequest.getDatafeedId())
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (deleteDatafeedRequest.getForce() != null) { if (deleteDatafeedRequest.getForce() != null) {
params.putParam("force", Boolean.toString(deleteDatafeedRequest.getForce())); params.putParam("force", Boolean.toString(deleteDatafeedRequest.getForce()));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -305,10 +307,11 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (getDatafeedStatsRequest.getAllowNoDatafeeds() != null) { if (getDatafeedStatsRequest.getAllowNoDatafeeds() != null) {
params.putParam("allow_no_datafeeds", Boolean.toString(getDatafeedStatsRequest.getAllowNoDatafeeds())); params.putParam("allow_no_datafeeds", Boolean.toString(getDatafeedStatsRequest.getAllowNoDatafeeds()));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -331,13 +334,14 @@ final class MLRequestConverters {
.addPathPart(Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds())) .addPathPart(Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds()))
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (deleteForecastRequest.getAllowNoForecasts() != null) { if (deleteForecastRequest.getAllowNoForecasts() != null) {
params.putParam("allow_no_forecasts", Boolean.toString(deleteForecastRequest.getAllowNoForecasts())); params.putParam("allow_no_forecasts", Boolean.toString(deleteForecastRequest.getAllowNoForecasts()));
} }
if (deleteForecastRequest.timeout() != null) { if (deleteForecastRequest.timeout() != null) {
params.putParam("timeout", deleteForecastRequest.timeout().getStringRep()); params.putParam("timeout", deleteForecastRequest.timeout().getStringRep());
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -453,7 +457,7 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (postDataRequest.getResetStart() != null) { if (postDataRequest.getResetStart() != null) {
params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart()); params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart());
} }
@ -461,6 +465,7 @@ final class MLRequestConverters {
params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd()); params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd());
} }
BytesReference content = postDataRequest.getContent(); BytesReference content = postDataRequest.getContent();
request.addParameters(params.asMap());
if (content != null) { if (content != null) {
BytesRef source = postDataRequest.getContent().toBytesRef(); BytesRef source = postDataRequest.getContent().toBytesRef();
HttpEntity byteEntity = new NByteArrayEntity(source.bytes, HttpEntity byteEntity = new NByteArrayEntity(source.bytes,
@ -594,13 +599,14 @@ final class MLRequestConverters {
.addPathPart(getFiltersRequest.getFilterId()) .addPathPart(getFiltersRequest.getFilterId())
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (getFiltersRequest.getSize() != null) { if (getFiltersRequest.getSize() != null) {
params.putParam(PageParams.SIZE.getPreferredName(), getFiltersRequest.getSize().toString()); params.putParam(PageParams.SIZE.getPreferredName(), getFiltersRequest.getSize().toString());
} }
if (getFiltersRequest.getFrom() != null) { if (getFiltersRequest.getFrom() != null) {
params.putParam(PageParams.FROM.getPreferredName(), getFiltersRequest.getFrom().toString()); params.putParam(PageParams.FROM.getPreferredName(), getFiltersRequest.getFrom().toString());
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -628,11 +634,12 @@ final class MLRequestConverters {
static Request setUpgradeMode(SetUpgradeModeRequest setUpgradeModeRequest) { static Request setUpgradeMode(SetUpgradeModeRequest setUpgradeModeRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "set_upgrade_mode").build(); String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "set_upgrade_mode").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.putParam(SetUpgradeModeRequest.ENABLED.getPreferredName(), Boolean.toString(setUpgradeModeRequest.isEnabled())); params.putParam(SetUpgradeModeRequest.ENABLED.getPreferredName(), Boolean.toString(setUpgradeModeRequest.isEnabled()));
if (setUpgradeModeRequest.getTimeout() != null) { if (setUpgradeModeRequest.getTimeout() != null) {
params.putParam(SetUpgradeModeRequest.TIMEOUT.getPreferredName(), setUpgradeModeRequest.getTimeout().toString()); params.putParam(SetUpgradeModeRequest.TIMEOUT.getPreferredName(), setUpgradeModeRequest.getTimeout().toString());
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -650,7 +657,7 @@ final class MLRequestConverters {
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (findFileStructureRequest.getLinesToSample() != null) { if (findFileStructureRequest.getLinesToSample() != null) {
params.putParam(FindFileStructureRequest.LINES_TO_SAMPLE.getPreferredName(), params.putParam(FindFileStructureRequest.LINES_TO_SAMPLE.getPreferredName(),
findFileStructureRequest.getLinesToSample().toString()); findFileStructureRequest.getLinesToSample().toString());
@ -695,7 +702,7 @@ final class MLRequestConverters {
if (findFileStructureRequest.getExplain() != null) { if (findFileStructureRequest.getExplain() != null) {
params.putParam(FindFileStructureRequest.EXPLAIN.getPreferredName(), findFileStructureRequest.getExplain().toString()); params.putParam(FindFileStructureRequest.EXPLAIN.getPreferredName(), findFileStructureRequest.getExplain().toString());
} }
request.addParameters(params.asMap());
BytesReference sample = findFileStructureRequest.getSample(); BytesReference sample = findFileStructureRequest.getSample();
BytesRef source = sample.toBytesRef(); BytesRef source = sample.toBytesRef();
HttpEntity byteEntity = new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON)); HttpEntity byteEntity = new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON));

View File

@ -88,8 +88,10 @@ import java.io.IOException;
import java.net.URI; import java.net.URI;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.StringJoiner; import java.util.StringJoiner;
final class RequestConverters { final class RequestConverters {
@ -103,7 +105,7 @@ final class RequestConverters {
String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id()); String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
Params parameters = new Params(request); Params parameters = new Params();
parameters.withRouting(deleteRequest.routing()); parameters.withRouting(deleteRequest.routing());
parameters.withTimeout(deleteRequest.timeout()); parameters.withTimeout(deleteRequest.timeout());
parameters.withVersion(deleteRequest.version()); parameters.withVersion(deleteRequest.version());
@ -112,6 +114,7 @@ final class RequestConverters {
parameters.withIfPrimaryTerm(deleteRequest.ifPrimaryTerm()); parameters.withIfPrimaryTerm(deleteRequest.ifPrimaryTerm());
parameters.withRefreshPolicy(deleteRequest.getRefreshPolicy()); parameters.withRefreshPolicy(deleteRequest.getRefreshPolicy());
parameters.withWaitForActiveShards(deleteRequest.waitForActiveShards()); parameters.withWaitForActiveShards(deleteRequest.waitForActiveShards());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -122,7 +125,7 @@ final class RequestConverters {
static Request bulk(BulkRequest bulkRequest) throws IOException { static Request bulk(BulkRequest bulkRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_bulk"); Request request = new Request(HttpPost.METHOD_NAME, "/_bulk");
Params parameters = new Params(request); Params parameters = new Params();
parameters.withTimeout(bulkRequest.timeout()); parameters.withTimeout(bulkRequest.timeout());
parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy()); parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
parameters.withPipeline(bulkRequest.pipeline()); parameters.withPipeline(bulkRequest.pipeline());
@ -249,6 +252,7 @@ final class RequestConverters {
content.write(separator); content.write(separator);
} }
} }
request.addParameters(parameters.asMap());
request.setEntity(new NByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType)); request.setEntity(new NByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType));
return request; return request;
} }
@ -264,7 +268,7 @@ final class RequestConverters {
private static Request getStyleRequest(String method, GetRequest getRequest) { private static Request getStyleRequest(String method, GetRequest getRequest) {
Request request = new Request(method, endpoint(getRequest.index(), getRequest.type(), getRequest.id())); Request request = new Request(method, endpoint(getRequest.index(), getRequest.type(), getRequest.id()));
Params parameters = new Params(request); Params parameters = new Params();
parameters.withPreference(getRequest.preference()); parameters.withPreference(getRequest.preference());
parameters.withRouting(getRequest.routing()); parameters.withRouting(getRequest.routing());
parameters.withRefresh(getRequest.refresh()); parameters.withRefresh(getRequest.refresh());
@ -273,7 +277,7 @@ final class RequestConverters {
parameters.withVersion(getRequest.version()); parameters.withVersion(getRequest.version());
parameters.withVersionType(getRequest.versionType()); parameters.withVersionType(getRequest.versionType());
parameters.withFetchSourceContext(getRequest.fetchSourceContext()); parameters.withFetchSourceContext(getRequest.fetchSourceContext());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -286,23 +290,24 @@ final class RequestConverters {
endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source"); endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source");
} }
Request request = new Request(HttpHead.METHOD_NAME, endpoint); Request request = new Request(HttpHead.METHOD_NAME, endpoint);
Params parameters = new Params(request); Params parameters = new Params();
parameters.withPreference(getRequest.preference()); parameters.withPreference(getRequest.preference());
parameters.withRouting(getRequest.routing()); parameters.withRouting(getRequest.routing());
parameters.withRefresh(getRequest.refresh()); parameters.withRefresh(getRequest.refresh());
parameters.withRealtime(getRequest.realtime()); parameters.withRealtime(getRequest.realtime());
// Version params are not currently supported by the source exists API so are not passed // Version params are not currently supported by the source exists API so are not passed
request.addParameters(parameters.asMap());
return request; return request;
} }
static Request multiGet(MultiGetRequest multiGetRequest) throws IOException { static Request multiGet(MultiGetRequest multiGetRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_mget"); Request request = new Request(HttpPost.METHOD_NAME, "/_mget");
Params parameters = new Params(request); Params parameters = new Params();
parameters.withPreference(multiGetRequest.preference()); parameters.withPreference(multiGetRequest.preference());
parameters.withRealtime(multiGetRequest.realtime()); parameters.withRealtime(multiGetRequest.realtime());
parameters.withRefresh(multiGetRequest.refresh()); parameters.withRefresh(multiGetRequest.refresh());
request.addParameters(parameters.asMap());
request.setEntity(createEntity(multiGetRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(multiGetRequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -321,7 +326,7 @@ final class RequestConverters {
Request request = new Request(method, endpoint); Request request = new Request(method, endpoint);
Params parameters = new Params(request); Params parameters = new Params();
parameters.withRouting(indexRequest.routing()); parameters.withRouting(indexRequest.routing());
parameters.withTimeout(indexRequest.timeout()); parameters.withTimeout(indexRequest.timeout());
parameters.withVersion(indexRequest.version()); parameters.withVersion(indexRequest.version());
@ -334,6 +339,7 @@ final class RequestConverters {
BytesRef source = indexRequest.source().toBytesRef(); BytesRef source = indexRequest.source().toBytesRef();
ContentType contentType = createContentType(indexRequest.getContentType()); ContentType contentType = createContentType(indexRequest.getContentType());
request.addParameters(parameters.asMap());
request.setEntity(new NByteArrayEntity(source.bytes, source.offset, source.length, contentType)); request.setEntity(new NByteArrayEntity(source.bytes, source.offset, source.length, contentType));
return request; return request;
} }
@ -348,7 +354,7 @@ final class RequestConverters {
: endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update"); : endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params parameters = new Params(request); Params parameters = new Params();
parameters.withRouting(updateRequest.routing()); parameters.withRouting(updateRequest.routing());
parameters.withTimeout(updateRequest.timeout()); parameters.withTimeout(updateRequest.timeout());
parameters.withRefreshPolicy(updateRequest.getRefreshPolicy()); parameters.withRefreshPolicy(updateRequest.getRefreshPolicy());
@ -379,6 +385,7 @@ final class RequestConverters {
if (xContentType == null) { if (xContentType == null) {
xContentType = Requests.INDEX_CONTENT_TYPE; xContentType = Requests.INDEX_CONTENT_TYPE;
} }
request.addParameters(parameters.asMap());
request.setEntity(createEntity(updateRequest, xContentType)); request.setEntity(createEntity(updateRequest, xContentType));
return request; return request;
} }
@ -393,12 +400,13 @@ final class RequestConverters {
static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException { static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint)); Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint));
Params params = new Params(request); Params params = new Params();
addSearchRequestParams(params, searchRequest); addSearchRequestParams(params, searchRequest);
if (searchRequest.source() != null) { if (searchRequest.source() != null) {
request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE));
} }
request.addParameters(params.asMap());
return request; return request;
} }
@ -436,7 +444,7 @@ final class RequestConverters {
static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOException { static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_msearch"); Request request = new Request(HttpPost.METHOD_NAME, "/_msearch");
Params params = new Params(request); Params params = new Params();
params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
if (multiSearchRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) { if (multiSearchRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) {
params.putParam("max_concurrent_searches", Integer.toString(multiSearchRequest.maxConcurrentSearchRequests())); params.putParam("max_concurrent_searches", Integer.toString(multiSearchRequest.maxConcurrentSearchRequests()));
@ -444,6 +452,7 @@ final class RequestConverters {
XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent();
byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent); byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent);
request.addParameters(params.asMap());
request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type()))); request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type())));
return request; return request;
} }
@ -458,8 +467,9 @@ final class RequestConverters {
String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template"); String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template");
request = new Request(HttpGet.METHOD_NAME, endpoint); request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params(request); Params params = new Params();
addSearchRequestParams(params, searchRequest); addSearchRequestParams(params, searchRequest);
request.addParameters(params.asMap());
} }
request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
@ -469,7 +479,7 @@ final class RequestConverters {
static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException { static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template"); Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template");
Params params = new Params(request); Params params = new Params();
params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
if (multiSearchTemplateRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) { if (multiSearchTemplateRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) {
params.putParam("max_concurrent_searches", Integer.toString(multiSearchTemplateRequest.maxConcurrentSearchRequests())); params.putParam("max_concurrent_searches", Integer.toString(multiSearchTemplateRequest.maxConcurrentSearchRequests()));
@ -483,10 +493,11 @@ final class RequestConverters {
static Request count(CountRequest countRequest) throws IOException { static Request count(CountRequest countRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, endpoint(countRequest.indices(), countRequest.types(), "_count")); Request request = new Request(HttpPost.METHOD_NAME, endpoint(countRequest.indices(), countRequest.types(), "_count"));
Params params = new Params(request); Params params = new Params();
params.withRouting(countRequest.routing()); params.withRouting(countRequest.routing());
params.withPreference(countRequest.preference()); params.withPreference(countRequest.preference());
params.withIndicesOptions(countRequest.indicesOptions()); params.withIndicesOptions(countRequest.indicesOptions());
request.addParameters(params.asMap());
request.setEntity(createEntity(countRequest.source(), REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(countRequest.source(), REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -497,11 +508,12 @@ final class RequestConverters {
: endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain"); : endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain");
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params(request); Params params = new Params();
params.withStoredFields(explainRequest.storedFields()); params.withStoredFields(explainRequest.storedFields());
params.withFetchSourceContext(explainRequest.fetchSourceContext()); params.withFetchSourceContext(explainRequest.fetchSourceContext());
params.withRouting(explainRequest.routing()); params.withRouting(explainRequest.routing());
params.withPreference(explainRequest.preference()); params.withPreference(explainRequest.preference());
request.addParameters(params.asMap());
request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -509,18 +521,19 @@ final class RequestConverters {
static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) { static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) {
Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps")); Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps"));
Params params = new Params(request); Params params = new Params();
params.withFields(fieldCapabilitiesRequest.fields()); params.withFields(fieldCapabilitiesRequest.fields());
params.withIndicesOptions(fieldCapabilitiesRequest.indicesOptions()); params.withIndicesOptions(fieldCapabilitiesRequest.indicesOptions());
request.addParameters(params.asMap());
return request; return request;
} }
static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException { static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException {
Request request = new Request(HttpGet.METHOD_NAME, endpoint(rankEvalRequest.indices(), Strings.EMPTY_ARRAY, "_rank_eval")); Request request = new Request(HttpGet.METHOD_NAME, endpoint(rankEvalRequest.indices(), Strings.EMPTY_ARRAY, "_rank_eval"));
Params params = new Params(request); Params params = new Params();
params.withIndicesOptions(rankEvalRequest.indicesOptions()); params.withIndicesOptions(rankEvalRequest.indicesOptions());
request.addParameters(params.asMap());
request.setEntity(createEntity(rankEvalRequest.getRankEvalSpec(), REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(rankEvalRequest.getRankEvalSpec(), REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -536,7 +549,7 @@ final class RequestConverters {
private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException { private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException {
String endpoint = new EndpointBuilder().addPathPart("_reindex").build(); String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params(request) Params params = new Params()
.withWaitForCompletion(waitForCompletion) .withWaitForCompletion(waitForCompletion)
.withRefresh(reindexRequest.isRefresh()) .withRefresh(reindexRequest.isRefresh())
.withTimeout(reindexRequest.getTimeout()) .withTimeout(reindexRequest.getTimeout())
@ -546,6 +559,7 @@ final class RequestConverters {
if (reindexRequest.getScrollTime() != null) { if (reindexRequest.getScrollTime() != null) {
params.putParam("scroll", reindexRequest.getScrollTime()); params.putParam("scroll", reindexRequest.getScrollTime());
} }
request.addParameters(params.asMap());
request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -554,7 +568,7 @@ final class RequestConverters {
String endpoint = String endpoint =
endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query"); endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params(request) Params params = new Params()
.withRouting(updateByQueryRequest.getRouting()) .withRouting(updateByQueryRequest.getRouting())
.withPipeline(updateByQueryRequest.getPipeline()) .withPipeline(updateByQueryRequest.getPipeline())
.withRefresh(updateByQueryRequest.isRefresh()) .withRefresh(updateByQueryRequest.isRefresh())
@ -574,6 +588,7 @@ final class RequestConverters {
if (updateByQueryRequest.getSize() > 0) { if (updateByQueryRequest.getSize() > 0) {
params.putParam("size", Integer.toString(updateByQueryRequest.getSize())); params.putParam("size", Integer.toString(updateByQueryRequest.getSize()));
} }
request.addParameters(params.asMap());
request.setEntity(createEntity(updateByQueryRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(updateByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -582,7 +597,7 @@ final class RequestConverters {
String endpoint = String endpoint =
endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query"); endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params(request) Params params = new Params()
.withRouting(deleteByQueryRequest.getRouting()) .withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh()) .withRefresh(deleteByQueryRequest.isRefresh())
.withTimeout(deleteByQueryRequest.getTimeout()) .withTimeout(deleteByQueryRequest.getTimeout())
@ -601,6 +616,7 @@ final class RequestConverters {
if (deleteByQueryRequest.getSize() > 0) { if (deleteByQueryRequest.getSize() > 0) {
params.putParam("size", Integer.toString(deleteByQueryRequest.getSize())); params.putParam("size", Integer.toString(deleteByQueryRequest.getSize()));
} }
request.addParameters(params.asMap());
request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -621,22 +637,24 @@ final class RequestConverters {
String endpoint = new EndpointBuilder().addPathPart(firstPathPart).addPathPart(rethrottleRequest.getTaskId().toString()) String endpoint = new EndpointBuilder().addPathPart(firstPathPart).addPathPart(rethrottleRequest.getTaskId().toString())
.addPathPart("_rethrottle").build(); .addPathPart("_rethrottle").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params(request) Params params = new Params()
.withRequestsPerSecond(rethrottleRequest.getRequestsPerSecond()); .withRequestsPerSecond(rethrottleRequest.getRequestsPerSecond());
// we set "group_by" to "none" because this is the response format we can parse back // we set "group_by" to "none" because this is the response format we can parse back
params.putParam("group_by", "none"); params.putParam("group_by", "none");
request.addParameters(params.asMap());
return request; return request;
} }
static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException { static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build(); String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params(request); Params params = new Params();
params.withTimeout(putStoredScriptRequest.timeout()); params.withTimeout(putStoredScriptRequest.timeout());
params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout()); params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout());
if (Strings.hasText(putStoredScriptRequest.context())) { if (Strings.hasText(putStoredScriptRequest.context())) {
params.putParam("context", putStoredScriptRequest.context()); params.putParam("context", putStoredScriptRequest.context());
} }
request.addParameters(params.asMap());
request.setEntity(createEntity(putStoredScriptRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(putStoredScriptRequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -667,11 +685,12 @@ final class RequestConverters {
} }
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params(request); Params params = new Params();
params.withRouting(tvrequest.getRouting()); params.withRouting(tvrequest.getRouting());
params.withPreference(tvrequest.getPreference()); params.withPreference(tvrequest.getPreference());
params.withFields(tvrequest.getFields()); params.withFields(tvrequest.getFields());
params.withRealtime(tvrequest.getRealtime()); params.withRealtime(tvrequest.getRealtime());
request.addParameters(params.asMap());
request.setEntity(createEntity(tvrequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(tvrequest, REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -686,17 +705,19 @@ final class RequestConverters {
static Request getScript(GetStoredScriptRequest getStoredScriptRequest) { static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build(); String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params(request); Params params = new Params();
params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout()); params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) { static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build(); String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
Params params = new Params(request); Params params = new Params();
params.withTimeout(deleteStoredScriptRequest.timeout()); params.withTimeout(deleteStoredScriptRequest.timeout());
params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout()); params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request; return request;
} }
@ -756,15 +777,14 @@ final class RequestConverters {
* a {@link Request} and adds the parameters to it directly. * a {@link Request} and adds the parameters to it directly.
*/ */
static class Params { static class Params {
private final Request request; private final Map<String,String> parameters = new HashMap<>();
Params(Request request) { Params() {
this.request = request;
} }
Params putParam(String name, String value) { Params putParam(String name, String value) {
if (Strings.hasLength(value)) { if (Strings.hasLength(value)) {
request.addParameter(name, value); parameters.put(name,value);
} }
return this; return this;
} }
@ -776,6 +796,10 @@ final class RequestConverters {
return this; return this;
} }
Map<String, String> asMap(){
return parameters;
}
Params withDocAsUpsert(boolean docAsUpsert) { Params withDocAsUpsert(boolean docAsUpsert) {
if (docAsUpsert) { if (docAsUpsert) {
return putParam("doc_as_upsert", Boolean.TRUE.toString()); return putParam("doc_as_upsert", Boolean.TRUE.toString());
@ -939,6 +963,7 @@ final class RequestConverters {
expandWildcards = joiner.toString(); expandWildcards = joiner.toString();
} }
putParam("expand_wildcards", expandWildcards); putParam("expand_wildcards", expandWildcards);
putParam("ignore_throttled", Boolean.toString(indicesOptions.ignoreThrottled()));
} }
return this; return this;
} }

View File

@ -68,11 +68,12 @@ final class RollupRequestConverters {
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(stopRollupJobRequest.timeout()); parameters.withTimeout(stopRollupJobRequest.timeout());
if (stopRollupJobRequest.waitForCompletion() != null) { if (stopRollupJobRequest.waitForCompletion() != null) {
parameters.withWaitForCompletion(stopRollupJobRequest.waitForCompletion()); parameters.withWaitForCompletion(stopRollupJobRequest.waitForCompletion());
} }
request.addParameters(parameters.asMap());
return request; return request;
} }

View File

@ -66,8 +66,9 @@ final class SecurityRequestConverters {
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE));
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy()); params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -87,8 +88,9 @@ final class SecurityRequestConverters {
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE));
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putUserRequest.getRefreshPolicy()); params.withRefreshPolicy(putUserRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -98,8 +100,9 @@ final class SecurityRequestConverters {
.addPathPart(deleteUserRequest.getName()) .addPathPart(deleteUserRequest.getName())
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deleteUserRequest.getRefreshPolicy()); params.withRefreshPolicy(deleteUserRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -110,8 +113,9 @@ final class SecurityRequestConverters {
.build(); .build();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint); final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRoleMappingRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(putRoleMappingRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request); final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putRoleMappingRequest.getRefreshPolicy()); params.withRefreshPolicy(putRoleMappingRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -139,8 +143,9 @@ final class SecurityRequestConverters {
.addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable") .addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable")
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy()); params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -161,8 +166,9 @@ final class SecurityRequestConverters {
final String endpoint = builder.addPathPartAsIs("_clear_cache").build(); final String endpoint = builder.addPathPartAsIs("_clear_cache").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
if (clearRealmCacheRequest.getUsernames().isEmpty() == false) { if (clearRealmCacheRequest.getUsernames().isEmpty() == false) {
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.putParam("usernames", Strings.collectionToCommaDelimitedString(clearRealmCacheRequest.getUsernames())); params.putParam("usernames", Strings.collectionToCommaDelimitedString(clearRealmCacheRequest.getUsernames()));
request.addParameters(params.asMap());
} }
return request; return request;
} }
@ -182,8 +188,9 @@ final class SecurityRequestConverters {
.addPathPart(deleteRoleMappingRequest.getName()) .addPathPart(deleteRoleMappingRequest.getName())
.build(); .build();
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint); final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params(request); final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deleteRoleMappingRequest.getRefreshPolicy()); params.withRefreshPolicy(deleteRoleMappingRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -193,8 +200,9 @@ final class SecurityRequestConverters {
.addPathPart(deleteRoleRequest.getName()) .addPathPart(deleteRoleRequest.getName())
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deleteRoleRequest.getRefreshPolicy()); params.withRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -231,8 +239,9 @@ final class SecurityRequestConverters {
static Request putPrivileges(final PutPrivilegesRequest putPrivilegesRequest) throws IOException { static Request putPrivileges(final PutPrivilegesRequest putPrivilegesRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, "/_security/privilege"); Request request = new Request(HttpPut.METHOD_NAME, "/_security/privilege");
request.setEntity(createEntity(putPrivilegesRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(putPrivilegesRequest, REQUEST_BODY_CONTENT_TYPE));
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putPrivilegesRequest.getRefreshPolicy()); params.withRefreshPolicy(putPrivilegesRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -243,8 +252,9 @@ final class SecurityRequestConverters {
.addCommaSeparatedPathParts(deletePrivilegeRequest.getPrivileges()) .addCommaSeparatedPathParts(deletePrivilegeRequest.getPrivileges())
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deletePrivilegeRequest.getRefreshPolicy()); params.withRefreshPolicy(deletePrivilegeRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -255,16 +265,18 @@ final class SecurityRequestConverters {
.build(); .build();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint); final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRoleRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(putRoleRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request); final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putRoleRequest.getRefreshPolicy()); params.withRefreshPolicy(putRoleRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
static Request createApiKey(final CreateApiKeyRequest createApiKeyRequest) throws IOException { static Request createApiKey(final CreateApiKeyRequest createApiKeyRequest) throws IOException {
final Request request = new Request(HttpPost.METHOD_NAME, "/_security/api_key"); final Request request = new Request(HttpPost.METHOD_NAME, "/_security/api_key");
request.setEntity(createEntity(createApiKeyRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(createApiKeyRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request); final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(createApiKeyRequest.getRefreshPolicy()); params.withRefreshPolicy(createApiKeyRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request; return request;
} }
@ -282,13 +294,13 @@ final class SecurityRequestConverters {
if (Strings.hasText(getApiKeyRequest.getRealmName())) { if (Strings.hasText(getApiKeyRequest.getRealmName())) {
request.addParameter("realm_name", getApiKeyRequest.getRealmName()); request.addParameter("realm_name", getApiKeyRequest.getRealmName());
} }
return request; return request;
} }
static Request invalidateApiKey(final InvalidateApiKeyRequest invalidateApiKeyRequest) throws IOException { static Request invalidateApiKey(final InvalidateApiKeyRequest invalidateApiKeyRequest) throws IOException {
final Request request = new Request(HttpDelete.METHOD_NAME, "/_security/api_key"); final Request request = new Request(HttpDelete.METHOD_NAME, "/_security/api_key");
request.setEntity(createEntity(invalidateApiKeyRequest, REQUEST_BODY_CONTENT_TYPE)); request.setEntity(createEntity(invalidateApiKeyRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request);
return request; return request;
} }
} }

View File

@ -46,9 +46,10 @@ final class SnapshotRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout()); parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
parameters.withLocal(getRepositoriesRequest.local()); parameters.withLocal(getRepositoriesRequest.local());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -56,11 +57,11 @@ final class SnapshotRequestConverters {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build(); String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout()); parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(putRepositoryRequest.timeout()); parameters.withTimeout(putRepositoryRequest.timeout());
parameters.withVerify(putRepositoryRequest.verify()); parameters.withVerify(putRepositoryRequest.verify());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -70,9 +71,10 @@ final class SnapshotRequestConverters {
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout()); parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(deleteRepositoryRequest.timeout()); parameters.withTimeout(deleteRepositoryRequest.timeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -83,9 +85,10 @@ final class SnapshotRequestConverters {
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout()); parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(verifyRepositoryRequest.timeout()); parameters.withTimeout(verifyRepositoryRequest.timeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -95,9 +98,10 @@ final class SnapshotRequestConverters {
.addPathPart(createSnapshotRequest.snapshot()) .addPathPart(createSnapshotRequest.snapshot())
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout()); params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
params.withWaitForCompletion(createSnapshotRequest.waitForCompletion()); params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -114,11 +118,11 @@ final class SnapshotRequestConverters {
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout()); parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable())); parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose())); parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -130,9 +134,10 @@ final class SnapshotRequestConverters {
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout()); parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable()); parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
request.addParameters(parameters.asMap());
return request; return request;
} }
@ -143,9 +148,10 @@ final class SnapshotRequestConverters {
.addPathPartAsIs("_restore") .addPathPartAsIs("_restore")
.build(); .build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout()); parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion()); parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request; return request;
} }
@ -157,8 +163,9 @@ final class SnapshotRequestConverters {
.build(); .build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout()); parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
} }

View File

@ -32,12 +32,13 @@ final class TasksRequestConverters {
static Request cancelTasks(CancelTasksRequest cancelTasksRequest) { static Request cancelTasks(CancelTasksRequest cancelTasksRequest) {
Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel");
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(cancelTasksRequest.getTimeout()) params.withTimeout(cancelTasksRequest.getTimeout())
.withTaskId(cancelTasksRequest.getTaskId()) .withTaskId(cancelTasksRequest.getTaskId())
.withNodes(cancelTasksRequest.getNodes()) .withNodes(cancelTasksRequest.getNodes())
.withParentTaskId(cancelTasksRequest.getParentTaskId()) .withParentTaskId(cancelTasksRequest.getParentTaskId())
.withActions(cancelTasksRequest.getActions()); .withActions(cancelTasksRequest.getActions());
request.addParameters(params.asMap());
return request; return request;
} }
@ -46,7 +47,7 @@ final class TasksRequestConverters {
throw new IllegalArgumentException("TaskId cannot be used for list tasks request"); throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
} }
Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(listTaskRequest.getTimeout()) params.withTimeout(listTaskRequest.getTimeout())
.withDetailed(listTaskRequest.getDetailed()) .withDetailed(listTaskRequest.getDetailed())
.withWaitForCompletion(listTaskRequest.getWaitForCompletion()) .withWaitForCompletion(listTaskRequest.getWaitForCompletion())
@ -54,6 +55,7 @@ final class TasksRequestConverters {
.withNodes(listTaskRequest.getNodes()) .withNodes(listTaskRequest.getNodes())
.withActions(listTaskRequest.getActions()) .withActions(listTaskRequest.getActions())
.putParam("group_by", "none"); .putParam("group_by", "none");
request.addParameters(params.asMap());
return request; return request;
} }
@ -62,9 +64,10 @@ final class TasksRequestConverters {
.addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId())) .addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId()))
.build(); .build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(getTaskRequest.getTimeout()) params.withTimeout(getTaskRequest.getTimeout())
.withWaitForCompletion(getTaskRequest.getWaitForCompletion()); .withWaitForCompletion(getTaskRequest.getWaitForCompletion());
request.addParameters(params.asMap());
return request; return request;
} }

View File

@ -69,12 +69,13 @@ final class WatcherRequestConverters {
.build(); .build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint); Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request) RequestConverters.Params params = new RequestConverters.Params()
.withIfSeqNo(putWatchRequest.ifSeqNo()) .withIfSeqNo(putWatchRequest.ifSeqNo())
.withIfPrimaryTerm(putWatchRequest.ifPrimaryTerm()); .withIfPrimaryTerm(putWatchRequest.ifPrimaryTerm());
if (putWatchRequest.isActive() == false) { if (putWatchRequest.isActive() == false) {
params.putParam("active", "false"); params.putParam("active", "false");
} }
request.addParameters(params.asMap());
ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType()); ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType());
BytesReference source = putWatchRequest.getSource(); BytesReference source = putWatchRequest.getSource();
request.setEntity(new NByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType)); request.setEntity(new NByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
@ -118,7 +119,7 @@ final class WatcherRequestConverters {
.addPathPartAsIs("_execute").build(); .addPathPartAsIs("_execute").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint); Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request); RequestConverters.Params params = new RequestConverters.Params();
if (executeWatchRequest.isDebug()) { if (executeWatchRequest.isDebug()) {
params.putParam("debug", "true"); params.putParam("debug", "true");
} }
@ -128,7 +129,7 @@ final class WatcherRequestConverters {
if (executeWatchRequest.recordExecution()) { if (executeWatchRequest.recordExecution()) {
params.putParam("record_execution", "true"); params.putParam("record_execution", "true");
} }
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(executeWatchRequest, XContentType.JSON)); request.setEntity(RequestConverters.createEntity(executeWatchRequest, XContentType.JSON));
return request; return request;
} }
@ -158,7 +159,7 @@ final class WatcherRequestConverters {
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_watcher", "stats"); RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_watcher", "stats");
String endpoint = builder.build(); String endpoint = builder.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint); Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
StringBuilder metric = new StringBuilder(); StringBuilder metric = new StringBuilder();
if (watcherStatsRequest.includeCurrentWatches()) { if (watcherStatsRequest.includeCurrentWatches()) {
metric.append("current_watches"); metric.append("current_watches");
@ -172,6 +173,7 @@ final class WatcherRequestConverters {
if (metric.length() > 0) { if (metric.length() > 0) {
parameters.putParam("metric", metric.toString()); parameters.putParam("metric", metric.toString());
} }
request.addParameters(parameters.asMap());
return request; return request;
} }
} }

View File

@ -46,8 +46,9 @@ final class XPackRequestConverters {
static Request usage(XPackUsageRequest usageRequest) { static Request usage(XPackUsageRequest usageRequest) {
Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
RequestConverters.Params parameters = new RequestConverters.Params(request); RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(usageRequest.masterNodeTimeout()); parameters.withMasterTimeout(usageRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request; return request;
} }
} }

View File

@ -30,21 +30,19 @@ import java.util.List;
public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse { public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse {
private static final String STARTED = "started"; private static final String ACKNOWLEDGED = "acknowledged";
private static final ConstructingObjectParser<StartDataFrameTransformResponse, Void> PARSER = private static final ConstructingObjectParser<StartDataFrameTransformResponse, Void> PARSER =
AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new, STARTED); AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new,
ACKNOWLEDGED);
public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException { public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null); return PARSER.parse(parser, null);
} }
public StartDataFrameTransformResponse(boolean started, @Nullable List<TaskOperationFailure> taskFailures, public StartDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) { @Nullable List<? extends ElasticsearchException> nodeFailures) {
super(started, taskFailures, nodeFailures); super(acknowledged, taskFailures, nodeFailures);
} }
public boolean isStarted() {
return isAcknowledged();
}
} }

View File

@ -30,21 +30,18 @@ import java.util.List;
public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse { public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse {
private static final String STOPPED = "stopped"; private static final String ACKNOWLEDGED = "acknowledged";
private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER = private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER = AcknowledgedTasksResponse
AcknowledgedTasksResponse.generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, STOPPED); .generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, ACKNOWLEDGED);
public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException { public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null); return PARSER.parse(parser, null);
} }
public StopDataFrameTransformResponse(boolean stopped, @Nullable List<TaskOperationFailure> taskFailures, public StopDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) { @Nullable List<? extends ElasticsearchException> nodeFailures) {
super(stopped, taskFailures, nodeFailures); super(acknowledged, taskFailures, nodeFailures);
} }
public boolean isStopped() {
return isAcknowledged();
}
} }

View File

@ -258,7 +258,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id); StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id);
StartDataFrameTransformResponse startResponse = StartDataFrameTransformResponse startResponse =
execute(startRequest, client::startDataFrameTransform, client::startDataFrameTransformAsync); execute(startRequest, client::startDataFrameTransform, client::startDataFrameTransformAsync);
assertTrue(startResponse.isStarted()); assertTrue(startResponse.isAcknowledged());
assertThat(startResponse.getNodeFailures(), empty()); assertThat(startResponse.getNodeFailures(), empty());
assertThat(startResponse.getTaskFailures(), empty()); assertThat(startResponse.getTaskFailures(), empty());
@ -271,7 +271,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null); StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null);
StopDataFrameTransformResponse stopResponse = StopDataFrameTransformResponse stopResponse =
execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync); execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync);
assertTrue(stopResponse.isStopped()); assertTrue(stopResponse.isAcknowledged());
assertThat(stopResponse.getNodeFailures(), empty()); assertThat(stopResponse.getNodeFailures(), empty());
assertThat(stopResponse.getTaskFailures(), empty()); assertThat(stopResponse.getTaskFailures(), empty());
} }
@ -358,7 +358,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
StartDataFrameTransformResponse startTransformResponse = execute(new StartDataFrameTransformRequest(id), StartDataFrameTransformResponse startTransformResponse = execute(new StartDataFrameTransformRequest(id),
client::startDataFrameTransform, client::startDataFrameTransform,
client::startDataFrameTransformAsync); client::startDataFrameTransformAsync);
assertThat(startTransformResponse.isStarted(), is(true)); assertThat(startTransformResponse.isAcknowledged(), is(true));
assertBusy(() -> { assertBusy(() -> {
GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id), GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id),
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync); client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);

View File

@ -1567,7 +1567,7 @@ public class RequestConvertersTests extends ESTestCase {
endpoint.add("_field_caps"); endpoint.add("_field_caps");
assertEquals(endpoint.toString(), request.getEndpoint()); assertEquals(endpoint.toString(), request.getEndpoint());
assertEquals(4, request.getParameters().size()); assertEquals(5, request.getParameters().size());
// Note that we don't check the field param value explicitly, as field names are // Note that we don't check the field param value explicitly, as field names are
// passed through // passed through
@ -1601,7 +1601,7 @@ public class RequestConvertersTests extends ESTestCase {
} }
endpoint.add(RestRankEvalAction.ENDPOINT); endpoint.add(RestRankEvalAction.ENDPOINT);
assertEquals(endpoint.toString(), request.getEndpoint()); assertEquals(endpoint.toString(), request.getEndpoint());
assertEquals(3, request.getParameters().size()); assertEquals(4, request.getParameters().size());
assertEquals(expectedParams, request.getParameters()); assertEquals(expectedParams, request.getParameters());
assertToXContentBody(spec, request.getEntity()); assertToXContentBody(spec, request.getEntity());
} }
@ -1928,7 +1928,8 @@ public class RequestConvertersTests extends ESTestCase {
Map<String, String> expectedParams) { Map<String, String> expectedParams) {
if (randomBoolean()) { if (randomBoolean()) {
setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(),
true, false, false, randomBoolean()));
} }
expectedParams.put("ignore_unavailable", Boolean.toString(getter.get().ignoreUnavailable())); expectedParams.put("ignore_unavailable", Boolean.toString(getter.get().ignoreUnavailable()));
expectedParams.put("allow_no_indices", Boolean.toString(getter.get().allowNoIndices())); expectedParams.put("allow_no_indices", Boolean.toString(getter.get().allowNoIndices()));
@ -1941,11 +1942,13 @@ public class RequestConvertersTests extends ESTestCase {
} else { } else {
expectedParams.put("expand_wildcards", "none"); expectedParams.put("expand_wildcards", "none");
} }
expectedParams.put("ignore_throttled", Boolean.toString(getter.get().ignoreThrottled()));
} }
static IndicesOptions setRandomIndicesOptions(IndicesOptions indicesOptions, Map<String, String> expectedParams) { static IndicesOptions setRandomIndicesOptions(IndicesOptions indicesOptions, Map<String, String> expectedParams) {
if (randomBoolean()) { if (randomBoolean()) {
indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(),
true, false, false, randomBoolean());
} }
expectedParams.put("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable())); expectedParams.put("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable()));
expectedParams.put("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices())); expectedParams.put("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices()));
@ -1958,6 +1961,7 @@ public class RequestConvertersTests extends ESTestCase {
} else { } else {
expectedParams.put("expand_wildcards", "none"); expectedParams.put("expand_wildcards", "none");
} }
expectedParams.put("ignore_throttled", Boolean.toString(indicesOptions.ignoreThrottled()));
return indicesOptions; return indicesOptions;
} }

View File

@ -244,7 +244,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
request, RequestOptions.DEFAULT); request, RequestOptions.DEFAULT);
// end::start-data-frame-transform-execute // end::start-data-frame-transform-execute
assertTrue(response.isStarted()); assertTrue(response.isAcknowledged());
} }
{ {
// tag::stop-data-frame-transform-request // tag::stop-data-frame-transform-request
@ -263,7 +263,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
request, RequestOptions.DEFAULT); request, RequestOptions.DEFAULT);
// end::stop-data-frame-transform-execute // end::stop-data-frame-transform-execute
assertTrue(response.isStopped()); assertTrue(response.isAcknowledged());
} }
{ {
// tag::start-data-frame-transform-execute-listener // tag::start-data-frame-transform-execute-listener

View File

@ -81,6 +81,10 @@ public final class Request {
} }
} }
public void addParameters(Map<String, String> paramSource){
paramSource.forEach(this::addParameter);
}
/** /**
* Query string parameters. The returned map is an unmodifiable view of the * Query string parameters. The returned map is an unmodifiable view of the
* map in the request so calls to {@link #addParameter(String, String)} * map in the request so calls to {@link #addParameter(String, String)}

View File

@ -1,4 +1,3 @@
[role="xpack"]
[testenv="platinum"] [testenv="platinum"]
`max_read_request_operation_count`:: `max_read_request_operation_count`::
(integer) the maximum number of operations to pull per read from the remote (integer) the maximum number of operations to pull per read from the remote

View File

@ -41,7 +41,7 @@ When the {dataframe-transform} starts, you receive the following results:
[source,js] [source,js]
---- ----
{ {
"started" : true "acknowledged" : true
} }
---- ----
// TESTRESPONSE // TESTRESPONSE

View File

@ -61,7 +61,7 @@ When the {dataframe-transform} stops, you receive the following results:
[source,js] [source,js]
---- ----
{ {
"stopped" : true "acknowledged" : true
} }
---- ----
// TESTRESPONSE // TESTRESPONSE

View File

@ -16,7 +16,7 @@ PUT trips
}, },
"route_length_miles": { "route_length_miles": {
"type": "alias", "type": "alias",
"path": "distance" // <1> "path": "distance" <1>
}, },
"transit_mode": { "transit_mode": {
"type": "keyword" "type": "keyword"

View File

@ -193,7 +193,7 @@ phase. Instead, highlighting needs to be performed via
============================================= =============================================
[[limit-number-nested-fields]]
==== Limiting the number of `nested` fields ==== Limiting the number of `nested` fields
Indexing a document with 100 nested fields actually indexes 101 documents as each nested Indexing a document with 100 nested fields actually indexes 101 documents as each nested

View File

@ -11,11 +11,13 @@
[float] [float]
[[removed-global-ordinals-hash-and-global-ordinals-low-cardinality-terms-agg]]
==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed ==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed
These `execution_hint` are removed and should be replaced by `global_ordinals`. These `execution_hint` are removed and should be replaced by `global_ordinals`.
[float] [float]
[[search-max-buckets-cluster-setting]]
==== `search.max_buckets` in the cluster setting ==== `search.max_buckets` in the cluster setting
The dynamic cluster setting named `search.max_buckets` now defaults The dynamic cluster setting named `search.max_buckets` now defaults
@ -23,12 +25,14 @@ to 10,000 (instead of unlimited in the previous version).
Requests that try to return more than the limit will fail with an exception. Requests that try to return more than the limit will fail with an exception.
[float] [float]
[[missing-option-removed-composite-agg]]
==== `missing` option of the `composite` aggregation has been removed ==== `missing` option of the `composite` aggregation has been removed
The `missing` option of the `composite` aggregation, deprecated in 6.x, The `missing` option of the `composite` aggregation, deprecated in 6.x,
has been removed. `missing_bucket` should be used instead. has been removed. `missing_bucket` should be used instead.
[float] [float]
[[replace-params-agg-with-state-context-variable]]
==== Replaced `params._agg` with `state` context variable in scripted metric aggregations ==== Replaced `params._agg` with `state` context variable in scripted metric aggregations
The object used to share aggregation state between the scripts in a Scripted Metric The object used to share aggregation state between the scripts in a Scripted Metric
@ -36,12 +40,14 @@ Aggregation is now a variable called `state` available in the script context, ra
being provided via the `params` object as `params._agg`. being provided via the `params` object as `params._agg`.
[float] [float]
[[reduce-script-combine-script-params-mandatory]]
==== Make metric aggregation script parameters `reduce_script` and `combine_script` mandatory ==== Make metric aggregation script parameters `reduce_script` and `combine_script` mandatory
The metric aggregation has been changed to require these two script parameters to ensure users are The metric aggregation has been changed to require these two script parameters to ensure users are
explicitly defining how their data is processed. explicitly defining how their data is processed.
[float] [float]
[[percentiles-percentile-ranks-return-null-instead-nan]]
==== `percentiles` and `percentile_ranks` now return `null` instead of `NaN` ==== `percentiles` and `percentile_ranks` now return `null` instead of `NaN`
The `percentiles` and `percentile_ranks` aggregations used to return `NaN` in The `percentiles` and `percentile_ranks` aggregations used to return `NaN` in
@ -49,6 +55,7 @@ the response if they were applied to an empty set of values. Because `NaN` is
not officially supported by JSON, it has been replaced with `null`. not officially supported by JSON, it has been replaced with `null`.
[float] [float]
[[stats-extended-stats-return-zero-instead-null]]
==== `stats` and `extended_stats` now return 0 instead of `null` for zero docs ==== `stats` and `extended_stats` now return 0 instead of `null` for zero docs
When the `stats` and `extended_stats` aggregations collected zero docs (`doc_count: 0`), When the `stats` and `extended_stats` aggregations collected zero docs (`doc_count: 0`),

View File

@ -10,6 +10,7 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[limit-number-of-tokens-produced-by-analyze]]
==== Limiting the number of tokens produced by _analyze ==== Limiting the number of tokens produced by _analyze
To safeguard against out of memory errors, the number of tokens that can be produced To safeguard against out of memory errors, the number of tokens that can be produced
@ -27,6 +28,7 @@ limited to 1000000. This default limit can be changed
for a particular index with the index setting `index.highlight.max_analyzed_offset`. for a particular index with the index setting `index.highlight.max_analyzed_offset`.
[float] [float]
[[delimited-payload-filter-renaming]]
==== `delimited_payload_filter` renaming ==== `delimited_payload_filter` renaming
The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2. The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2.
@ -35,6 +37,7 @@ name in new indices created in 7.0 will throw an error. Use the new name `delimi
instead. instead.
[float] [float]
[[standard-filter-removed]]
==== `standard` filter has been removed ==== `standard` filter has been removed
The `standard` token filter has been removed because it doesn't change anything in the stream. The `standard` token filter has been removed because it doesn't change anything in the stream.
@ -48,6 +51,7 @@ Indexes created using this analyzer will still be readable in elasticsearch 7.0,
but it will not be possible to create new indexes using it. but it will not be possible to create new indexes using it.
[float] [float]
[[deprecated-ngram-edgengram-token-filter-cannot-be-used]]
==== The deprecated `nGram` and `edgeNGram` token filter cannot be used on new indices ==== The deprecated `nGram` and `edgeNGram` token filter cannot be used on new indices
The `nGram` and `edgeNGram` token filter names have been deprecated in an earlier 6.x version. The `nGram` and `edgeNGram` token filter names have been deprecated in an earlier 6.x version.

View File

@ -76,6 +76,7 @@ pools. Note that `core` and `max` will be populated for scaling thread pools,
and `size` will be populated for fixed thread pools. and `size` will be populated for fixed thread pools.
[float] [float]
[[fields-param-removed-bulk-update-request]]
==== The parameter `fields` deprecated in 6.x has been removed from Bulk request ==== The parameter `fields` deprecated in 6.x has been removed from Bulk request
and Update request. The Update API returns `400 - Bad request` if request contains and Update request. The Update API returns `400 - Bad request` if request contains
unknown parameters (instead of ignored in the previous version). unknown parameters (instead of ignored in the previous version).
@ -118,6 +119,7 @@ body. Specifying `fields` in the request body as opposed to a parameter was depr
in 6.4.0, and is now unsupported in 7.0.0. in 6.4.0, and is now unsupported in 7.0.0.
[float] [float]
[[copy-settings-deprecated-shrink-split-apis]]
==== `copy_settings` is deprecated on shrink and split APIs ==== `copy_settings` is deprecated on shrink and split APIs
Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink
@ -143,6 +145,7 @@ current user was not authorized for any alias. An empty response with
status 200 - OK is now returned instead at all times. status 200 - OK is now returned instead at all times.
[float] [float]
[[user-object-removed-put-user-api]]
==== Put User API response no longer has `user` object ==== Put User API response no longer has `user` object
The Put User API response was changed in 6.5.0 to add the `created` field The Put User API response was changed in 6.5.0 to add the `created` field
@ -150,6 +153,7 @@ outside of the user object where it previously had been. In 7.0.0 the user
object has been removed in favor of the top level `created` field. object has been removed in favor of the top level `created` field.
[float] [float]
[[source-include-exclude-params-removed]]
==== Source filtering url parameters `_source_include` and `_source_exclude` have been removed ==== Source filtering url parameters `_source_include` and `_source_exclude` have been removed
The deprecated in 6.x url parameters are now removed. Use `_source_includes` and `_source_excludes` instead. The deprecated in 6.x url parameters are now removed. Use `_source_includes` and `_source_excludes` instead.
@ -168,6 +172,7 @@ removed.
[float] [float]
[[deprecated-termvector-endpoint-removed]]
==== Deprecated `_termvector` endpoint removed ==== Deprecated `_termvector` endpoint removed
The `_termvector` endpoint was deprecated in 2.0 and has now been removed. The `_termvector` endpoint was deprecated in 2.0 and has now been removed.
@ -186,6 +191,7 @@ using the `allow_restricted_indices` flag on the permission (as any other index
privilege). privilege).
[float] [float]
[[remove-get-support-cache-clear-api]]
==== Removed support for `GET` on the `_cache/clear` API ==== Removed support for `GET` on the `_cache/clear` API
The `_cache/clear` API no longer supports the `GET` HTTP verb. It must be called The `_cache/clear` API no longer supports the `GET` HTTP verb. It must be called

View File

@ -17,6 +17,7 @@ Due to cross-cluster search using `:` to separate a cluster and index name,
cluster names may no longer contain `:`. cluster names may no longer contain `:`.
[float] [float]
[[new-default-wait-for-active-shards-param]]
==== New default for `wait_for_active_shards` parameter of the open index command ==== New default for `wait_for_active_shards` parameter of the open index command
The default value for the `wait_for_active_shards` parameter of the open index API The default value for the `wait_for_active_shards` parameter of the open index API
@ -24,6 +25,7 @@ is changed from 0 to 1, which means that the command will now by default wait fo
primary shards of the opened index to be allocated. primary shards of the opened index to be allocated.
[float] [float]
[[shard-preferences-removed]]
==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed ==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed
These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences. These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences.

View File

@ -53,6 +53,7 @@ above. If you are preparing to upgrade from an earlier version, you must set
`discovery.zen.ping.unicast.hosts` or `discovery.zen.hosts_provider`. `discovery.zen.ping.unicast.hosts` or `discovery.zen.hosts_provider`.
[float] [float]
[[new-name-no-master-block-setting]]
==== New name for `no_master_block` setting ==== New name for `no_master_block` setting
The `discovery.zen.no_master_block` setting is now known as The `discovery.zen.no_master_block` setting is now known as

View File

@ -22,12 +22,14 @@ Due to cross-cluster search using `:` to separate a cluster and index name,
index names may no longer contain `:`. index names may no longer contain `:`.
[float] [float]
[[index-unassigned-node-left-delayed-timeout-no-longer-negative]]
==== `index.unassigned.node_left.delayed_timeout` may no longer be negative ==== `index.unassigned.node_left.delayed_timeout` may no longer be negative
Negative values were interpreted as zero in earlier versions but are no Negative values were interpreted as zero in earlier versions but are no
longer accepted. longer accepted.
[float] [float]
[[flush-force-merge-no-longer-refresh]]
==== `_flush` and `_force_merge` will no longer refresh ==== `_flush` and `_force_merge` will no longer refresh
In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`) In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`)
@ -85,6 +87,7 @@ The following previously deprecated url parameter have been removed:
* `field_data` - use `fielddata` instead * `field_data` - use `fielddata` instead
[float] [float]
[[network-breaker-inflight-requests-overhead-increased-to-2]]
==== `network.breaker.inflight_requests.overhead` increased to 2 ==== `network.breaker.inflight_requests.overhead` increased to 2
Previously the in flight requests circuit breaker considered only the raw byte representation. Previously the in flight requests circuit breaker considered only the raw byte representation.
@ -108,11 +111,13 @@ there is less need for fielddata. Therefore, the default value of the setting
heap size. heap size.
[float] [float]
[[fix-value-for-index-shard-check-on-startup-removed]]
==== `fix` value for `index.shard.check_on_startup` is removed ==== `fix` value for `index.shard.check_on_startup` is removed
Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported. Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported.
[float] [float]
[[elasticsearch-translog-removed]]
==== `elasticsearch-translog` is removed ==== `elasticsearch-translog` is removed
Use the `elasticsearch-shard` tool to remove corrupted translog data. Use the `elasticsearch-shard` tool to remove corrupted translog data.

View File

@ -10,6 +10,7 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[isshardsacked-removed]]
==== `isShardsAcked` deprecated in `6.2` has been removed ==== `isShardsAcked` deprecated in `6.2` has been removed
`isShardsAcked` has been replaced by `isShardsAcknowledged` in `isShardsAcked` has been replaced by `isShardsAcknowledged` in
@ -17,6 +18,7 @@
`CreateIndexClusterStateUpdateResponse`. `CreateIndexClusterStateUpdateResponse`.
[float] [float]
[[prepareexecute-removed-client-api]]
==== `prepareExecute` removed from the client api ==== `prepareExecute` removed from the client api
The `prepareExecute` method which created a request builder has been The `prepareExecute` method which created a request builder has been
@ -36,18 +38,21 @@ was moved to `org.elasticsearch.search.aggregations.PipelineAggregationBuilders`
[float] [float]
[[retry-withbackoff-methods-removed]]
==== `Retry.withBackoff` methods with `Settings` removed ==== `Retry.withBackoff` methods with `Settings` removed
The variants of `Retry.withBackoff` that included `Settings` have been removed The variants of `Retry.withBackoff` that included `Settings` have been removed
because `Settings` is no longer needed. because `Settings` is no longer needed.
[float] [float]
[[client-termvector-removed]]
==== Deprecated method `Client#termVector` removed ==== Deprecated method `Client#termVector` removed
The client method `termVector`, deprecated in 2.0, has been removed. The method The client method `termVector`, deprecated in 2.0, has been removed. The method
`termVectors` (plural) should be used instead. `termVectors` (plural) should be used instead.
[float] [float]
[[abstractlifecyclecomponent-constructor-removed]]
==== Deprecated constructor `AbstractLifecycleComponent(Settings settings)` removed ==== Deprecated constructor `AbstractLifecycleComponent(Settings settings)` removed
The constructor `AbstractLifecycleComponent(Settings settings)`, deprecated in 6.7 The constructor `AbstractLifecycleComponent(Settings settings)`, deprecated in 6.7

View File

@ -82,6 +82,7 @@ compile time, ensure you have proper test coverage for this in your
own code. own code.
[float] [float]
[[parsing-gtm0-timezeone-jdk8-not-supported]]
==== Parsing `GMT0` timezone with JDK8 is not supported ==== Parsing `GMT0` timezone with JDK8 is not supported
When you are running Elasticsearch 7 with Java 8, you are not able to parse When you are running Elasticsearch 7 with Java 8, you are not able to parse

View File

@ -10,6 +10,7 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[new-json-format-log-directory]]
==== New JSON format log files in `log` directory ==== New JSON format log files in `log` directory
Elasticsearch now will produce additional log files in JSON format. They will be stored in `*.json` suffix files. Elasticsearch now will produce additional log files in JSON format. They will be stored in `*.json` suffix files.
@ -28,6 +29,7 @@ Following files should be expected now in log directory:
Note: You can configure which of these files are written by editing `log4j2.properties`. Note: You can configure which of these files are written by editing `log4j2.properties`.
[float] [float]
[[log-files-ending-log-deprecated]]
==== Log files ending with `*.log` deprecated ==== Log files ending with `*.log` deprecated
Log files with the `.log` file extension using the old pattern layout format Log files with the `.log` file extension using the old pattern layout format
are now considered deprecated and the newly added JSON log file format with are now considered deprecated and the newly added JSON log file format with

View File

@ -10,6 +10,7 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[maxretrytimeout-removed]]
==== Support for `maxRetryTimeout` removed from RestClient ==== Support for `maxRetryTimeout` removed from RestClient
`RestClient` and `RestClientBuilder` no longer support the `maxRetryTimeout` `RestClient` and `RestClientBuilder` no longer support the `maxRetryTimeout`

View File

@ -10,11 +10,13 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[all-meta-field-removed]]
==== The `_all` meta field is removed ==== The `_all` meta field is removed
The `_all` field deprecated in 6 have now been removed. The `_all` field deprecated in 6 have now been removed.
[float] [float]
[[uid-meta-field-removed]]
==== The `_uid` meta field is removed ==== The `_uid` meta field is removed
This field used to index a composite key formed of the `_type` and the `_id`. This field used to index a composite key formed of the `_type` and the `_id`.
@ -23,6 +25,7 @@ of `_id`.
//tag::notable-breaking-changes[] //tag::notable-breaking-changes[]
[float] [float]
[[default-mapping-not-allowed]]
==== The `_default_` mapping is no longer allowed ==== The `_default_` mapping is no longer allowed
The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed
@ -31,11 +34,13 @@ an error.
//end::notable-breaking-changes[] //end::notable-breaking-changes[]
[float] [float]
[[index-options-numeric-fields-removed]]
==== `index_options` for numeric fields has been removed ==== `index_options` for numeric fields has been removed
The `index_options` field for numeric fields has been deprecated in 6 and has now been removed. The `index_options` field for numeric fields has been deprecated in 6 and has now been removed.
[float] [float]
[[limit-number-nested-json-objects]]
==== Limiting the number of `nested` json objects ==== Limiting the number of `nested` json objects
To safeguard against out of memory errors, the number of nested json objects within a single To safeguard against out of memory errors, the number of nested json objects within a single
@ -43,11 +48,13 @@ document across all fields has been limited to 10000. This default limit can be
the index setting `index.mapping.nested_objects.limit`. the index setting `index.mapping.nested_objects.limit`.
[float] [float]
[[update-all-types-option-removed]]
==== The `update_all_types` option has been removed ==== The `update_all_types` option has been removed
This option is useless now that all indices have at most one type. This option is useless now that all indices have at most one type.
[float] [float]
[[classic-similarity-removed]]
==== The `classic` similarity has been removed ==== The `classic` similarity has been removed
The `classic` similarity relied on coordination factors for scoring to be good The `classic` similarity relied on coordination factors for scoring to be good
@ -63,6 +70,7 @@ An error will now be thrown when unknown configuration options are provided
to similarities. Such unknown parameters were ignored before. to similarities. Such unknown parameters were ignored before.
[float] [float]
[[changed-default-geo-shape-index-strategy]]
==== Changed default `geo_shape` indexing strategy ==== Changed default `geo_shape` indexing strategy
`geo_shape` types now default to using a vector indexing approach based on Lucene's new `geo_shape` types now default to using a vector indexing approach based on Lucene's new
@ -76,6 +84,7 @@ should also be changed in the template to explicitly define `tree` to one of `ge
or `quadtree`. This will ensure compatibility with previously created indexes. or `quadtree`. This will ensure compatibility with previously created indexes.
[float] [float]
[[deprecated-geo-shape-params]]
==== Deprecated `geo_shape` parameters ==== Deprecated `geo_shape` parameters
The following type parameters are deprecated for the `geo_shape` field type: `tree`, The following type parameters are deprecated for the `geo_shape` field type: `tree`,
@ -90,6 +99,7 @@ to 10 in the next major version. Completion fields that define more than 10
contexts in a mapping will log a deprecation warning in this version. contexts in a mapping will log a deprecation warning in this version.
[float] [float]
[[include-type-name-defaults-false]]
==== `include_type_name` now defaults to `false` ==== `include_type_name` now defaults to `false`
The default for `include_type_name` is now `false` for all APIs that accept The default for `include_type_name` is now `false` for all APIs that accept
the parameter. the parameter.

View File

@ -84,6 +84,7 @@ Tribe node functionality has been removed in favor of
can no longer provide their own discovery implementations. can no longer provide their own discovery implementations.
[float] [float]
[[watcher-hipchat-action-removed]]
==== Watcher 'hipchat' action removed ==== Watcher 'hipchat' action removed
Hipchat has been deprecated and shut down as a service. The `hipchat` action for Hipchat has been deprecated and shut down as a service. The `hipchat` action for

View File

@ -10,6 +10,7 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[remove-header-args]]
==== API methods accepting `Header` argument have been removed ==== API methods accepting `Header` argument have been removed
All API methods accepting headers as a `Header` varargs argument, deprecated All API methods accepting headers as a `Header` varargs argument, deprecated
@ -22,6 +23,7 @@ e.g. `client.index(indexRequest, new Header("name", "value"))` becomes
`client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());` `client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());`
[float] [float]
[[cluster-health-api-default-cluster-level]]
==== Cluster Health API default to `cluster` level ==== Cluster Health API default to `cluster` level
The Cluster Health API used to default to `shards` level to ease migration The Cluster Health API used to default to `shards` level to ease migration

View File

@ -32,6 +32,7 @@ To check if a document is missing a value, you can use
[float] [float]
[[script-errors-return-400-error-codes]]
==== Script errors will return as `400` error codes ==== Script errors will return as `400` error codes
Malformed scripts, either in search templates, ingest pipelines or search Malformed scripts, either in search templates, ingest pipelines or search

View File

@ -77,6 +77,7 @@ PUT /_cluster/settings
// CONSOLE // CONSOLE
[float] [float]
[[search-api-returns-400-invalid-requests]]
==== Search API returns `400` for invalid requests ==== Search API returns `400` for invalid requests
The Search API returns `400 - Bad request` while it would previously return The Search API returns `400 - Bad request` while it would previously return
@ -91,6 +92,7 @@ The Search API returns `400 - Bad request` while it would previously return
* script compilation errors * script compilation errors
[float] [float]
[[scroll-queries-cannot-use-request-cache]]
==== Scroll queries cannot use the `request_cache` anymore ==== Scroll queries cannot use the `request_cache` anymore
Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`) Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`)
@ -98,6 +100,7 @@ has been deprecated in 6 and will now return a `400 - Bad request`.
Scroll queries are not meant to be cached. Scroll queries are not meant to be cached.
[float] [float]
[[scroll-queries-cannot-use-rescore]]
==== Scroll queries cannot use `rescore` anymore ==== Scroll queries cannot use `rescore` anymore
Including a rescore clause on a query that creates a scroll (`scroll=1m`) has Including a rescore clause on a query that creates a scroll (`scroll=1m`) has
@ -117,6 +120,7 @@ removed.
* `jarowinkler` - replaced by `jaro_winkler` * `jarowinkler` - replaced by `jaro_winkler`
[float] [float]
[[popular-mode-suggesters]]
==== `popular` mode for Suggesters ==== `popular` mode for Suggesters
The `popular` mode for Suggesters (`term` and `phrase`) now uses the doc frequency The `popular` mode for Suggesters (`term` and `phrase`) now uses the doc frequency
@ -149,6 +153,7 @@ To safeguard against this, a hard limit of 1024 fields has been introduced for q
using the "all fields" mode ("default_field": "*") or other fieldname expansions (e.g. "foo*"). using the "all fields" mode ("default_field": "*") or other fieldname expansions (e.g. "foo*").
[float] [float]
[[invalid-search-request-body]]
==== Invalid `_search` request body ==== Invalid `_search` request body
Search requests with extra content after the main object will no longer be accepted Search requests with extra content after the main object will no longer be accepted
@ -176,6 +181,7 @@ For geo context the value of the `path` parameter is now validated against the m
and the context is only accepted if `path` points to a field with `geo_point` type. and the context is only accepted if `path` points to a field with `geo_point` type.
[float] [float]
[[semantics-changed-max-concurrent-shared-requests]]
==== Semantics changed for `max_concurrent_shard_requests` ==== Semantics changed for `max_concurrent_shard_requests`
`max_concurrent_shard_requests` used to limit the total number of concurrent shard `max_concurrent_shard_requests` used to limit the total number of concurrent shard
@ -183,6 +189,7 @@ requests a single high level search request can execute. In 7.0 this changed to
max number of concurrent shard requests per node. The default is now `5`. max number of concurrent shard requests per node. The default is now `5`.
[float] [float]
[[max-score-set-to-null-when-untracked]]
==== `max_score` set to `null` when scores are not tracked ==== `max_score` set to `null` when scores are not tracked
`max_score` used to be set to `0` whenever scores are not tracked. `null` is now used `max_score` used to be set to `0` whenever scores are not tracked. `null` is now used
@ -214,6 +221,7 @@ major version.
//tag::notable-breaking-changes[] //tag::notable-breaking-changes[]
[float] [float]
[[hits-total-now-object-search-response]]
==== `hits.total` is now an object in the search response ==== `hits.total` is now an object in the search response
The total hits that match the search request is now returned as an object The total hits that match the search request is now returned as an object
@ -245,6 +253,7 @@ will be removed in the next major version (8.0).
//end::notable-breaking-changes[] //end::notable-breaking-changes[]
[float] [float]
[[hits-total-omitted-if-disabled]]
==== `hits.total` is omitted in the response if `track_total_hits` is disabled (false) ==== `hits.total` is omitted in the response if `track_total_hits` is disabled (false)
If `track_total_hits` is set to `false` in the search request the search response If `track_total_hits` is set to `false` in the search request the search response
@ -254,6 +263,7 @@ to get the old format back (`"total": -1`).
//tag::notable-breaking-changes[] //tag::notable-breaking-changes[]
[float] [float]
[[track-total-hits-10000-default]]
==== `track_total_hits` defaults to 10,000 ==== `track_total_hits` defaults to 10,000
By default search request will count the total hits accurately up to `10,000` By default search request will count the total hits accurately up to `10,000`

View File

@ -10,6 +10,7 @@
// end::notable-breaking-changes[] // end::notable-breaking-changes[]
[float] [float]
[[default-node-name-now-hostname]]
==== The default for `node.name` is now the hostname ==== The default for `node.name` is now the hostname
`node.name` now defaults to the hostname at the time when Elasticsearch `node.name` now defaults to the hostname at the time when Elasticsearch

View File

@ -16,6 +16,7 @@ Snapshot stats details are provided in a new structured way:
* In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied. * In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied.
[float] [float]
[[snapshot-stats-deprecated]]
==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed ==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed
* Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`. * Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`.

View File

@ -19,11 +19,13 @@ To learn about monitoring in general, see
//NOTE: The tagged regions are re-used in the Stack Overview. //NOTE: The tagged regions are re-used in the Stack Overview.
. Enable the collection of monitoring data. Set . Enable the collection of monitoring data. +
`xpack.monitoring.collection.enabled` to `true` on each node in the production +
cluster. By default, it is disabled (`false`).
+
-- --
// tag::enable-collection[]
Set `xpack.monitoring.collection.enabled` to `true` on each node in the
production cluster. By default, it is disabled (`false`).
NOTE: You can specify this setting in either the `elasticsearch.yml` on each NOTE: You can specify this setting in either the `elasticsearch.yml` on each
node or across the cluster as a dynamic cluster setting. If {es} node or across the cluster as a dynamic cluster setting. If {es}
{security-features} are enabled, you must have `monitor` cluster privileges to {security-features} are enabled, you must have `monitor` cluster privileges to
@ -43,15 +45,17 @@ PUT _cluster/settings
} }
---------------------------------- ----------------------------------
// CONSOLE // CONSOLE
// end::enable-collection[]
For more information, see <<monitoring-settings>> and <<cluster-update-settings>>. For more information, see <<monitoring-settings>> and <<cluster-update-settings>>.
-- --
. Disable the default collection of {es} monitoring metrics. Set . Disable the default collection of {es} monitoring metrics. +
`xpack.monitoring.elasticsearch.collection.enabled` to `false` on each node in +
the production cluster.
+
-- --
// tag::disable-default-collection[]
Set `xpack.monitoring.elasticsearch.collection.enabled` to `false` on each node
in the production cluster.
NOTE: You can specify this setting in either the `elasticsearch.yml` on each NOTE: You can specify this setting in either the `elasticsearch.yml` on each
node or across the cluster as a dynamic cluster setting. If {es} node or across the cluster as a dynamic cluster setting. If {es}
{security-features} are enabled, you must have `monitor` cluster privileges to {security-features} are enabled, you must have `monitor` cluster privileges to
@ -70,7 +74,8 @@ PUT _cluster/settings
---------------------------------- ----------------------------------
// CONSOLE // CONSOLE
Leave `xpack.monitoring.enabled` set to its default value (`true`). Leave `xpack.monitoring.enabled` set to its default value (`true`).
// end::disable-default-collection[]
-- --
. {metricbeat-ref}/metricbeat-installation.html[Install {metricbeat}] on each . {metricbeat-ref}/metricbeat-installation.html[Install {metricbeat}] on each

View File

@ -52,12 +52,12 @@ GET /_search
"query": { <1> "query": { <1>
"bool": { <2> "bool": { <2>
"must": [ "must": [
{ "match": { "title": "Search" }}, <2> { "match": { "title": "Search" }},
{ "match": { "content": "Elasticsearch" }} <2> { "match": { "content": "Elasticsearch" }}
], ],
"filter": [ <3> "filter": [ <3>
{ "term": { "status": "published" }}, <4> { "term": { "status": "published" }},
{ "range": { "publish_date": { "gte": "2015-01-01" }}} <4> { "range": { "publish_date": { "gte": "2015-01-01" }}}
] ]
} }
} }
@ -68,11 +68,16 @@ GET /_search
<2> The `bool` and two `match` clauses are used in query context, <2> The `bool` and two `match` clauses are used in query context,
which means that they are used to score how well each document which means that they are used to score how well each document
matches. matches.
<3> The `filter` parameter indicates filter context. <3> The `filter` parameter indicates filter context. Its `term` and
<4> The `term` and `range` clauses are used in filter context. `range` clauses are used in filter context. They will filter out
They will filter out documents which do not match, but they will documents which do not match, but they will
not affect the score for matching documents. not affect the score for matching documents.
WARNING: Scores calculated for queries in query context are represented
as single precision floating point numbers; they have only
24 bits for significand's precision. Score calculations that exceed the
significand's precision will be converted to floats with loss of precision.
TIP: Use query clauses in query context for conditions which should affect the TIP: Use query clauses in query context for conditions which should affect the
score of matching documents (i.e. how well does the document match), and use score of matching documents (i.e. how well does the document match), and use
all other query clauses in filter context. all other query clauses in filter context.

View File

@ -1,51 +1,67 @@
[[query-dsl-wildcard-query]] [[query-dsl-wildcard-query]]
=== Wildcard Query === Wildcard Query
Returns documents that contain terms matching a wildcard pattern.
Matches documents that have fields matching a wildcard expression (*not A wildcard operator is a placeholder that matches one or more characters. For
analyzed*). Supported wildcards are `*`, which matches any character example, the `*` wildcard operator matches zero or more characters. You can
sequence (including the empty one), and `?`, which matches any single combine wildcard operators with other characters to create a wildcard pattern.
character. Note that this query can be slow, as it needs to iterate over many
terms. In order to prevent extremely slow wildcard queries, a wildcard [[wildcard-query-ex-request]]
term should not start with one of the wildcards `*` or `?`. The wildcard ==== Example request
query maps to Lucene `WildcardQuery`.
The following search returns documents where the `user` field contains a term
that begins with `ki` and ends with `y`. These matching terms can include `kiy`,
`kity`, or `kimchy`.
[source,js] [source,js]
-------------------------------------------------- ----
GET /_search GET /_search
{ {
"query": { "query": {
"wildcard" : { "user" : "ki*y" } "wildcard": {
"user": {
"value": "ki*y",
"boost": 1.0,
"rewrite": "constant_score"
}
}
} }
} }
-------------------------------------------------- ----
// CONSOLE // CONSOLE
A boost can also be associated with the query: [[wildcard-top-level-params]]
==== Top-level parameters for `wildcard`
`<field>`::
Field you wish to search.
[source,js] [[wildcard-query-field-params]]
-------------------------------------------------- ==== Parameters for `<field>`
GET /_search `value`::
{ Wildcard pattern for terms you wish to find in the provided `<field>`.
"query": { +
"wildcard" : { "user" : { "value" : "ki*y", "boost" : 2.0 } } --
} This parameter supports two wildcard operators:
}
--------------------------------------------------
// CONSOLE
Or : * `?`, which matches any single character
* `*`, which can match zero or more characters, including an empty one
[source,js] WARNING: Avoid beginning patterns with `*` or `?`. This can increase
-------------------------------------------------- the iterations needed to find matching terms and slow search performance.
GET /_search --
{
"query": {
"wildcard" : { "user" : { "wildcard" : "ki*y", "boost" : 2.0 } }
}
}
--------------------------------------------------
// CONSOLE
This multi term query allows to control how it gets rewritten using the `boost`::
<<query-dsl-multi-term-rewrite,rewrite>> Floating point number used to decrease or increase the
parameter. <<query-filter-context, relevance scores>> of a query. Default is `1.0`.
Optional.
+
You can use the `boost` parameter to adjust relevance scores for searches
containing two or more queries.
+
Boost values are relative to the default value of `1.0`. A boost value between
`0` and `1.0` decreases the relevance score. A value greater than `1.0`
increases the relevance score.
`rewrite` (Expert)::
Method used to rewrite the query. For valid values and more information, see the
<<query-dsl-multi-term-rewrite, `rewrite` parameter>>. Optional.

View File

@ -283,5 +283,6 @@ For example: `["elasticsearch_version_mismatch","xpack_license_expiration"]`.
:component: {monitoring} :component: {monitoring}
:verifies: :verifies:
:server!: :server!:
:ssl-context: monitoring
include::ssl-settings.asciidoc[] include::ssl-settings.asciidoc[]

View File

@ -85,6 +85,7 @@ corresponding endpoints are whitelisted as well.
:component: {watcher} :component: {watcher}
:verifies: :verifies:
:server!: :server!:
:ssl-context: watcher
include::ssl-settings.asciidoc[] include::ssl-settings.asciidoc[]

View File

@ -1582,6 +1582,7 @@ a PKCS#12 container includes trusted certificate ("anchor") entries look for
:client-auth-default: none :client-auth-default: none
:verifies!: :verifies!:
:server: :server:
:ssl-context: security-http
include::ssl-settings.asciidoc[] include::ssl-settings.asciidoc[]
@ -1591,6 +1592,7 @@ include::ssl-settings.asciidoc[]
:client-auth-default!: :client-auth-default!:
:verifies: :verifies:
:server: :server:
:ssl-context: security-transport
include::ssl-settings.asciidoc[] include::ssl-settings.asciidoc[]

View File

@ -1,4 +1,3 @@
==== {component} TLS/SSL Settings ==== {component} TLS/SSL Settings
You can configure the following TLS/SSL settings. If the settings are not configured, You can configure the following TLS/SSL settings. If the settings are not configured,
the {ref}/security-settings.html#ssl-tls-settings[Default TLS/SSL Settings] the {ref}/security-settings.html#ssl-tls-settings[Default TLS/SSL Settings]
@ -39,7 +38,13 @@ endif::verifies[]
Supported cipher suites can be found in Oracle's http://docs.oracle.com/javase/8/docs/technotes/guides/security/SunProviders.html[ Supported cipher suites can be found in Oracle's http://docs.oracle.com/javase/8/docs/technotes/guides/security/SunProviders.html[
Java Cryptography Architecture documentation]. Defaults to ``. Java Cryptography Architecture documentation]. Defaults to ``.
ifdef::asciidoctor[]
[#{ssl-context}-tls-ssl-key-trusted-certificate-settings]
===== {component} TLS/SSL Key and Trusted Certificate Settings ===== {component} TLS/SSL Key and Trusted Certificate Settings
endif::[]
ifndef::asciidoctor[]
===== anchor:{ssl-context}-tls-ssl-key-trusted-certificate-settings[] {component} TLS/SSL Key and Trusted Certificate Settings
endif::[]
The following settings are used to specify a private key, certificate, and the The following settings are used to specify a private key, certificate, and the
trusted certificates that should be used when communicating over an SSL/TLS connection. trusted certificates that should be used when communicating over an SSL/TLS connection.
@ -105,7 +110,13 @@ Password to the truststore.
+{ssl-prefix}.ssl.truststore.secure_password+ (<<secure-settings,Secure>>):: +{ssl-prefix}.ssl.truststore.secure_password+ (<<secure-settings,Secure>>)::
Password to the truststore. Password to the truststore.
ifdef::asciidoctor[]
[#{ssl-context}-pkcs12-files]
===== PKCS#12 Files ===== PKCS#12 Files
endif::[]
ifndef::asciidoctor[]
===== anchor:{ssl-context}-pkcs12-files[] PKCS#12 Files
endif::[]
{es} can be configured to use PKCS#12 container files (`.p12` or `.pfx` files) {es} can be configured to use PKCS#12 container files (`.p12` or `.pfx` files)
that contain the private key, certificate and certificates that should be trusted. that contain the private key, certificate and certificates that should be trusted.
@ -143,7 +154,13 @@ Password to the PKCS#12 file.
+{ssl-prefix}.ssl.truststore.secure_password+ (<<secure-settings,Secure>>):: +{ssl-prefix}.ssl.truststore.secure_password+ (<<secure-settings,Secure>>)::
Password to the PKCS#12 file. Password to the PKCS#12 file.
ifdef::asciidoctor[]
[#{ssl-context}-pkcs11-tokens]
===== PKCS#11 Tokens ===== PKCS#11 Tokens
endif::[]
ifndef::asciidoctor[]
===== anchor:{ssl-context}-pkcs11-tokens[] PKCS#11 Tokens
endif::[]
{es} can be configured to use a PKCS#11 token that contains the private key, {es} can be configured to use a PKCS#11 token that contains the private key,
certificate and certificates that should be trusted. certificate and certificates that should be trusted.

View File

@ -56,13 +56,17 @@ public class WellKnownText {
private static final String EOF = "END-OF-STREAM"; private static final String EOF = "END-OF-STREAM";
private static final String EOL = "END-OF-LINE"; private static final String EOL = "END-OF-LINE";
public static String toWKT(Geometry geometry) { public WellKnownText() {
}
public String toWKT(Geometry geometry) {
StringBuilder builder = new StringBuilder(); StringBuilder builder = new StringBuilder();
toWKT(geometry, builder); toWKT(geometry, builder);
return builder.toString(); return builder.toString();
} }
public static void toWKT(Geometry geometry, StringBuilder sb) { public void toWKT(Geometry geometry, StringBuilder sb) {
sb.append(getWKTName(geometry)); sb.append(getWKTName(geometry));
sb.append(SPACE); sb.append(SPACE);
if (geometry.isEmpty()) { if (geometry.isEmpty()) {
@ -216,7 +220,7 @@ public class WellKnownText {
} }
} }
public static Geometry fromWKT(String wkt) throws IOException, ParseException { public Geometry fromWKT(String wkt) throws IOException, ParseException {
StringReader reader = new StringReader(wkt); StringReader reader = new StringReader(wkt);
try { try {
// setup the tokenizer; configured to read words w/o numbers // setup the tokenizer; configured to read words w/o numbers

View File

@ -53,9 +53,10 @@ abstract class BaseGeometryTestCase<T extends Geometry> extends AbstractWireTest
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Override @Override
protected T copyInstance(T instance, Version version) throws IOException { protected T copyInstance(T instance, Version version) throws IOException {
String text = WellKnownText.toWKT(instance); WellKnownText wkt = new WellKnownText();
String text = wkt.toWKT(instance);
try { try {
return (T) WellKnownText.fromWKT(text); return (T) wkt.fromWKT(text);
} catch (ParseException e) { } catch (ParseException e) {
throw new ElasticsearchException(e); throw new ElasticsearchException(e);
} }

View File

@ -36,14 +36,15 @@ public class CircleTests extends BaseGeometryTestCase<Circle> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
assertEquals("circle (20.0 10.0 15.0)", WellKnownText.toWKT(new Circle(10, 20, 15))); WellKnownText wkt = new WellKnownText();
assertEquals(new Circle(10, 20, 15), WellKnownText.fromWKT("circle (20.0 10.0 15.0)")); assertEquals("circle (20.0 10.0 15.0)", wkt.toWKT(new Circle(10, 20, 15)));
assertEquals(new Circle(10, 20, 15), wkt.fromWKT("circle (20.0 10.0 15.0)"));
assertEquals("circle (20.0 10.0 15.0 25.0)", WellKnownText.toWKT(new Circle(10, 20, 25, 15))); assertEquals("circle (20.0 10.0 15.0 25.0)", wkt.toWKT(new Circle(10, 20, 25, 15)));
assertEquals(new Circle(10, 20, 25, 15), WellKnownText.fromWKT("circle (20.0 10.0 15.0 25.0)")); assertEquals(new Circle(10, 20, 25, 15), wkt.fromWKT("circle (20.0 10.0 15.0 25.0)"));
assertEquals("circle EMPTY", WellKnownText.toWKT(Circle.EMPTY)); assertEquals("circle EMPTY", wkt.toWKT(Circle.EMPTY));
assertEquals(Circle.EMPTY, WellKnownText.fromWKT("circle EMPTY)")); assertEquals(Circle.EMPTY, wkt.fromWKT("circle EMPTY)"));
} }
public void testInitValidation() { public void testInitValidation() {

View File

@ -35,14 +35,15 @@ public class GeometryCollectionTests extends BaseGeometryTestCase<GeometryCollec
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
WellKnownText wkt = new WellKnownText();
assertEquals("geometrycollection (point (20.0 10.0),point EMPTY)", assertEquals("geometrycollection (point (20.0 10.0),point EMPTY)",
WellKnownText.toWKT(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY)))); wkt.toWKT(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY))));
assertEquals(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY)), assertEquals(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY)),
WellKnownText.fromWKT("geometrycollection (point (20.0 10.0),point EMPTY)")); wkt.fromWKT("geometrycollection (point (20.0 10.0),point EMPTY)"));
assertEquals("geometrycollection EMPTY", WellKnownText.toWKT(GeometryCollection.EMPTY)); assertEquals("geometrycollection EMPTY", wkt.toWKT(GeometryCollection.EMPTY));
assertEquals(GeometryCollection.EMPTY, WellKnownText.fromWKT("geometrycollection EMPTY)")); assertEquals(GeometryCollection.EMPTY, wkt.fromWKT("geometrycollection EMPTY)"));
} }
@SuppressWarnings("ConstantConditions") @SuppressWarnings("ConstantConditions")

View File

@ -31,16 +31,17 @@ public class LineTests extends BaseGeometryTestCase<Line> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
assertEquals("linestring (3.0 1.0, 4.0 2.0)", WellKnownText.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4}))); WellKnownText wkt = new WellKnownText();
assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}), WellKnownText.fromWKT("linestring (3 1, 4 2)")); assertEquals("linestring (3.0 1.0, 4.0 2.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4})));
assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}), wkt.fromWKT("linestring (3 1, 4 2)"));
assertEquals("linestring (3.0 1.0 5.0, 4.0 2.0 6.0)", WellKnownText.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4}, assertEquals("linestring (3.0 1.0 5.0, 4.0 2.0 6.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4},
new double[]{5, 6}))); new double[]{5, 6})));
assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5}), assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5}),
WellKnownText.fromWKT("linestring (3 1 6, 4 2 5)")); wkt.fromWKT("linestring (3 1 6, 4 2 5)"));
assertEquals("linestring EMPTY", WellKnownText.toWKT(Line.EMPTY)); assertEquals("linestring EMPTY", wkt.toWKT(Line.EMPTY));
assertEquals(Line.EMPTY, WellKnownText.fromWKT("linestring EMPTY)")); assertEquals(Line.EMPTY, wkt.fromWKT("linestring EMPTY)"));
} }
public void testInitValidation() { public void testInitValidation() {

View File

@ -26,7 +26,7 @@ public class LinearRingTests extends ESTestCase {
public void testBasicSerialization() { public void testBasicSerialization() {
UnsupportedOperationException ex = expectThrows(UnsupportedOperationException.class, UnsupportedOperationException ex = expectThrows(UnsupportedOperationException.class,
() -> WellKnownText.toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))); () -> new WellKnownText().toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})));
assertEquals("line ring cannot be serialized using WKT", ex.getMessage()); assertEquals("line ring cannot be serialized using WKT", ex.getMessage());
} }

View File

@ -40,12 +40,13 @@ public class MultiLineTests extends BaseGeometryTestCase<MultiLine> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
assertEquals("multilinestring ((3.0 1.0, 4.0 2.0))", WellKnownText.toWKT( WellKnownText wkt = new WellKnownText();
assertEquals("multilinestring ((3.0 1.0, 4.0 2.0))", wkt.toWKT(
new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))))); new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4})))));
assertEquals(new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))), assertEquals(new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))),
WellKnownText.fromWKT("multilinestring ((3 1, 4 2))")); wkt.fromWKT("multilinestring ((3 1, 4 2))"));
assertEquals("multilinestring EMPTY", WellKnownText.toWKT(MultiLine.EMPTY)); assertEquals("multilinestring EMPTY", wkt.toWKT(MultiLine.EMPTY));
assertEquals(MultiLine.EMPTY, WellKnownText.fromWKT("multilinestring EMPTY)")); assertEquals(MultiLine.EMPTY, wkt.fromWKT("multilinestring EMPTY)"));
} }
} }

View File

@ -41,22 +41,23 @@ public class MultiPointTests extends BaseGeometryTestCase<MultiPoint> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
assertEquals("multipoint (2.0 1.0)", WellKnownText.toWKT( WellKnownText wkt = new WellKnownText();
assertEquals("multipoint (2.0 1.0)", wkt.toWKT(
new MultiPoint(Collections.singletonList(new Point(1, 2))))); new MultiPoint(Collections.singletonList(new Point(1, 2)))));
assertEquals(new MultiPoint(Collections.singletonList(new Point(1 ,2))), assertEquals(new MultiPoint(Collections.singletonList(new Point(1 ,2))),
WellKnownText.fromWKT("multipoint (2 1)")); wkt.fromWKT("multipoint (2 1)"));
assertEquals("multipoint (2.0 1.0, 3.0 4.0)", assertEquals("multipoint (2.0 1.0, 3.0 4.0)",
WellKnownText.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3))))); wkt.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3)))));
assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3))), assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3))),
WellKnownText.fromWKT("multipoint (2 1, 3 4)")); wkt.fromWKT("multipoint (2 1, 3 4)"));
assertEquals("multipoint (2.0 1.0 10.0, 3.0 4.0 20.0)", assertEquals("multipoint (2.0 1.0 10.0, 3.0 4.0 20.0)",
WellKnownText.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20))))); wkt.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20)))));
assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20))), assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20))),
WellKnownText.fromWKT("multipoint (2 1 10, 3 4 20)")); wkt.fromWKT("multipoint (2 1 10, 3 4 20)"));
assertEquals("multipoint EMPTY", WellKnownText.toWKT(MultiPoint.EMPTY)); assertEquals("multipoint EMPTY", wkt.toWKT(MultiPoint.EMPTY));
assertEquals(MultiPoint.EMPTY, WellKnownText.fromWKT("multipoint EMPTY)")); assertEquals(MultiPoint.EMPTY, wkt.fromWKT("multipoint EMPTY)"));
} }
} }

View File

@ -40,14 +40,15 @@ public class MultiPolygonTests extends BaseGeometryTestCase<MultiPolygon> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
WellKnownText wkt = new WellKnownText();
assertEquals("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))", assertEquals("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))",
WellKnownText.toWKT(new MultiPolygon(Collections.singletonList( wkt.toWKT(new MultiPolygon(Collections.singletonList(
new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))))); new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))))));
assertEquals(new MultiPolygon(Collections.singletonList( assertEquals(new MultiPolygon(Collections.singletonList(
new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))), new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))),
WellKnownText.fromWKT("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))")); wkt.fromWKT("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))"));
assertEquals("multipolygon EMPTY", WellKnownText.toWKT(MultiPolygon.EMPTY)); assertEquals("multipolygon EMPTY", wkt.toWKT(MultiPolygon.EMPTY));
assertEquals(MultiPolygon.EMPTY, WellKnownText.fromWKT("multipolygon EMPTY)")); assertEquals(MultiPolygon.EMPTY, wkt.fromWKT("multipolygon EMPTY)"));
} }
} }

View File

@ -31,14 +31,15 @@ public class PointTests extends BaseGeometryTestCase<Point> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
assertEquals("point (20.0 10.0)", WellKnownText.toWKT(new Point(10, 20))); WellKnownText wkt = new WellKnownText();
assertEquals(new Point(10, 20), WellKnownText.fromWKT("point (20.0 10.0)")); assertEquals("point (20.0 10.0)", wkt.toWKT(new Point(10, 20)));
assertEquals(new Point(10, 20), wkt.fromWKT("point (20.0 10.0)"));
assertEquals("point (20.0 10.0 100.0)", WellKnownText.toWKT(new Point(10, 20, 100))); assertEquals("point (20.0 10.0 100.0)", wkt.toWKT(new Point(10, 20, 100)));
assertEquals(new Point(10, 20, 100), WellKnownText.fromWKT("point (20.0 10.0 100.0)")); assertEquals(new Point(10, 20, 100), wkt.fromWKT("point (20.0 10.0 100.0)"));
assertEquals("point EMPTY", WellKnownText.toWKT(Point.EMPTY)); assertEquals("point EMPTY", wkt.toWKT(Point.EMPTY));
assertEquals(Point.EMPTY, WellKnownText.fromWKT("point EMPTY)")); assertEquals(Point.EMPTY, wkt.fromWKT("point EMPTY)"));
} }
public void testInitValidation() { public void testInitValidation() {

View File

@ -32,18 +32,19 @@ public class PolygonTests extends BaseGeometryTestCase<Polygon> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
WellKnownText wkt = new WellKnownText();
assertEquals("polygon ((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0))", assertEquals("polygon ((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0))",
WellKnownText.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))); wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))));
assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})), assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})),
WellKnownText.fromWKT("polygon ((3 1, 4 2, 5 3, 3 1))")); wkt.fromWKT("polygon ((3 1, 4 2, 5 3, 3 1))"));
assertEquals("polygon ((3.0 1.0 5.0, 4.0 2.0 4.0, 5.0 3.0 3.0, 3.0 1.0 5.0))", assertEquals("polygon ((3.0 1.0 5.0, 4.0 2.0 4.0, 5.0 3.0 3.0, 3.0 1.0 5.0))",
WellKnownText.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5})))); wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5}))));
assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5})), assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5})),
WellKnownText.fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))")); wkt.fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))"));
assertEquals("polygon EMPTY", WellKnownText.toWKT(Polygon.EMPTY)); assertEquals("polygon EMPTY", wkt.toWKT(Polygon.EMPTY));
assertEquals(Polygon.EMPTY, WellKnownText.fromWKT("polygon EMPTY)")); assertEquals(Polygon.EMPTY, wkt.fromWKT("polygon EMPTY)"));
} }
public void testInitValidation() { public void testInitValidation() {

View File

@ -32,11 +32,12 @@ public class RectangleTests extends BaseGeometryTestCase<Rectangle> {
} }
public void testBasicSerialization() throws IOException, ParseException { public void testBasicSerialization() throws IOException, ParseException {
assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", WellKnownText.toWKT(new Rectangle(30, 40, 10, 20))); WellKnownText wkt = new WellKnownText();
assertEquals(new Rectangle(30, 40, 10, 20), WellKnownText.fromWKT("bbox (10.0, 20.0, 40.0, 30.0)")); assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", wkt.toWKT(new Rectangle(30, 40, 10, 20)));
assertEquals(new Rectangle(30, 40, 10, 20), wkt.fromWKT("bbox (10.0, 20.0, 40.0, 30.0)"));
assertEquals("bbox EMPTY", WellKnownText.toWKT(Rectangle.EMPTY)); assertEquals("bbox EMPTY", wkt.toWKT(Rectangle.EMPTY));
assertEquals(Rectangle.EMPTY, WellKnownText.fromWKT("bbox EMPTY)")); assertEquals(Rectangle.EMPTY, wkt.fromWKT("bbox EMPTY)"));
} }
public void testInitValidation() { public void testInitValidation() {

View File

@ -38,20 +38,37 @@ class ExpressionAggregationScript implements AggregationScript.LeafFactory {
final Expression exprScript; final Expression exprScript;
final SimpleBindings bindings; final SimpleBindings bindings;
final DoubleValuesSource source; final DoubleValuesSource source;
final boolean needsScore;
final ReplaceableConstDoubleValueSource specialValue; // _value final ReplaceableConstDoubleValueSource specialValue; // _value
ExpressionAggregationScript(Expression e, SimpleBindings b, ReplaceableConstDoubleValueSource v) { ExpressionAggregationScript(Expression e, SimpleBindings b, boolean n, ReplaceableConstDoubleValueSource v) {
exprScript = e; exprScript = e;
bindings = b; bindings = b;
source = exprScript.getDoubleValuesSource(bindings); source = exprScript.getDoubleValuesSource(bindings);
needsScore = n;
specialValue = v; specialValue = v;
} }
@Override
public boolean needs_score() {
return needsScore;
}
@Override @Override
public AggregationScript newInstance(final LeafReaderContext leaf) throws IOException { public AggregationScript newInstance(final LeafReaderContext leaf) throws IOException {
return new AggregationScript() { return new AggregationScript() {
// Fake the scorer until setScorer is called. // Fake the scorer until setScorer is called.
DoubleValues values = source.getValues(leaf, null); DoubleValues values = source.getValues(leaf, new DoubleValues() {
@Override
public double doubleValue() throws IOException {
return get_score().doubleValue();
}
@Override
public boolean advanceExact(int doc) throws IOException {
return true;
}
});
@Override @Override
public Object execute() { public Object execute() {
@ -84,10 +101,4 @@ class ExpressionAggregationScript implements AggregationScript.LeafFactory {
} }
}; };
} }
@Override
public boolean needs_score() {
return false;
}
} }

View File

@ -221,10 +221,14 @@ public class ExpressionScriptEngine implements ScriptEngine {
// NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
// instead of complicating SimpleBindings (which should stay simple) // instead of complicating SimpleBindings (which should stay simple)
SimpleBindings bindings = new SimpleBindings(); SimpleBindings bindings = new SimpleBindings();
boolean needsScores = false;
ReplaceableConstDoubleValueSource specialValue = null; ReplaceableConstDoubleValueSource specialValue = null;
for (String variable : expr.variables) { for (String variable : expr.variables) {
try { try {
if (variable.equals("_value")) { if (variable.equals("_score")) {
bindings.add(new SortField("_score", SortField.Type.SCORE));
needsScores = true;
} else if (variable.equals("_value")) {
specialValue = new ReplaceableConstDoubleValueSource(); specialValue = new ReplaceableConstDoubleValueSource();
bindings.add("_value", specialValue); bindings.add("_value", specialValue);
// noop: _value is special for aggregations, and is handled in ExpressionScriptBindings // noop: _value is special for aggregations, and is handled in ExpressionScriptBindings
@ -237,6 +241,7 @@ public class ExpressionScriptEngine implements ScriptEngine {
// delegate valuesource creation based on field's type // delegate valuesource creation based on field's type
// there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods.
final ValueSource valueSource = getDocValueSource(variable, lookup); final ValueSource valueSource = getDocValueSource(variable, lookup);
needsScores |= valueSource.getSortField(false).needsScores();
bindings.add(variable, valueSource.asDoubleValuesSource()); bindings.add(variable, valueSource.asDoubleValuesSource());
} }
} catch (Exception e) { } catch (Exception e) {
@ -244,7 +249,7 @@ public class ExpressionScriptEngine implements ScriptEngine {
throw convertToScriptException("link error", expr.sourceText, variable, e); throw convertToScriptException("link error", expr.sourceText, variable, e);
} }
} }
return new ExpressionAggregationScript(expr, bindings, specialValue); return new ExpressionAggregationScript(expr, bindings, needsScores, specialValue);
} }
private FieldScript.LeafFactory newFieldScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) { private FieldScript.LeafFactory newFieldScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) {

View File

@ -28,8 +28,8 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.ScriptType;
@ -120,7 +120,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"), client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"),
client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"), client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"),
client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye")); client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye"));
ScoreFunctionBuilder<?> score = ScoreFunctionBuilders.scriptFunction( ScriptScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction(
new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap())); new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
SearchRequestBuilder req = client().prepareSearch().setIndices("test"); SearchRequestBuilder req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
@ -132,6 +132,15 @@ public class MoreExpressionTests extends ESIntegTestCase {
assertEquals("1", hits.getAt(0).getId()); assertEquals("1", hits.getAt(0).getId());
assertEquals("3", hits.getAt(1).getId()); assertEquals("3", hits.getAt(1).getId());
assertEquals("2", hits.getAt(2).getId()); assertEquals("2", hits.getAt(2).getId());
req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
score = ScoreFunctionBuilders.scriptFunction(
new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
req.addAggregation(AggregationBuilders.max("max_score").script((score).getScript()));
req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent
rsp = req.get();
assertSearchResponse(rsp);
} }
public void testDateMethods() throws Exception { public void testDateMethods() throws Exception {

View File

@ -25,9 +25,25 @@ setup:
rest_total_hits_as_int: true rest_total_hits_as_int: true
body: body:
script_fields: script_fields:
my_field : my_field:
script: script:
lang: expression lang: expression
source: 'doc["age"].value + 19' source: 'doc["age"].value + 19'
- match: { hits.hits.0.fields.my_field.0: 42.0 } - match: { hits.hits.0.fields.my_field.0: 42.0 }
---
"Expressions aggregation score test":
- do:
search:
rest_total_hits_as_int: true
body:
aggs:
max_score:
max:
script:
lang: expression
source: '_score'
- match: { aggregations.max_score.value: 1.0 }

View File

@ -23,8 +23,8 @@ import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel; import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption; import io.netty.channel.ChannelOption;
import io.netty.channel.FixedRecvByteBufAllocator; import io.netty.channel.FixedRecvByteBufAllocator;
@ -351,7 +351,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport {
} }
@ChannelHandler.Sharable @ChannelHandler.Sharable
private static class ServerChannelExceptionHandler extends ChannelHandlerAdapter { private static class ServerChannelExceptionHandler extends ChannelInboundHandlerAdapter {
private final Netty4HttpServerTransport transport; private final Netty4HttpServerTransport transport;

View File

@ -35,7 +35,6 @@ import org.elasticsearch.plugins.NetworkPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.netty4.Netty4Transport; import org.elasticsearch.transport.netty4.Netty4Transport;
import org.elasticsearch.transport.netty4.Netty4Utils;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -45,10 +44,6 @@ import java.util.function.Supplier;
public class Netty4Plugin extends Plugin implements NetworkPlugin { public class Netty4Plugin extends Plugin implements NetworkPlugin {
static {
Netty4Utils.setup();
}
public static final String NETTY_TRANSPORT_NAME = "netty4"; public static final String NETTY_TRANSPORT_NAME = "netty4";
public static final String NETTY_HTTP_TRANSPORT_NAME = "netty4"; public static final String NETTY_HTTP_TRANSPORT_NAME = "netty4";

View File

@ -25,8 +25,8 @@ import io.netty.channel.AdaptiveRecvByteBufAllocator;
import io.netty.channel.Channel; import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption; import io.netty.channel.ChannelOption;
import io.netty.channel.FixedRecvByteBufAllocator; import io.netty.channel.FixedRecvByteBufAllocator;
@ -315,7 +315,7 @@ public class Netty4Transport extends TcpTransport {
} }
@ChannelHandler.Sharable @ChannelHandler.Sharable
private class ServerChannelExceptionHandler extends ChannelHandlerAdapter { private class ServerChannelExceptionHandler extends ChannelInboundHandlerAdapter {
@Override @Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {

View File

@ -36,6 +36,9 @@ setup:
--- ---
"pre_filter_shard_size with invalid parameter": "pre_filter_shard_size with invalid parameter":
- skip:
version: "all"
reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42679"
- do: - do:
catch: /preFilterShardSize must be >= 1/ catch: /preFilterShardSize must be >= 1/
search: search:
@ -45,6 +48,9 @@ setup:
--- ---
"pre_filter_shard_size with shards that have no hit": "pre_filter_shard_size with shards that have no hit":
- skip:
version: "all"
reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42679"
- do: - do:
index: index:
index: index_1 index: index_1

View File

@ -140,6 +140,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_7_1_0 = new Version(V_7_1_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final Version V_7_1_0 = new Version(V_7_1_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_1_1_ID = 7010199; public static final int V_7_1_1_ID = 7010199;
public static final Version V_7_1_1 = new Version(V_7_1_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final Version V_7_1_1 = new Version(V_7_1_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_1_2_ID = 7010299;
public static final Version V_7_1_2 = new Version(V_7_1_2_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_2_0_ID = 7020099; public static final int V_7_2_0_ID = 7020099;
public static final Version V_7_2_0 = new Version(V_7_2_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); public static final Version V_7_2_0 = new Version(V_7_2_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_3_0_ID = 7030099; public static final int V_7_3_0_ID = 7030099;
@ -161,6 +163,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
return V_7_3_0; return V_7_3_0;
case V_7_2_0_ID: case V_7_2_0_ID:
return V_7_2_0; return V_7_2_0;
case V_7_1_2_ID:
return V_7_1_2;
case V_7_1_1_ID: case V_7_1_1_ID:
return V_7_1_1; return V_7_1_1;
case V_7_1_0_ID: case V_7_1_0_ID:

View File

@ -378,7 +378,9 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
} }
/** /**
* Indicates if the total hit count for the query should be tracked. Defaults to {@code true} * Indicates if the total hit count for the query should be tracked. Requests will count total hit count accurately
* up to 10,000 by default, see {@link #setTrackTotalHitsUpTo(int)} to change this value or set to true/false to always/never
* count accurately.
*/ */
public SearchRequestBuilder setTrackTotalHits(boolean trackTotalHits) { public SearchRequestBuilder setTrackTotalHits(boolean trackTotalHits) {
sourceBuilder().trackTotalHits(trackTotalHits); sourceBuilder().trackTotalHits(trackTotalHits);
@ -386,7 +388,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
} }
/** /**
* Indicates if the total hit count for the query should be tracked. Defaults to {@code true} * Indicates the total hit count that should be tracked accurately or null if the value is unset. Defaults to 10,000.
*/ */
public SearchRequestBuilder setTrackTotalHitsUpTo(int trackTotalHitsUpTo) { public SearchRequestBuilder setTrackTotalHitsUpTo(int trackTotalHitsUpTo) {
sourceBuilder().trackTotalHitsUpTo(trackTotalHitsUpTo); sourceBuilder().trackTotalHitsUpTo(trackTotalHitsUpTo);

View File

@ -170,7 +170,7 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery
new HandshakingTransportAddressConnector(settings, transportService), configuredHostsResolver); new HandshakingTransportAddressConnector(settings, transportService), configuredHostsResolver);
this.publicationHandler = new PublicationTransportHandler(transportService, namedWriteableRegistry, this.publicationHandler = new PublicationTransportHandler(transportService, namedWriteableRegistry,
this::handlePublishRequest, this::handleApplyCommit); this::handlePublishRequest, this::handleApplyCommit);
this.leaderChecker = new LeaderChecker(settings, transportService, getOnLeaderFailure()); this.leaderChecker = new LeaderChecker(settings, transportService, this::onLeaderFailure);
this.followersChecker = new FollowersChecker(settings, transportService, this::onFollowerCheckRequest, this::removeNode); this.followersChecker = new FollowersChecker(settings, transportService, this::onFollowerCheckRequest, this::removeNode);
this.nodeRemovalExecutor = new NodeRemovalClusterStateTaskExecutor(allocationService, logger); this.nodeRemovalExecutor = new NodeRemovalClusterStateTaskExecutor(allocationService, logger);
this.clusterApplier = clusterApplier; this.clusterApplier = clusterApplier;
@ -191,20 +191,14 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery
StreamSupport.stream(peerFinder.getFoundPeers().spliterator(), false).collect(Collectors.toList()), getCurrentTerm()); StreamSupport.stream(peerFinder.getFoundPeers().spliterator(), false).collect(Collectors.toList()), getCurrentTerm());
} }
private Runnable getOnLeaderFailure() { private void onLeaderFailure(Exception e) {
return new Runnable() { synchronized (mutex) {
@Override if (mode != Mode.CANDIDATE) {
public void run() { assert lastKnownLeader.isPresent();
synchronized (mutex) { logger.info(new ParameterizedMessage("master node [{}] failed, restarting discovery", lastKnownLeader.get()), e);
becomeCandidate("onLeaderFailure");
}
} }
becomeCandidate("onLeaderFailure");
@Override }
public String toString() {
return "notification of leader failure";
}
};
} }
private void removeNode(DiscoveryNode discoveryNode, String reason) { private void removeNode(DiscoveryNode discoveryNode, String reason) {

View File

@ -23,6 +23,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
@ -35,6 +36,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.zen.MasterFaultDetection; import org.elasticsearch.discovery.zen.MasterFaultDetection;
import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.threadpool.ThreadPool.Names;
import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.NodeDisconnectedException;
import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportConnectionListener;
import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequest;
@ -50,6 +52,7 @@ import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
/** /**
* The LeaderChecker is responsible for allowing followers to check that the currently elected leader is still connected and healthy. We are * The LeaderChecker is responsible for allowing followers to check that the currently elected leader is still connected and healthy. We are
@ -83,13 +86,13 @@ public class LeaderChecker {
private final TimeValue leaderCheckTimeout; private final TimeValue leaderCheckTimeout;
private final int leaderCheckRetryCount; private final int leaderCheckRetryCount;
private final TransportService transportService; private final TransportService transportService;
private final Runnable onLeaderFailure; private final Consumer<Exception> onLeaderFailure;
private AtomicReference<CheckScheduler> currentChecker = new AtomicReference<>(); private AtomicReference<CheckScheduler> currentChecker = new AtomicReference<>();
private volatile DiscoveryNodes discoveryNodes; private volatile DiscoveryNodes discoveryNodes;
public LeaderChecker(final Settings settings, final TransportService transportService, final Runnable onLeaderFailure) { public LeaderChecker(final Settings settings, final TransportService transportService, final Consumer<Exception> onLeaderFailure) {
this.settings = settings; this.settings = settings;
leaderCheckInterval = LEADER_CHECK_INTERVAL_SETTING.get(settings); leaderCheckInterval = LEADER_CHECK_INTERVAL_SETTING.get(settings);
leaderCheckTimeout = LEADER_CHECK_TIMEOUT_SETTING.get(settings); leaderCheckTimeout = LEADER_CHECK_TIMEOUT_SETTING.get(settings);
@ -260,16 +263,19 @@ public class LeaderChecker {
} }
if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) { if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) {
logger.debug(new ParameterizedMessage("leader [{}] disconnected, failing immediately", leader), exp); logger.debug(new ParameterizedMessage(
leaderFailed(); "leader [{}] disconnected during check", leader), exp);
leaderFailed(new ConnectTransportException(leader, "disconnected during check", exp));
return; return;
} }
long failureCount = failureCountSinceLastSuccess.incrementAndGet(); long failureCount = failureCountSinceLastSuccess.incrementAndGet();
if (failureCount >= leaderCheckRetryCount) { if (failureCount >= leaderCheckRetryCount) {
logger.debug(new ParameterizedMessage("{} consecutive failures (limit [{}] is {}) so leader [{}] has failed", logger.debug(new ParameterizedMessage(
failureCount, LEADER_CHECK_RETRY_COUNT_SETTING.getKey(), leaderCheckRetryCount, leader), exp); "leader [{}] has failed {} consecutive checks (limit [{}] is {}); last failure was:",
leaderFailed(); leader, failureCount, LEADER_CHECK_RETRY_COUNT_SETTING.getKey(), leaderCheckRetryCount), exp);
leaderFailed(new ElasticsearchException(
"node [" + leader + "] failed [" + failureCount + "] consecutive checks", exp));
return; return;
} }
@ -285,9 +291,19 @@ public class LeaderChecker {
}); });
} }
void leaderFailed() { void leaderFailed(Exception e) {
if (isClosed.compareAndSet(false, true)) { if (isClosed.compareAndSet(false, true)) {
transportService.getThreadPool().generic().execute(onLeaderFailure); transportService.getThreadPool().generic().execute(new Runnable() {
@Override
public void run() {
onLeaderFailure.accept(e);
}
@Override
public String toString() {
return "notification of leader failure: " + e.getMessage();
}
});
} else { } else {
logger.trace("already closed, not failing leader"); logger.trace("already closed, not failing leader");
} }
@ -295,7 +311,8 @@ public class LeaderChecker {
void handleDisconnectedNode(DiscoveryNode discoveryNode) { void handleDisconnectedNode(DiscoveryNode discoveryNode) {
if (discoveryNode.equals(leader)) { if (discoveryNode.equals(leader)) {
leaderFailed(); logger.debug("leader [{}] disconnected", leader);
leaderFailed(new NodeDisconnectedException(discoveryNode, "disconnected"));
} }
} }

View File

@ -64,14 +64,20 @@ public final class GeoJson {
private static final ParseField FIELD_ORIENTATION = new ParseField("orientation"); private static final ParseField FIELD_ORIENTATION = new ParseField("orientation");
private static final ParseField FIELD_RADIUS = new ParseField("radius"); private static final ParseField FIELD_RADIUS = new ParseField("radius");
private GeoJson() { private final boolean rightOrientation;
private final boolean coerce;
private final boolean ignoreZValue;
public GeoJson(boolean rightOrientation, boolean coerce, boolean ignoreZValue) {
this.rightOrientation = rightOrientation;
this.coerce = coerce;
this.ignoreZValue = ignoreZValue;
} }
public static Geometry fromXContent(XContentParser parser, boolean rightOrientation, boolean coerce, boolean ignoreZValue) public Geometry fromXContent(XContentParser parser)
throws IOException { throws IOException {
try (XContentSubParser subParser = new XContentSubParser(parser)) { try (XContentSubParser subParser = new XContentSubParser(parser)) {
return PARSER.apply(subParser, new ParserContext(rightOrientation, coerce, ignoreZValue)); return PARSER.apply(subParser, this);
} }
} }
@ -197,26 +203,14 @@ public final class GeoJson {
return builder.endObject(); return builder.endObject();
} }
private static class ParserContext { private static ConstructingObjectParser<Geometry, GeoJson> PARSER =
public final boolean defaultOrientation;
public final boolean coerce;
public final boolean ignoreZValue;
ParserContext(boolean defaultOrientation, boolean coerce, boolean ignoreZValue) {
this.defaultOrientation = defaultOrientation;
this.coerce = coerce;
this.ignoreZValue = ignoreZValue;
}
}
private static ConstructingObjectParser<Geometry, ParserContext> PARSER =
new ConstructingObjectParser<>("geojson", true, (a, c) -> { new ConstructingObjectParser<>("geojson", true, (a, c) -> {
String type = (String) a[0]; String type = (String) a[0];
CoordinateNode coordinates = (CoordinateNode) a[1]; CoordinateNode coordinates = (CoordinateNode) a[1];
@SuppressWarnings("unchecked") List<Geometry> geometries = (List<Geometry>) a[2]; @SuppressWarnings("unchecked") List<Geometry> geometries = (List<Geometry>) a[2];
Boolean orientation = orientationFromString((String) a[3]); Boolean orientation = orientationFromString((String) a[3]);
DistanceUnit.Distance radius = (DistanceUnit.Distance) a[4]; DistanceUnit.Distance radius = (DistanceUnit.Distance) a[4];
return createGeometry(type, geometries, coordinates, orientation, c.defaultOrientation, c.coerce, radius); return createGeometry(type, geometries, coordinates, orientation, c.rightOrientation, c.coerce, radius);
}); });
static { static {

View File

@ -32,22 +32,26 @@ import java.text.ParseException;
*/ */
public final class GeometryParser { public final class GeometryParser {
private GeometryParser() { private final GeoJson geoJsonParser;
private final WellKnownText wellKnownTextParser;
public GeometryParser(boolean rightOrientation, boolean coerce, boolean ignoreZValue) {
geoJsonParser = new GeoJson(rightOrientation, coerce, ignoreZValue);
wellKnownTextParser = new WellKnownText();
} }
/** /**
* Parses supplied XContent into Geometry * Parses supplied XContent into Geometry
*/ */
public static Geometry parse(XContentParser parser, boolean orientation, boolean coerce, boolean ignoreZValue) throws IOException, public Geometry parse(XContentParser parser) throws IOException,
ParseException { ParseException {
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
return null; return null;
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
return GeoJson.fromXContent(parser, orientation, coerce, ignoreZValue); return geoJsonParser.fromXContent(parser);
} else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
// TODO: Add support for ignoreZValue and coerce to WKT // TODO: Add support for ignoreZValue and coerce to WKT
return WellKnownText.fromWKT(parser.text()); return wellKnownTextParser.fromWKT(parser.text());
} }
throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
} }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.discovery;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
@ -89,6 +90,13 @@ public class HandshakingTransportAddressConnector implements TransportAddressCon
remoteNode = transportService.handshake(connection, probeHandshakeTimeout.millis()); remoteNode = transportService.handshake(connection, probeHandshakeTimeout.millis());
// success means (amongst other things) that the cluster names match // success means (amongst other things) that the cluster names match
logger.trace("[{}] handshake successful: {}", this, remoteNode); logger.trace("[{}] handshake successful: {}", this, remoteNode);
} catch (Exception e) {
// we opened a connection and successfully performed a low-level handshake, so we were definitely talking to an
// Elasticsearch node, but the high-level handshake failed indicating some kind of mismatched configurations
// (e.g. cluster name) that the user should address
logger.warn(new ParameterizedMessage("handshake failed for [{}]", this), e);
listener.onFailure(e);
return;
} finally { } finally {
IOUtils.closeWhileHandlingException(connection); IOUtils.closeWhileHandlingException(connection);
} }

View File

@ -52,9 +52,12 @@ import static org.elasticsearch.cluster.coordination.LeaderChecker.LEADER_CHECK_
import static org.elasticsearch.node.Node.NODE_NAME_SETTING; import static org.elasticsearch.node.Node.NODE_NAME_SETTING;
import static org.elasticsearch.transport.TransportService.HANDSHAKE_ACTION_NAME; import static org.elasticsearch.transport.TransportService.HANDSHAKE_ACTION_NAME;
import static org.elasticsearch.transport.TransportService.NOOP_TRANSPORT_INTERCEPTOR; import static org.elasticsearch.transport.TransportService.NOOP_TRANSPORT_INTERCEPTOR;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.matchesRegex;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
public class LeaderCheckerTests extends ESTestCase { public class LeaderCheckerTests extends ESTestCase {
@ -146,7 +149,10 @@ public class LeaderCheckerTests extends ESTestCase {
final AtomicBoolean leaderFailed = new AtomicBoolean(); final AtomicBoolean leaderFailed = new AtomicBoolean();
final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService, final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService,
() -> assertTrue(leaderFailed.compareAndSet(false, true))); e -> {
assertThat(e.getMessage(), matchesRegex("node \\[.*\\] failed \\[[1-9][0-9]*\\] consecutive checks"));
assertTrue(leaderFailed.compareAndSet(false, true));
});
logger.info("--> creating first checker"); logger.info("--> creating first checker");
leaderChecker.updateLeader(leader1); leaderChecker.updateLeader(leader1);
@ -247,7 +253,10 @@ public class LeaderCheckerTests extends ESTestCase {
final AtomicBoolean leaderFailed = new AtomicBoolean(); final AtomicBoolean leaderFailed = new AtomicBoolean();
final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService, final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService,
() -> assertTrue(leaderFailed.compareAndSet(false, true))); e -> {
assertThat(e.getMessage(), anyOf(endsWith("disconnected"), endsWith("disconnected during check")));
assertTrue(leaderFailed.compareAndSet(false, true));
});
leaderChecker.updateLeader(leader); leaderChecker.updateLeader(leader);
{ {
@ -316,7 +325,7 @@ public class LeaderCheckerTests extends ESTestCase {
transportService.start(); transportService.start();
transportService.acceptIncomingRequests(); transportService.acceptIncomingRequests();
final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService, () -> fail("shouldn't be checking anything")); final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService, e -> fail("shouldn't be checking anything"));
final DiscoveryNodes discoveryNodes final DiscoveryNodes discoveryNodes
= DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId()).build(); = DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId()).build();

View File

@ -70,7 +70,7 @@ abstract class BaseGeoParsingTestCase extends ESTestCase {
protected void assertGeometryEquals(org.elasticsearch.geo.geometry.Geometry expected, XContentBuilder geoJson) throws IOException { protected void assertGeometryEquals(org.elasticsearch.geo.geometry.Geometry expected, XContentBuilder geoJson) throws IOException {
try (XContentParser parser = createParser(geoJson)) { try (XContentParser parser = createParser(geoJson)) {
parser.nextToken(); parser.nextToken();
assertEquals(expected, GeoJson.fromXContent(parser, true, false, false)); assertEquals(expected, new GeoJson(true, false, false).fromXContent(parser));
} }
} }

View File

@ -72,7 +72,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
Line expected = new Line(new double[] {0.0, 1.0}, new double[] { 100.0, 101.0}); Line expected = new Line(new double[] {0.0, 1.0}, new double[] { 100.0, 101.0});
try (XContentParser parser = createParser(lineGeoJson)) { try (XContentParser parser = createParser(lineGeoJson)) {
parser.nextToken(); parser.nextToken();
assertEquals(expected, GeoJson.fromXContent(parser, false, false, true)); assertEquals(expected, new GeoJson(false, false, true).fromXContent(parser));
} }
} }
@ -124,7 +124,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(pointGeoJson)) { try (XContentParser parser = createParser(pointGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -140,7 +140,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(lineGeoJson)) { try (XContentParser parser = createParser(lineGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -178,7 +178,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(multilinesGeoJson)) { try (XContentParser parser = createParser(multilinesGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -189,7 +189,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(multilinesGeoJson)) { try (XContentParser parser = createParser(multilinesGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -239,7 +239,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
)); ));
try (XContentParser parser = createParser(polygonGeoJson)) { try (XContentParser parser = createParser(polygonGeoJson)) {
parser.nextToken(); parser.nextToken();
assertEquals(expected, GeoJson.fromXContent(parser, true, false, true)); assertEquals(expected, new GeoJson(true, false, true).fromXContent(parser));
} }
} }
@ -259,7 +259,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(polygonGeoJson)) { try (XContentParser parser = createParser(polygonGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, true)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, true).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -275,7 +275,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(invalidPoint1)) { try (XContentParser parser = createParser(invalidPoint1)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -288,7 +288,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(invalidPoint2)) { try (XContentParser parser = createParser(invalidPoint2)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -302,7 +302,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(invalidMultipoint1)) { try (XContentParser parser = createParser(invalidMultipoint1)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -315,7 +315,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(invalidMultipoint2)) { try (XContentParser parser = createParser(invalidMultipoint2)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -329,7 +329,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject(); .endObject();
try (XContentParser parser = createParser(invalidMultipoint3)) { try (XContentParser parser = createParser(invalidMultipoint3)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -370,7 +370,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -391,7 +391,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
.endObject()); .endObject());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -406,7 +406,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -421,7 +421,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -436,7 +436,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -449,7 +449,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -460,7 +460,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -473,7 +473,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -710,7 +710,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(tooLittlePointGeoJson)) { try (XContentParser parser = createParser(tooLittlePointGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
@ -723,7 +723,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
try (XContentParser parser = createParser(emptyPointGeoJson)) { try (XContentParser parser = createParser(emptyPointGeoJson)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertNull(parser.nextToken()); assertNull(parser.nextToken());
} }
} }
@ -749,7 +749,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
parser.nextToken(); // foo parser.nextToken(); // foo
parser.nextToken(); // start object parser.nextToken(); // start object
parser.nextToken(); // start object parser.nextToken(); // start object
expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false)); expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // end of the document assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // end of the document
assertNull(parser.nextToken()); // no more elements afterwards assertNull(parser.nextToken()); // no more elements afterwards
} }

View File

@ -49,6 +49,7 @@ public class GeoJsonSerializationTests extends ESTestCase {
private static class GeometryWrapper implements ToXContentObject { private static class GeometryWrapper implements ToXContentObject {
private Geometry geometry; private Geometry geometry;
private static GeoJson PARSER = new GeoJson(true, false, true);
GeometryWrapper(Geometry geometry) { GeometryWrapper(Geometry geometry) {
this.geometry = geometry; this.geometry = geometry;
@ -61,7 +62,7 @@ public class GeoJsonSerializationTests extends ESTestCase {
public static GeometryWrapper fromXContent(XContentParser parser) throws IOException { public static GeometryWrapper fromXContent(XContentParser parser) throws IOException {
parser.nextToken(); parser.nextToken();
return new GeometryWrapper(GeoJson.fromXContent(parser, true, false, true)); return new GeometryWrapper(PARSER.fromXContent(parser));
} }
@Override @Override

View File

@ -44,7 +44,7 @@ public class GeometryParserTests extends ESTestCase {
try (XContentParser parser = createParser(pointGeoJson)) { try (XContentParser parser = createParser(pointGeoJson)) {
parser.nextToken(); parser.nextToken();
assertEquals(new Point(0, 100), GeometryParser.parse(parser, true, randomBoolean(), randomBoolean())); assertEquals(new Point(0, 100), new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
} }
XContentBuilder pointGeoJsonWithZ = XContentFactory.jsonBuilder() XContentBuilder pointGeoJsonWithZ = XContentFactory.jsonBuilder()
@ -55,13 +55,13 @@ public class GeometryParserTests extends ESTestCase {
try (XContentParser parser = createParser(pointGeoJsonWithZ)) { try (XContentParser parser = createParser(pointGeoJsonWithZ)) {
parser.nextToken(); parser.nextToken();
assertEquals(new Point(0, 100, 10.0), GeometryParser.parse(parser, true, randomBoolean(), true)); assertEquals(new Point(0, 100, 10.0), new GeometryParser(true, randomBoolean(), true).parse(parser));
} }
try (XContentParser parser = createParser(pointGeoJsonWithZ)) { try (XContentParser parser = createParser(pointGeoJsonWithZ)) {
parser.nextToken(); parser.nextToken();
expectThrows(XContentParseException.class, () -> GeometryParser.parse(parser, true, randomBoolean(), false)); expectThrows(XContentParseException.class, () -> new GeometryParser(true, randomBoolean(), false).parse(parser));
} }
XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()
@ -81,13 +81,13 @@ public class GeometryParserTests extends ESTestCase {
try (XContentParser parser = createParser(polygonGeoJson)) { try (XContentParser parser = createParser(polygonGeoJson)) {
parser.nextToken(); parser.nextToken();
// Coerce should automatically close the polygon // Coerce should automatically close the polygon
assertEquals(p, GeometryParser.parse(parser, true, true, randomBoolean())); assertEquals(p, new GeometryParser(true, true, randomBoolean()).parse(parser));
} }
try (XContentParser parser = createParser(polygonGeoJson)) { try (XContentParser parser = createParser(polygonGeoJson)) {
parser.nextToken(); parser.nextToken();
// No coerce - the polygon parsing should fail // No coerce - the polygon parsing should fail
expectThrows(XContentParseException.class, () -> GeometryParser.parse(parser, true, false, randomBoolean())); expectThrows(XContentParseException.class, () -> new GeometryParser(true, false, randomBoolean()).parse(parser));
} }
} }
@ -101,7 +101,7 @@ public class GeometryParserTests extends ESTestCase {
parser.nextToken(); // Start object parser.nextToken(); // Start object
parser.nextToken(); // Field Name parser.nextToken(); // Field Name
parser.nextToken(); // Field Value parser.nextToken(); // Field Value
assertEquals(new Point(0, 100), GeometryParser.parse(parser, true, randomBoolean(), randomBoolean())); assertEquals(new Point(0, 100), new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
} }
} }
@ -115,7 +115,7 @@ public class GeometryParserTests extends ESTestCase {
parser.nextToken(); // Start object parser.nextToken(); // Start object
parser.nextToken(); // Field Name parser.nextToken(); // Field Name
parser.nextToken(); // Field Value parser.nextToken(); // Field Value
assertNull(GeometryParser.parse(parser, true, randomBoolean(), randomBoolean())); assertNull(new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
} }
} }
@ -130,7 +130,7 @@ public class GeometryParserTests extends ESTestCase {
parser.nextToken(); // Field Name parser.nextToken(); // Field Name
parser.nextToken(); // Field Value parser.nextToken(); // Field Value
ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class, ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> GeometryParser.parse(parser, true, randomBoolean(), randomBoolean())); () -> new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
assertEquals("shape must be an object consisting of type and coordinates", ex.getMessage()); assertEquals("shape must be an object consisting of type and coordinates", ex.getMessage());
} }
} }

View File

@ -137,6 +137,7 @@ public class ConcurrentSeqNoVersioningIT extends AbstractDisruptionTestCase {
assertAcked(prepareCreate("test") assertAcked(prepareCreate("test")
.setSettings(Settings.builder() .setSettings(Settings.builder()
.put(indexSettings())
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1 + randomInt(2)) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1 + randomInt(2))
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomInt(3)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomInt(3))
)); ));

View File

@ -96,33 +96,33 @@ public class StartDataFrameTransformAction extends Action<StartDataFrameTransfor
} }
public static class Response extends BaseTasksResponse implements ToXContentObject { public static class Response extends BaseTasksResponse implements ToXContentObject {
private final boolean started; private final boolean acknowledged;
public Response(StreamInput in) throws IOException { public Response(StreamInput in) throws IOException {
super(in); super(in);
started = in.readBoolean(); acknowledged = in.readBoolean();
} }
public Response(boolean started) { public Response(boolean acknowledged) {
super(Collections.emptyList(), Collections.emptyList()); super(Collections.emptyList(), Collections.emptyList());
this.started = started; this.acknowledged = acknowledged;
} }
public boolean isStarted() { public boolean isAcknowledged() {
return started; return acknowledged;
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeBoolean(started); out.writeBoolean(acknowledged);
} }
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(); builder.startObject();
toXContentCommon(builder, params); toXContentCommon(builder, params);
builder.field("started", started); builder.field("acknowledged", acknowledged);
builder.endObject(); builder.endObject();
return builder; return builder;
} }
@ -137,12 +137,12 @@ public class StartDataFrameTransformAction extends Action<StartDataFrameTransfor
return false; return false;
} }
Response response = (Response) obj; Response response = (Response) obj;
return started == response.started; return acknowledged == response.acknowledged;
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(started); return Objects.hash(acknowledged);
} }
} }
} }

View File

@ -158,40 +158,40 @@ public class StopDataFrameTransformAction extends Action<StopDataFrameTransformA
public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject {
private final boolean stopped; private final boolean acknowledged;
public Response(StreamInput in) throws IOException { public Response(StreamInput in) throws IOException {
super(in); super(in);
stopped = in.readBoolean(); acknowledged = in.readBoolean();
} }
public Response(boolean stopped) { public Response(boolean acknowledged) {
super(Collections.emptyList(), Collections.emptyList()); super(Collections.emptyList(), Collections.emptyList());
this.stopped = stopped; this.acknowledged = acknowledged;
} }
public Response(List<TaskOperationFailure> taskFailures, public Response(List<TaskOperationFailure> taskFailures,
List<? extends ElasticsearchException> nodeFailures, List<? extends ElasticsearchException> nodeFailures,
boolean stopped) { boolean acknowledged) {
super(taskFailures, nodeFailures); super(taskFailures, nodeFailures);
this.stopped = stopped; this.acknowledged = acknowledged;
} }
public boolean isStopped() { public boolean isAcknowledged() {
return stopped; return acknowledged;
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeBoolean(stopped); out.writeBoolean(acknowledged);
} }
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(); builder.startObject();
toXContentCommon(builder, params); toXContentCommon(builder, params);
builder.field("stopped", stopped); builder.field("acknowledged", acknowledged);
builder.endObject(); builder.endObject();
return builder; return builder;
} }
@ -203,12 +203,12 @@ public class StopDataFrameTransformAction extends Action<StopDataFrameTransformA
if (o == null || getClass() != o.getClass()) if (o == null || getClass() != o.getClass())
return false; return false;
Response response = (Response) o; Response response = (Response) o;
return stopped == response.stopped; return acknowledged == response.acknowledged;
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(stopped); return Objects.hash(acknowledged);
} }
} }
} }

View File

@ -225,7 +225,6 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase {
} }
} }
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/42084")
public void testStateMachine() throws Exception { public void testStateMachine() throws Exception {
AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED); AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
final ExecutorService executor = Executors.newFixedThreadPool(1); final ExecutorService executor = Executors.newFixedThreadPool(1);
@ -236,10 +235,11 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase {
assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); assertThat(indexer.getState(), equalTo(IndexerState.STARTED));
assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()));
assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); assertThat(indexer.getState(), equalTo(IndexerState.INDEXING));
assertTrue(awaitBusy(() -> indexer.getPosition() == 2));
countDownLatch.countDown(); countDownLatch.countDown();
assertThat(indexer.getPosition(), equalTo(2));
assertTrue(awaitBusy(() -> isFinished.get())); assertTrue(awaitBusy(() -> isFinished.get()));
assertThat(indexer.getPosition(), equalTo(3));
assertFalse(isStopped.get()); assertFalse(isStopped.get());
assertThat(indexer.getStep(), equalTo(6)); assertThat(indexer.getStep(), equalTo(6));
assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); assertThat(indexer.getStats().getNumInvocations(), equalTo(1L));

View File

@ -50,7 +50,7 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase {
REVIEWS_INDEX_NAME); REVIEWS_INDEX_NAME);
assertTrue(putDataFrameTransform(config, RequestOptions.DEFAULT).isAcknowledged()); assertTrue(putDataFrameTransform(config, RequestOptions.DEFAULT).isAcknowledged());
assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isStarted()); assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged());
waitUntilCheckpoint(config.getId(), 1L); waitUntilCheckpoint(config.getId(), 1L);

View File

@ -191,7 +191,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
startTransformRequest.setOptions(expectWarnings(warnings)); startTransformRequest.setOptions(expectWarnings(warnings));
} }
Map<String, Object> startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); Map<String, Object> startTransformResponse = entityAsMap(client().performRequest(startTransformRequest));
assertThat(startTransformResponse.get("started"), equalTo(Boolean.TRUE)); assertThat(startTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
} }
protected void stopDataFrameTransform(String transformId, boolean force) throws Exception { protected void stopDataFrameTransform(String transformId, boolean force) throws Exception {
@ -200,7 +200,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
stopTransformRequest.addParameter(DataFrameField.FORCE.getPreferredName(), Boolean.toString(force)); stopTransformRequest.addParameter(DataFrameField.FORCE.getPreferredName(), Boolean.toString(force));
stopTransformRequest.addParameter(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), Boolean.toString(true)); stopTransformRequest.addParameter(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), Boolean.toString(true));
Map<String, Object> stopTransformResponse = entityAsMap(client().performRequest(stopTransformRequest)); Map<String, Object> stopTransformResponse = entityAsMap(client().performRequest(stopTransformRequest));
assertThat(stopTransformResponse.get("stopped"), equalTo(Boolean.TRUE)); assertThat(stopTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
} }
protected void startAndWaitForTransform(String transformId, String dataFrameIndex) throws Exception { protected void startAndWaitForTransform(String transformId, String dataFrameIndex) throws Exception {

View File

@ -124,8 +124,8 @@ public class TransportStopDataFrameTransformAction extends
} }
// if tasks is empty allMatch is 'vacuously satisfied' // if tasks is empty allMatch is 'vacuously satisfied'
boolean allStopped = tasks.stream().allMatch(StopDataFrameTransformAction.Response::isStopped); boolean allAcknowledged = tasks.stream().allMatch(StopDataFrameTransformAction.Response::isAcknowledged);
return new StopDataFrameTransformAction.Response(allStopped); return new StopDataFrameTransformAction.Response(allAcknowledged);
} }
private ActionListener<StopDataFrameTransformAction.Response> private ActionListener<StopDataFrameTransformAction.Response>

View File

@ -30,7 +30,6 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats; import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState;
import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine;
import org.elasticsearch.xpack.dataframe.DataFrame; import org.elasticsearch.xpack.dataframe.DataFrame;
@ -223,18 +222,8 @@ public class DataFrameTransformPersistentTasksExecutor extends PersistentTasksEx
DataFrameTransformTask.ClientDataFrameIndexerBuilder indexerBuilder, DataFrameTransformTask.ClientDataFrameIndexerBuilder indexerBuilder,
Long previousCheckpoint, Long previousCheckpoint,
ActionListener<StartDataFrameTransformTaskAction.Response> listener) { ActionListener<StartDataFrameTransformTaskAction.Response> listener) {
// If we are stopped, and it is an initial run, this means we have never been started,
// attempt to start the task
buildTask.initializeIndexer(indexerBuilder); buildTask.initializeIndexer(indexerBuilder);
// TODO isInitialRun is false after relocation?? buildTask.start(previousCheckpoint, listener);
if (buildTask.getState().getTaskState().equals(DataFrameTransformTaskState.STOPPED) && buildTask.isInitialRun()) {
logger.info("Data frame transform [{}] created.", buildTask.getTransformId());
buildTask.start(previousCheckpoint, listener);
} else {
logger.debug("No need to start task. Its current state is: {}", buildTask.getState().getIndexerState());
listener.onResponse(new StartDataFrameTransformTaskAction.Response(true));
}
} }
@Override @Override

View File

@ -174,13 +174,8 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S
} }
} }
public boolean isStopped() { public void setTaskStateStopped() {
IndexerState currentState = getIndexer() == null ? initialIndexerState : getIndexer().getState(); taskState.set(DataFrameTransformTaskState.STOPPED);
return currentState.equals(IndexerState.STOPPED);
}
boolean isInitialRun() {
return getIndexer() != null && getIndexer().initialRun();
} }
/** /**
@ -235,11 +230,9 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S
public synchronized void stop() { public synchronized void stop() {
if (getIndexer() == null) { if (getIndexer() == null) {
return; // If there is no indexer the task has not been triggered
} // but it still needs to be stopped and removed
// taskState is initialized as STOPPED and is updated in tandem with the indexerState shutdown();
// Consequently, if it is STOPPED, we consider the whole task STOPPED.
if (taskState.get() == DataFrameTransformTaskState.STOPPED) {
return; return;
} }
@ -609,6 +602,8 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S
protected void onStop() { protected void onStop() {
auditor.info(transformConfig.getId(), "Indexer has stopped"); auditor.info(transformConfig.getId(), "Indexer has stopped");
logger.info("Data frame transform [{}] indexer has stopped", transformConfig.getId()); logger.info("Data frame transform [{}] indexer has stopped", transformConfig.getId());
transformTask.setTaskStateStopped();
transformsConfigManager.putOrUpdateTransformStats( transformsConfigManager.putOrUpdateTransformStats(
new DataFrameTransformStateAndStats(transformId, transformTask.getState(), getStats(), new DataFrameTransformStateAndStats(transformId, transformTask.getState(), getStats(),
DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null

View File

@ -531,9 +531,8 @@ public class TokenServiceTests extends ESTestCase {
} }
try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
// move to expiry // move to expiry, stripping nanoseconds, as we don't store them in the security-tokens index
clock.fastForwardSeconds(Math.toIntExact(defaultExpiration.getSeconds()) - fastForwardAmount); clock.setTime(userToken.getExpirationTime().truncatedTo(ChronoUnit.MILLIS).atZone(clock.getZone()));
clock.rewind(TimeValue.timeValueNanos(clock.instant().getNano())); // trim off nanoseconds since don't store them in the index
PlainActionFuture<UserToken> future = new PlainActionFuture<>(); PlainActionFuture<UserToken> future = new PlainActionFuture<>();
tokenService.getAndValidateToken(requestContext, future); tokenService.getAndValidateToken(requestContext, future);
assertAuthentication(authentication, future.get().getAuthentication()); assertAuthentication(authentication, future.get().getAuthentication());

View File

@ -54,6 +54,8 @@ import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.timeAsTime;
*/ */
final class TypeConverter { final class TypeConverter {
private static WellKnownText WKT = new WellKnownText();
private TypeConverter() {} private TypeConverter() {}
/** /**
@ -246,7 +248,7 @@ final class TypeConverter {
case GEO_POINT: case GEO_POINT:
case GEO_SHAPE: case GEO_SHAPE:
try { try {
return WellKnownText.fromWKT(v.toString()); return WKT.fromWKT(v.toString());
} catch (IOException | ParseException ex) { } catch (IOException | ParseException ex) {
throw new SQLException("Cannot parse geo_shape", ex); throw new SQLException("Cannot parse geo_shape", ex);
} }

View File

@ -51,6 +51,8 @@ public class JdbcAssert {
private static final IntObjectHashMap<EsType> SQL_TO_TYPE = new IntObjectHashMap<>(); private static final IntObjectHashMap<EsType> SQL_TO_TYPE = new IntObjectHashMap<>();
private static final WellKnownText WKT = new WellKnownText();
static { static {
for (EsType type : EsType.values()) { for (EsType type : EsType.values()) {
SQL_TO_TYPE.putIfAbsent(type.getVendorTypeNumber().intValue(), type); SQL_TO_TYPE.putIfAbsent(type.getVendorTypeNumber().intValue(), type);
@ -270,7 +272,7 @@ public class JdbcAssert {
if (actualObject instanceof Geometry) { if (actualObject instanceof Geometry) {
// We need to convert the expected object to libs/geo Geometry for comparision // We need to convert the expected object to libs/geo Geometry for comparision
try { try {
expectedObject = WellKnownText.fromWKT(expectedObject.toString()); expectedObject = WKT.fromWKT(expectedObject.toString());
} catch (IOException | ParseException ex) { } catch (IOException | ParseException ex) {
fail(ex.getMessage()); fail(ex.getMessage());
} }

View File

@ -49,6 +49,10 @@ public class GeoShape implements ToXContentFragment, NamedWriteable {
private final Geometry shape; private final Geometry shape;
private static final GeometryParser GEOMETRY_PARSER = new GeometryParser(true, true, true);
private static final WellKnownText WKT_PARSER = new WellKnownText();
public GeoShape(double lon, double lat) { public GeoShape(double lon, double lat) {
shape = new Point(lat, lon); shape = new Point(lat, lon);
} }
@ -72,17 +76,17 @@ public class GeoShape implements ToXContentFragment, NamedWriteable {
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeString(WellKnownText.toWKT(shape)); out.writeString(WKT_PARSER.toWKT(shape));
} }
@Override @Override
public String toString() { public String toString() {
return WellKnownText.toWKT(shape); return WKT_PARSER.toWKT(shape);
} }
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return builder.value(WellKnownText.toWKT(shape)); return builder.value(WKT_PARSER.toWKT(shape));
} }
public Geometry toGeometry() { public Geometry toGeometry() {
@ -216,7 +220,7 @@ public class GeoShape implements ToXContentFragment, NamedWriteable {
parser.nextToken(); // start object parser.nextToken(); // start object
parser.nextToken(); // field name parser.nextToken(); // field name
parser.nextToken(); // field value parser.nextToken(); // field value
return GeometryParser.parse(parser, true, true, true); return GEOMETRY_PARSER.parse(parser);
} }
} }
} }

View File

@ -42,7 +42,7 @@ teardown:
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
--- ---
"Test start missing transform": "Test start missing transform":
@ -56,7 +56,7 @@ teardown:
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
catch: /Unable to start data frame transform \[airline-transform-start-stop\] as it is in state \[STARTED\]/ catch: /Unable to start data frame transform \[airline-transform-start-stop\] as it is in state \[STARTED\]/
@ -68,7 +68,7 @@ teardown:
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
indices.get_mapping: indices.get_mapping:
index: airline-data-by-airline-start-stop index: airline-data-by-airline-start-stop
@ -83,17 +83,20 @@ teardown:
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
indices.get_mapping: indices.get_mapping:
index: airline-data-by-airline-start-stop index: airline-data-by-airline-start-stop
- match: { airline-data-by-airline-start-stop.mappings: {} } - match: { airline-data-by-airline-start-stop.mappings: {} }
--- ---
"Test start/stop/start transform": "Test start/stop/start transform":
- skip:
reason: "https://github.com/elastic/elasticsearch/issues/42650"
version: "all"
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
data_frame.get_data_frame_transform_stats: data_frame.get_data_frame_transform_stats:
@ -107,20 +110,20 @@ teardown:
data_frame.stop_data_frame_transform: data_frame.stop_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
wait_for_completion: true wait_for_completion: true
- match: { stopped: true } - match: { acknowledged: true }
- do: - do:
data_frame.get_data_frame_transform_stats: data_frame.get_data_frame_transform_stats:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { count: 1 } - match: { count: 1 }
- match: { transforms.0.id: "airline-transform-start-stop" } - match: { transforms.0.id: "airline-transform-start-stop" }
# - match: { transforms.0.state.indexer_state: "stopped" } - match: { transforms.0.state.indexer_state: "stopped" }
# - match: { transforms.0.state.task_state: "stopped" } - match: { transforms.0.state.task_state: "stopped" }
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
data_frame.get_data_frame_transform_stats: data_frame.get_data_frame_transform_stats:
@ -142,7 +145,7 @@ teardown:
- do: - do:
data_frame.stop_data_frame_transform: data_frame.stop_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { stopped: true } - match: { acknowledged: true }
--- ---
"Test start/stop only starts/stops specified transform": "Test start/stop only starts/stops specified transform":
@ -161,7 +164,7 @@ teardown:
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
data_frame.get_data_frame_transform_stats: data_frame.get_data_frame_transform_stats:
@ -182,12 +185,12 @@ teardown:
- do: - do:
data_frame.start_data_frame_transform: data_frame.start_data_frame_transform:
transform_id: "airline-transform-start-later" transform_id: "airline-transform-start-later"
- match: { started: true } - match: { acknowledged: true }
- do: - do:
data_frame.stop_data_frame_transform: data_frame.stop_data_frame_transform:
transform_id: "airline-transform-start-stop" transform_id: "airline-transform-start-stop"
- match: { stopped: true } - match: { acknowledged: true }
- do: - do:
data_frame.get_data_frame_transform_stats: data_frame.get_data_frame_transform_stats:
@ -201,7 +204,7 @@ teardown:
data_frame.stop_data_frame_transform: data_frame.stop_data_frame_transform:
transform_id: "airline-transform-start-later" transform_id: "airline-transform-start-later"
wait_for_completion: true wait_for_completion: true
- match: { stopped: true } - match: { acknowledged: true }
- do: - do:
data_frame.delete_data_frame_transform: data_frame.delete_data_frame_transform: