Merge branch '7.x' into enrich-7.x
commit 215170b6c3
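The substance of this merge is the 7.x refactor of RequestConverters.Params: Params previously wrapped a Request and wrote each parameter straight onto it, whereas it is now a standalone collector that every converter drains onto the request with request.addParameters(parameters.asMap()) once the request is fully built (hence the java.util.HashMap and java.util.Map imports added below). The following sketch illustrates the pattern; ParamsSketch is a hypothetical stand-in for the client's Params class, not the verbatim source:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical stand-in for RequestConverters.Params after this change:
    // it no longer holds a Request; it only collects parameters.
    final class ParamsSketch {
        private final Map<String, String> parameters = new HashMap<>();

        ParamsSketch putParam(String key, String value) {
            if (value != null) {
                parameters.put(key, value); // collect only; no side effect on any Request
            }
            return this;
        }

        Map<String, String> asMap() {
            return parameters; // drained once via request.addParameters(params.asMap())
        }
    }

Every hunk below applies the same mechanical change: new RequestConverters.Params(request) becomes new RequestConverters.Params(), and a request.addParameters(params.asMap()) call is added before the request is returned.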
@@ -48,9 +48,10 @@ final class CcrRequestConverters {
             .addPathPartAsIs("_ccr", "follow")
             .build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withWaitForActiveShards(putFollowRequest.waitForActiveShards());
         request.setEntity(createEntity(putFollowRequest, REQUEST_BODY_CONTENT_TYPE));
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -36,22 +36,21 @@ final class ClusterRequestConverters {
     static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException {
         Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings");
-
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(clusterUpdateSettingsRequest.timeout());
         parameters.withMasterTimeout(clusterUpdateSettingsRequest.masterNodeTimeout());
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(clusterUpdateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
 
     static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRequest) throws IOException {
         Request request = new Request(HttpGet.METHOD_NAME, "/_cluster/settings");
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withLocal(clusterGetSettingsRequest.local());
         parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults());
         parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout());
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -63,7 +62,7 @@ final class ClusterRequestConverters {
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        new RequestConverters.Params(request)
+        RequestConverters.Params params = new RequestConverters.Params()
             .withWaitForStatus(healthRequest.waitForStatus())
             .withWaitForNoRelocatingShards(healthRequest.waitForNoRelocatingShards())
             .withWaitForNoInitializingShards(healthRequest.waitForNoInitializingShards())
@@ -74,6 +73,7 @@ final class ClusterRequestConverters {
             .withMasterTimeout(healthRequest.masterNodeTimeout())
             .withLocal(healthRequest.local())
             .withLevel(healthRequest.level());
+        request.addParameters(params.asMap());
         return request;
     }
 }
@@ -82,10 +82,11 @@ final class DataFrameRequestConverters {
             .addPathPartAsIs("_start")
             .build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (startRequest.getTimeout() != null) {
             params.withTimeout(startRequest.getTimeout());
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -96,13 +97,14 @@ final class DataFrameRequestConverters {
             .addPathPartAsIs("_stop")
             .build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (stopRequest.getWaitForCompletion() != null) {
             params.withWaitForCompletion(stopRequest.getWaitForCompletion());
         }
         if (stopRequest.getTimeout() != null) {
             params.withTimeout(stopRequest.getTimeout());
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -44,9 +44,10 @@ final class IndexLifecycleRequestConverters {
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ilm/policy")
             .addCommaSeparatedPathParts(getLifecyclePolicyRequest.getPolicyNames()).build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(getLifecyclePolicyRequest.masterNodeTimeout());
         params.withTimeout(getLifecyclePolicyRequest.timeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -56,9 +57,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs(putLifecycleRequest.getName())
             .build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(putLifecycleRequest.masterNodeTimeout());
         params.withTimeout(putLifecycleRequest.timeout());
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(putLifecycleRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -69,9 +71,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs("_ilm/policy")
             .addPathPartAsIs(deleteLifecyclePolicyRequest.getLifecyclePolicy())
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(deleteLifecyclePolicyRequest.masterNodeTimeout());
         params.withTimeout(deleteLifecyclePolicyRequest.timeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -83,9 +86,10 @@ final class IndexLifecycleRequestConverters {
             .addCommaSeparatedPathParts(indices)
             .addPathPartAsIs("_ilm", "remove")
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(removePolicyRequest.indicesOptions());
         params.withMasterTimeout(removePolicyRequest.masterNodeTimeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -95,9 +99,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs("_ilm")
             .addPathPartAsIs("start")
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(startILMRequest.masterNodeTimeout());
         params.withTimeout(startILMRequest.timeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -107,9 +112,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs("_ilm")
             .addPathPartAsIs("stop")
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(stopILMRequest.masterNodeTimeout());
         params.withTimeout(stopILMRequest.timeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -119,9 +125,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs("_ilm")
             .addPathPartAsIs("status")
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(lifecycleManagementStatusRequest.masterNodeTimeout());
         params.withTimeout(lifecycleManagementStatusRequest.timeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -132,9 +139,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs("_ilm")
             .addPathPartAsIs("explain")
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(explainLifecycleRequest.indicesOptions());
         params.withMasterTimeout(explainLifecycleRequest.masterNodeTimeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -145,9 +153,10 @@ final class IndexLifecycleRequestConverters {
             .addPathPartAsIs("_ilm")
             .addPathPartAsIs("retry")
             .build());
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(retryLifecyclePolicyRequest.masterNodeTimeout());
         params.withTimeout(retryLifecyclePolicyRequest.timeout());
+        request.addParameters(params.asMap());
         return request;
     }
 }
@@ -67,10 +67,11 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices());
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(deleteIndexRequest.timeout());
         parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout());
         parameters.withIndicesOptions(deleteIndexRequest.indicesOptions());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -78,11 +79,12 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(openIndexRequest.indices(), "_open");
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(openIndexRequest.timeout());
         parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout());
         parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards());
         parameters.withIndicesOptions(openIndexRequest.indicesOptions());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -90,10 +92,11 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close");
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(closeIndexRequest.timeout());
         parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout());
         parameters.withIndicesOptions(closeIndexRequest.indicesOptions());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -102,11 +105,11 @@ final class IndicesRequestConverters {
             .addPathPart(createIndexRequest.index()).build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(createIndexRequest.timeout());
         parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
         parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -116,12 +119,12 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(createIndexRequest.indices());
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(createIndexRequest.timeout());
         parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
         parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
         parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -129,10 +132,10 @@ final class IndicesRequestConverters {
     static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, "/_aliases");
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(indicesAliasesRequest.timeout());
         parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout());
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -141,9 +144,10 @@ final class IndicesRequestConverters {
     static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
         Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(putMappingRequest.timeout());
         parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -162,11 +166,11 @@ final class IndicesRequestConverters {
         Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(),
             "_mapping", putMappingRequest.type()));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(putMappingRequest.timeout());
         parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
         parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -176,11 +180,11 @@ final class IndicesRequestConverters {
 
         Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
         parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
         parameters.withLocal(getMappingsRequest.local());
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -191,12 +195,12 @@ final class IndicesRequestConverters {
 
         Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
         parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
         parameters.withLocal(getMappingsRequest.local());
         parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -213,11 +217,11 @@ final class IndicesRequestConverters {
 
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
         parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
         parameters.withLocal(getFieldMappingsRequest.local());
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -234,12 +238,12 @@ final class IndicesRequestConverters {
 
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
         parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
         parameters.withLocal(getFieldMappingsRequest.local());
         parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -247,8 +251,9 @@ final class IndicesRequestConverters {
         String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices();
         Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(refreshRequest.indicesOptions());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -256,18 +261,20 @@ final class IndicesRequestConverters {
         String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices();
         Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(flushRequest.indicesOptions());
         parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
         parameters.putParam("force", Boolean.toString(flushRequest.force()));
+        request.addParameters(parameters.asMap());
         return request;
     }
 
     static Request flushSynced(SyncedFlushRequest syncedFlushRequest) {
         String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices();
         Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush/synced"));
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(syncedFlushRequest.indicesOptions());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -275,11 +282,12 @@ final class IndicesRequestConverters {
         String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices();
         Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(forceMergeRequest.indicesOptions());
         parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
         parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
         parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush()));
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -287,12 +295,13 @@ final class IndicesRequestConverters {
         String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices();
         Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions());
         parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
         parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
         parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
         parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields()));
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -306,9 +315,10 @@ final class IndicesRequestConverters {
 
         Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases));
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(getAliasesRequest.indicesOptions());
         params.withLocal(getAliasesRequest.local());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -332,11 +342,11 @@ final class IndicesRequestConverters {
             .addPathPart(resizeRequest.getTargetIndexRequest().index()).build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withTimeout(resizeRequest.timeout());
         params.withMasterTimeout(resizeRequest.masterNodeTimeout());
         params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards());
-
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -346,14 +356,14 @@ final class IndicesRequestConverters {
             .addPathPart(rolloverRequest.getNewIndexName()).build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withTimeout(rolloverRequest.timeout());
         params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
         params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
         if (rolloverRequest.isDryRun()) {
             params.putParam("dry_run", Boolean.TRUE.toString());
         }
-
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -364,7 +374,7 @@ final class IndicesRequestConverters {
             .addPathPart(rolloverRequest.getNewIndexName()).build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withTimeout(rolloverRequest.timeout());
         params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
         params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
@@ -373,7 +383,7 @@ final class IndicesRequestConverters {
         }
         params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
         request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -384,12 +394,12 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(indices, "_settings", names);
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(getSettingsRequest.indicesOptions());
         params.withLocal(getSettingsRequest.local());
         params.withIncludeDefaults(getSettingsRequest.includeDefaults());
         params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -404,14 +414,14 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(indices);
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(getIndexRequest.indicesOptions());
         params.withLocal(getIndexRequest.local());
         params.withIncludeDefaults(getIndexRequest.includeDefaults());
         params.withHuman(getIndexRequest.humanReadable());
         params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
         params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -421,13 +431,13 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(indices);
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(getIndexRequest.indicesOptions());
         params.withLocal(getIndexRequest.local());
         params.withIncludeDefaults(getIndexRequest.includeDefaults());
         params.withHuman(getIndexRequest.humanReadable());
         params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -444,12 +454,13 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), "");
         Request request = new Request(HttpHead.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withLocal(getIndexRequest.local());
         params.withHuman(getIndexRequest.humanReadable());
         params.withIndicesOptions(getIndexRequest.indicesOptions());
         params.withIncludeDefaults(getIndexRequest.includeDefaults());
         params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -461,11 +472,12 @@ final class IndicesRequestConverters {
         String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), "");
         Request request = new Request(HttpHead.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withLocal(getIndexRequest.local());
         params.withHuman(getIndexRequest.humanReadable());
         params.withIndicesOptions(getIndexRequest.indicesOptions());
         params.withIncludeDefaults(getIndexRequest.includeDefaults());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -473,12 +485,12 @@ final class IndicesRequestConverters {
         String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices();
         Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings"));
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(updateSettingsRequest.timeout());
         parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout());
         parameters.withIndicesOptions(updateSettingsRequest.indicesOptions());
         parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting());
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -493,7 +505,7 @@ final class IndicesRequestConverters {
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
             .addPathPart(putIndexTemplateRequest.name()).build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
         if (putIndexTemplateRequest.create()) {
             params.putParam("create", Boolean.TRUE.toString());
@@ -502,6 +514,7 @@ final class IndicesRequestConverters {
             params.putParam("cause", putIndexTemplateRequest.cause());
         }
         params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -510,7 +523,7 @@ final class IndicesRequestConverters {
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
             .addPathPart(putIndexTemplateRequest.name()).build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
         if (putIndexTemplateRequest.create()) {
             params.putParam("create", Boolean.TRUE.toString());
@@ -518,6 +531,7 @@ final class IndicesRequestConverters {
         if (Strings.hasText(putIndexTemplateRequest.cause())) {
             params.putParam("cause", putIndexTemplateRequest.cause());
         }
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -527,11 +541,12 @@ final class IndicesRequestConverters {
         String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types();
         String endpoint = RequestConverters.endpoint(indices, types, "_validate/query");
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(validateQueryRequest.indicesOptions());
         params.putParam("explain", Boolean.toString(validateQueryRequest.explain()));
         params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards()));
         params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -541,9 +556,10 @@ final class IndicesRequestConverters {
         String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
         String endpoint = RequestConverters.endpoint(indices, "_alias", aliases);
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withIndicesOptions(getAliasesRequest.indicesOptions());
         params.withLocal(getAliasesRequest.local());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -562,12 +578,13 @@ final class IndicesRequestConverters {
             .addCommaSeparatedPathParts(getIndexTemplatesRequest.names())
             .build();
         final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        final RequestConverters.Params params = new RequestConverters.Params(request);
+        final RequestConverters.Params params = new RequestConverters.Params();
         params.withLocal(getIndexTemplatesRequest.isLocal());
         params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout());
         if (includeTypeName) {
             params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true");
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -577,9 +594,10 @@ final class IndicesRequestConverters {
             .addCommaSeparatedPathParts(indexTemplatesExistRequest.names())
             .build();
         final Request request = new Request(HttpHead.METHOD_NAME, endpoint);
-        final RequestConverters.Params params = new RequestConverters.Params(request);
+        final RequestConverters.Params params = new RequestConverters.Params();
         params.withLocal(indexTemplatesExistRequest.isLocal());
         params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout());
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -598,22 +616,24 @@ final class IndicesRequestConverters {
     static Request freezeIndex(FreezeIndexRequest freezeIndexRequest) {
         String endpoint = RequestConverters.endpoint(freezeIndexRequest.getIndices(), "_freeze");
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(freezeIndexRequest.timeout());
         parameters.withMasterTimeout(freezeIndexRequest.masterNodeTimeout());
         parameters.withIndicesOptions(freezeIndexRequest.indicesOptions());
         parameters.withWaitForActiveShards(freezeIndexRequest.getWaitForActiveShards());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
     static Request unfreezeIndex(UnfreezeIndexRequest unfreezeIndexRequest) {
         String endpoint = RequestConverters.endpoint(unfreezeIndexRequest.getIndices(), "_unfreeze");
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(unfreezeIndexRequest.timeout());
         parameters.withMasterTimeout(unfreezeIndexRequest.masterNodeTimeout());
         parameters.withIndicesOptions(unfreezeIndexRequest.indicesOptions());
         parameters.withWaitForActiveShards(unfreezeIndexRequest.getWaitForActiveShards());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -621,8 +641,9 @@ final class IndicesRequestConverters {
         String name = deleteIndexTemplateRequest.name();
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addPathPart(name).build();
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout());
+        request.addParameters(params.asMap());
         return request;
     }
 }
@@ -41,8 +41,9 @@ final class IngestRequestConverters {
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -53,10 +54,10 @@ final class IngestRequestConverters {
             .build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(putPipelineRequest.timeout());
         parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());
-
+        request.addParameters(parameters.asMap());
         request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -68,10 +69,10 @@ final class IngestRequestConverters {
             .build();
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(deletePipelineRequest.timeout());
         parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -83,8 +84,9 @@ final class IngestRequestConverters {
         builder.addPathPartAsIs("_simulate");
         String endpoint = builder.build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
+        request.addParameters(params.asMap());
         request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -36,12 +36,13 @@ final class LicenseRequestConverters {
     static Request putLicense(PutLicenseRequest putLicenseRequest) {
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build();
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(putLicenseRequest.timeout());
         parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
         if (putLicenseRequest.isAcknowledge()) {
             parameters.putParam("acknowledge", "true");
         }
+        request.addParameters(parameters.asMap());
         request.setJsonEntity(putLicenseRequest.getLicenseDefinition());
         return request;
     }
@@ -49,17 +50,19 @@ final class LicenseRequestConverters {
     static Request getLicense(GetLicenseRequest getLicenseRequest) {
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withLocal(getLicenseRequest.isLocal());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
     static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
         String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license").build();
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(deleteLicenseRequest.timeout());
         parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -67,11 +70,12 @@ final class LicenseRequestConverters {
         final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_license", "start_trial").build();
         final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.putParam("acknowledge", Boolean.toString(startTrialRequest.isAcknowledge()));
         if (startTrialRequest.getLicenseType() != null) {
             parameters.putParam("type", startTrialRequest.getLicenseType());
         }
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -80,12 +84,13 @@ final class LicenseRequestConverters {
             .addPathPartAsIs("_license", "start_basic")
             .build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        RequestConverters.Params parameters = new RequestConverters.Params();
         parameters.withTimeout(startBasicRequest.timeout());
         parameters.withMasterTimeout(startBasicRequest.masterNodeTimeout());
         if (startBasicRequest.isAcknowledge()) {
             parameters.putParam("acknowledge", "true");
         }
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -105,11 +105,11 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (getJobRequest.getAllowNoJobs() != null) {
             params.putParam("allow_no_jobs", Boolean.toString(getJobRequest.getAllowNoJobs()));
         }
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -122,10 +122,11 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (getJobStatsRequest.getAllowNoJobs() != null) {
             params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.getAllowNoJobs()));
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -171,14 +172,14 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (deleteJobRequest.getForce() != null) {
             params.putParam("force", Boolean.toString(deleteJobRequest.getForce()));
         }
         if (deleteJobRequest.getWaitForCompletion() != null) {
             params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion()));
         }
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -249,12 +250,12 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (getDatafeedRequest.getAllowNoDatafeeds() != null) {
             params.putParam(GetDatafeedRequest.ALLOW_NO_DATAFEEDS.getPreferredName(),
                 Boolean.toString(getDatafeedRequest.getAllowNoDatafeeds()));
         }
-
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -265,10 +266,11 @@ final class MLRequestConverters {
             .addPathPart(deleteDatafeedRequest.getDatafeedId())
             .build();
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (deleteDatafeedRequest.getForce() != null) {
             params.putParam("force", Boolean.toString(deleteDatafeedRequest.getForce()));
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -305,10 +307,11 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (getDatafeedStatsRequest.getAllowNoDatafeeds() != null) {
             params.putParam("allow_no_datafeeds", Boolean.toString(getDatafeedStatsRequest.getAllowNoDatafeeds()));
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -331,13 +334,14 @@ final class MLRequestConverters {
             .addPathPart(Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds()))
             .build();
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (deleteForecastRequest.getAllowNoForecasts() != null) {
             params.putParam("allow_no_forecasts", Boolean.toString(deleteForecastRequest.getAllowNoForecasts()));
         }
         if (deleteForecastRequest.timeout() != null) {
             params.putParam("timeout", deleteForecastRequest.timeout().getStringRep());
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -453,7 +457,7 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (postDataRequest.getResetStart() != null) {
             params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart());
         }
@@ -461,6 +465,7 @@ final class MLRequestConverters {
             params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd());
         }
         BytesReference content = postDataRequest.getContent();
+        request.addParameters(params.asMap());
         if (content != null) {
             BytesRef source = postDataRequest.getContent().toBytesRef();
             HttpEntity byteEntity = new NByteArrayEntity(source.bytes,
@@ -594,13 +599,14 @@ final class MLRequestConverters {
             .addPathPart(getFiltersRequest.getFilterId())
             .build();
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (getFiltersRequest.getSize() != null) {
             params.putParam(PageParams.SIZE.getPreferredName(), getFiltersRequest.getSize().toString());
         }
         if (getFiltersRequest.getFrom() != null) {
             params.putParam(PageParams.FROM.getPreferredName(), getFiltersRequest.getFrom().toString());
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -628,11 +634,12 @@ final class MLRequestConverters {
     static Request setUpgradeMode(SetUpgradeModeRequest setUpgradeModeRequest) {
         String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "set_upgrade_mode").build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
        params.putParam(SetUpgradeModeRequest.ENABLED.getPreferredName(), Boolean.toString(setUpgradeModeRequest.isEnabled()));
         if (setUpgradeModeRequest.getTimeout() != null) {
             params.putParam(SetUpgradeModeRequest.TIMEOUT.getPreferredName(), setUpgradeModeRequest.getTimeout().toString());
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -650,7 +657,7 @@ final class MLRequestConverters {
             .build();
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        RequestConverters.Params params = new RequestConverters.Params(request);
+        RequestConverters.Params params = new RequestConverters.Params();
         if (findFileStructureRequest.getLinesToSample() != null) {
             params.putParam(FindFileStructureRequest.LINES_TO_SAMPLE.getPreferredName(),
                 findFileStructureRequest.getLinesToSample().toString());
@@ -695,7 +702,7 @@ final class MLRequestConverters {
         if (findFileStructureRequest.getExplain() != null) {
             params.putParam(FindFileStructureRequest.EXPLAIN.getPreferredName(), findFileStructureRequest.getExplain().toString());
         }
-
+        request.addParameters(params.asMap());
         BytesReference sample = findFileStructureRequest.getSample();
         BytesRef source = sample.toBytesRef();
         HttpEntity byteEntity = new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(XContentType.JSON));
@@ -88,8 +88,10 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.charset.Charset;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.StringJoiner;
 
 final class RequestConverters {
@@ -103,7 +105,7 @@ final class RequestConverters {
         String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
 
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withRouting(deleteRequest.routing());
         parameters.withTimeout(deleteRequest.timeout());
         parameters.withVersion(deleteRequest.version());
@@ -112,6 +114,7 @@ final class RequestConverters {
         parameters.withIfPrimaryTerm(deleteRequest.ifPrimaryTerm());
         parameters.withRefreshPolicy(deleteRequest.getRefreshPolicy());
         parameters.withWaitForActiveShards(deleteRequest.waitForActiveShards());
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -122,7 +125,7 @@ final class RequestConverters {
     static Request bulk(BulkRequest bulkRequest) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, "/_bulk");
 
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withTimeout(bulkRequest.timeout());
         parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
         parameters.withPipeline(bulkRequest.pipeline());
@@ -249,6 +252,7 @@ final class RequestConverters {
                 content.write(separator);
             }
         }
+        request.addParameters(parameters.asMap());
         request.setEntity(new NByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType));
         return request;
     }
@@ -264,7 +268,7 @@ final class RequestConverters {
     private static Request getStyleRequest(String method, GetRequest getRequest) {
         Request request = new Request(method, endpoint(getRequest.index(), getRequest.type(), getRequest.id()));
 
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withPreference(getRequest.preference());
         parameters.withRouting(getRequest.routing());
         parameters.withRefresh(getRequest.refresh());
@@ -273,7 +277,7 @@ final class RequestConverters {
         parameters.withVersion(getRequest.version());
         parameters.withVersionType(getRequest.versionType());
         parameters.withFetchSourceContext(getRequest.fetchSourceContext());
-
+        request.addParameters(parameters.asMap());
         return request;
     }
 
@@ -286,23 +290,24 @@ final class RequestConverters {
             endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source");
         }
         Request request = new Request(HttpHead.METHOD_NAME, endpoint);
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withPreference(getRequest.preference());
         parameters.withRouting(getRequest.routing());
         parameters.withRefresh(getRequest.refresh());
         parameters.withRealtime(getRequest.realtime());
         // Version params are not currently supported by the source exists API so are not passed
+        request.addParameters(parameters.asMap());
         return request;
     }
 
     static Request multiGet(MultiGetRequest multiGetRequest) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, "/_mget");
 
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withPreference(multiGetRequest.preference());
         parameters.withRealtime(multiGetRequest.realtime());
         parameters.withRefresh(multiGetRequest.refresh());
-
+        request.addParameters(parameters.asMap());
         request.setEntity(createEntity(multiGetRequest, REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
@@ -321,7 +326,7 @@ final class RequestConverters {
 
         Request request = new Request(method, endpoint);
 
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withRouting(indexRequest.routing());
         parameters.withTimeout(indexRequest.timeout());
         parameters.withVersion(indexRequest.version());
@@ -334,6 +339,7 @@ final class RequestConverters {
 
         BytesRef source = indexRequest.source().toBytesRef();
         ContentType contentType = createContentType(indexRequest.getContentType());
+        request.addParameters(parameters.asMap());
         request.setEntity(new NByteArrayEntity(source.bytes, source.offset, source.length, contentType));
         return request;
     }
@@ -348,7 +354,7 @@ final class RequestConverters {
             : endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
 
-        Params parameters = new Params(request);
+        Params parameters = new Params();
         parameters.withRouting(updateRequest.routing());
         parameters.withTimeout(updateRequest.timeout());
         parameters.withRefreshPolicy(updateRequest.getRefreshPolicy());
@@ -379,6 +385,7 @@ final class RequestConverters {
         if (xContentType == null) {
             xContentType = Requests.INDEX_CONTENT_TYPE;
         }
+        request.addParameters(parameters.asMap());
         request.setEntity(createEntity(updateRequest, xContentType));
         return request;
     }
@@ -393,12 +400,13 @@ final class RequestConverters {
     static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint));
 
-        Params params = new Params(request);
+        Params params = new Params();
         addSearchRequestParams(params, searchRequest);
 
         if (searchRequest.source() != null) {
             request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE));
         }
+        request.addParameters(params.asMap());
         return request;
     }
 
@@ -436,7 +444,7 @@ final class RequestConverters {
     static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, "/_msearch");
 
-        Params params = new Params(request);
+        Params params = new Params();
         params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
         if (multiSearchRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) {
             params.putParam("max_concurrent_searches", Integer.toString(multiSearchRequest.maxConcurrentSearchRequests()));
@@ -444,6 +452,7 @@ final class RequestConverters {
 
         XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent();
         byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent);
+        request.addParameters(params.asMap());
         request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type())));
         return request;
     }
@@ -458,8 +467,9 @@ final class RequestConverters {
             String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template");
             request = new Request(HttpGet.METHOD_NAME, endpoint);
 
-            Params params = new Params(request);
+            Params params = new Params();
             addSearchRequestParams(params, searchRequest);
+            request.addParameters(params.asMap());
         }
 
         request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
@@ -469,7 +479,7 @@ final class RequestConverters {
    static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest) throws IOException {
         Request request = new Request(HttpPost.METHOD_NAME, "/_msearch/template");
 
-        Params params = new Params(request);
+        Params params = new Params();
         params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
         if (multiSearchTemplateRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) {
|
||||
params.putParam("max_concurrent_searches", Integer.toString(multiSearchTemplateRequest.maxConcurrentSearchRequests()));
|
||||
|
@ -483,10 +493,11 @@ final class RequestConverters {
|
|||
|
||||
static Request count(CountRequest countRequest) throws IOException {
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint(countRequest.indices(), countRequest.types(), "_count"));
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withRouting(countRequest.routing());
|
||||
params.withPreference(countRequest.preference());
|
||||
params.withIndicesOptions(countRequest.indicesOptions());
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(countRequest.source(), REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -497,11 +508,12 @@ final class RequestConverters {
|
|||
: endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain");
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withStoredFields(explainRequest.storedFields());
|
||||
params.withFetchSourceContext(explainRequest.fetchSourceContext());
|
||||
params.withRouting(explainRequest.routing());
|
||||
params.withPreference(explainRequest.preference());
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -509,18 +521,19 @@ final class RequestConverters {
|
|||
static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps"));
|
||||
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withFields(fieldCapabilitiesRequest.fields());
|
||||
params.withIndicesOptions(fieldCapabilitiesRequest.indicesOptions());
|
||||
request.addParameters(params.asMap());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request rankEval(RankEvalRequest rankEvalRequest) throws IOException {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint(rankEvalRequest.indices(), Strings.EMPTY_ARRAY, "_rank_eval"));
|
||||
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withIndicesOptions(rankEvalRequest.indicesOptions());
|
||||
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(rankEvalRequest.getRankEvalSpec(), REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -536,7 +549,7 @@ final class RequestConverters {
|
|||
private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request)
|
||||
Params params = new Params()
|
||||
.withWaitForCompletion(waitForCompletion)
|
||||
.withRefresh(reindexRequest.isRefresh())
|
||||
.withTimeout(reindexRequest.getTimeout())
|
||||
|
@ -546,6 +559,7 @@ final class RequestConverters {
|
|||
if (reindexRequest.getScrollTime() != null) {
|
||||
params.putParam("scroll", reindexRequest.getScrollTime());
|
||||
}
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -554,7 +568,7 @@ final class RequestConverters {
|
|||
String endpoint =
|
||||
endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request)
|
||||
Params params = new Params()
|
||||
.withRouting(updateByQueryRequest.getRouting())
|
||||
.withPipeline(updateByQueryRequest.getPipeline())
|
||||
.withRefresh(updateByQueryRequest.isRefresh())
|
||||
|
@ -574,6 +588,7 @@ final class RequestConverters {
|
|||
if (updateByQueryRequest.getSize() > 0) {
|
||||
params.putParam("size", Integer.toString(updateByQueryRequest.getSize()));
|
||||
}
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(updateByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -582,7 +597,7 @@ final class RequestConverters {
|
|||
String endpoint =
|
||||
endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request)
|
||||
Params params = new Params()
|
||||
.withRouting(deleteByQueryRequest.getRouting())
|
||||
.withRefresh(deleteByQueryRequest.isRefresh())
|
||||
.withTimeout(deleteByQueryRequest.getTimeout())
|
||||
|
@ -601,6 +616,7 @@ final class RequestConverters {
|
|||
if (deleteByQueryRequest.getSize() > 0) {
|
||||
params.putParam("size", Integer.toString(deleteByQueryRequest.getSize()));
|
||||
}
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(deleteByQueryRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -621,22 +637,24 @@ final class RequestConverters {
|
|||
String endpoint = new EndpointBuilder().addPathPart(firstPathPart).addPathPart(rethrottleRequest.getTaskId().toString())
|
||||
.addPathPart("_rethrottle").build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request)
|
||||
Params params = new Params()
|
||||
.withRequestsPerSecond(rethrottleRequest.getRequestsPerSecond());
|
||||
// we set "group_by" to "none" because this is the response format we can parse back
|
||||
params.putParam("group_by", "none");
|
||||
request.addParameters(params.asMap());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withTimeout(putStoredScriptRequest.timeout());
|
||||
params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout());
|
||||
if (Strings.hasText(putStoredScriptRequest.context())) {
|
||||
params.putParam("context", putStoredScriptRequest.context());
|
||||
}
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(putStoredScriptRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -667,11 +685,12 @@ final class RequestConverters {
|
|||
}
|
||||
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withRouting(tvrequest.getRouting());
|
||||
params.withPreference(tvrequest.getPreference());
|
||||
params.withFields(tvrequest.getFields());
|
||||
params.withRealtime(tvrequest.getRealtime());
|
||||
request.addParameters(params.asMap());
|
||||
request.setEntity(createEntity(tvrequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
@ -686,17 +705,19 @@ final class RequestConverters {
|
|||
static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout());
|
||||
request.addParameters(params.asMap());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
Params params = new Params();
|
||||
params.withTimeout(deleteStoredScriptRequest.timeout());
|
||||
params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout());
|
||||
request.addParameters(params.asMap());
|
||||
return request;
|
||||
}
|
||||
|
||||
|
@ -756,15 +777,14 @@ final class RequestConverters {
|
|||
* a {@link Request} and adds the parameters to it directly.
|
||||
*/
|
||||
static class Params {
|
||||
private final Request request;
|
||||
private final Map<String,String> parameters = new HashMap<>();
|
||||
|
||||
Params(Request request) {
|
||||
this.request = request;
|
||||
Params() {
|
||||
}
|
||||
|
||||
Params putParam(String name, String value) {
|
||||
if (Strings.hasLength(value)) {
|
||||
request.addParameter(name, value);
|
||||
parameters.put(name,value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
@ -776,6 +796,10 @@ final class RequestConverters {
|
|||
return this;
|
||||
}
|
||||
|
||||
Map<String, String> asMap(){
|
||||
return parameters;
|
||||
}
|
||||
|
||||
Params withDocAsUpsert(boolean docAsUpsert) {
|
||||
if (docAsUpsert) {
|
||||
return putParam("doc_as_upsert", Boolean.TRUE.toString());
|
||||
|
@ -939,6 +963,7 @@ final class RequestConverters {
|
|||
expandWildcards = joiner.toString();
|
||||
}
|
||||
putParam("expand_wildcards", expandWildcards);
|
||||
putParam("ignore_throttled", Boolean.toString(indicesOptions.ignoreThrottled()));
|
||||
}
|
||||
return this;
|
||||
}
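
A minimal sketch of what `Params` boils down to after these hunks (assuming nothing beyond what the diff shows): it is now a plain map builder with no back-reference to a `Request`, so parameters can be assembled and inspected before any request exists.

[source,java]
----
// Condensed view of the refactored Params; the with*() helpers all funnel
// into putParam(), which now writes to the local map instead of the Request.
static class Params {
    private final Map<String, String> parameters = new HashMap<>();

    Params putParam(String name, String value) {
        if (Strings.hasLength(value)) {
            parameters.put(name, value);
        }
        return this;
    }

    Map<String, String> asMap() {
        return parameters;
    }
}
----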

@ -68,11 +68,12 @@ final class RollupRequestConverters {
.build();

Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(stopRollupJobRequest.timeout());
if (stopRollupJobRequest.waitForCompletion() != null) {
parameters.withWaitForCompletion(stopRollupJobRequest.waitForCompletion());
}
request.addParameters(parameters.asMap());
return request;
}

@ -66,8 +66,9 @@ final class SecurityRequestConverters {
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE));
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -87,8 +88,9 @@ final class SecurityRequestConverters {
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE));
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putUserRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -98,8 +100,9 @@ final class SecurityRequestConverters {
.addPathPart(deleteUserRequest.getName())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deleteUserRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -110,8 +113,9 @@ final class SecurityRequestConverters {
.build();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRoleMappingRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request);
final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putRoleMappingRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -139,8 +143,9 @@ final class SecurityRequestConverters {
.addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable")
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -161,8 +166,9 @@ final class SecurityRequestConverters {
final String endpoint = builder.addPathPartAsIs("_clear_cache").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
if (clearRealmCacheRequest.getUsernames().isEmpty() == false) {
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.putParam("usernames", Strings.collectionToCommaDelimitedString(clearRealmCacheRequest.getUsernames()));
request.addParameters(params.asMap());
}
return request;
}

@ -182,8 +188,9 @@ final class SecurityRequestConverters {
.addPathPart(deleteRoleMappingRequest.getName())
.build();
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params(request);
final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deleteRoleMappingRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -193,8 +200,9 @@ final class SecurityRequestConverters {
.addPathPart(deleteRoleRequest.getName())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deleteRoleRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -231,8 +239,9 @@ final class SecurityRequestConverters {
static Request putPrivileges(final PutPrivilegesRequest putPrivilegesRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, "/_security/privilege");
request.setEntity(createEntity(putPrivilegesRequest, REQUEST_BODY_CONTENT_TYPE));
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putPrivilegesRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -243,8 +252,9 @@ final class SecurityRequestConverters {
.addCommaSeparatedPathParts(deletePrivilegeRequest.getPrivileges())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(deletePrivilegeRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -255,16 +265,18 @@ final class SecurityRequestConverters {
.build();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
request.setEntity(createEntity(putRoleRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request);
final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(putRoleRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

static Request createApiKey(final CreateApiKeyRequest createApiKeyRequest) throws IOException {
final Request request = new Request(HttpPost.METHOD_NAME, "/_security/api_key");
request.setEntity(createEntity(createApiKeyRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request);
final RequestConverters.Params params = new RequestConverters.Params();
params.withRefreshPolicy(createApiKeyRequest.getRefreshPolicy());
request.addParameters(params.asMap());
return request;
}

@ -282,13 +294,13 @@ final class SecurityRequestConverters {
if (Strings.hasText(getApiKeyRequest.getRealmName())) {
request.addParameter("realm_name", getApiKeyRequest.getRealmName());
}

return request;
}

static Request invalidateApiKey(final InvalidateApiKeyRequest invalidateApiKeyRequest) throws IOException {
final Request request = new Request(HttpDelete.METHOD_NAME, "/_security/api_key");
request.setEntity(createEntity(invalidateApiKeyRequest, REQUEST_BODY_CONTENT_TYPE));
final RequestConverters.Params params = new RequestConverters.Params(request);
return request;
}
}

@ -46,9 +46,10 @@ final class SnapshotRequestConverters {
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
parameters.withLocal(getRepositoriesRequest.local());
request.addParameters(parameters.asMap());
return request;
}

@ -56,11 +57,11 @@ final class SnapshotRequestConverters {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(putRepositoryRequest.timeout());
parameters.withVerify(putRepositoryRequest.verify());

request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}

@ -70,9 +71,10 @@ final class SnapshotRequestConverters {
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(deleteRepositoryRequest.timeout());
request.addParameters(parameters.asMap());
return request;
}

@ -83,9 +85,10 @@ final class SnapshotRequestConverters {
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
parameters.withTimeout(verifyRepositoryRequest.timeout());
request.addParameters(parameters.asMap());
return request;
}

@ -95,9 +98,10 @@ final class SnapshotRequestConverters {
.addPathPart(createSnapshotRequest.snapshot())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}

@ -114,11 +118,11 @@ final class SnapshotRequestConverters {

Request request = new Request(HttpGet.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));

request.addParameters(parameters.asMap());
return request;
}

@ -130,9 +134,10 @@ final class SnapshotRequestConverters {
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
request.addParameters(parameters.asMap());
return request;
}

@ -143,9 +148,10 @@ final class SnapshotRequestConverters {
.addPathPartAsIs("_restore")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}

@ -157,8 +163,9 @@ final class SnapshotRequestConverters {
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request;
}
}

@ -32,12 +32,13 @@ final class TasksRequestConverters {

static Request cancelTasks(CancelTasksRequest cancelTasksRequest) {
Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel");
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(cancelTasksRequest.getTimeout())
.withTaskId(cancelTasksRequest.getTaskId())
.withNodes(cancelTasksRequest.getNodes())
.withParentTaskId(cancelTasksRequest.getParentTaskId())
.withActions(cancelTasksRequest.getActions());
request.addParameters(params.asMap());
return request;
}

@ -46,7 +47,7 @@ final class TasksRequestConverters {
throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
}
Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(listTaskRequest.getTimeout())
.withDetailed(listTaskRequest.getDetailed())
.withWaitForCompletion(listTaskRequest.getWaitForCompletion())

@ -54,6 +55,7 @@ final class TasksRequestConverters {
.withNodes(listTaskRequest.getNodes())
.withActions(listTaskRequest.getActions())
.putParam("group_by", "none");
request.addParameters(params.asMap());
return request;
}

@ -62,9 +64,10 @@ final class TasksRequestConverters {
.addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId()))
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(getTaskRequest.getTimeout())
.withWaitForCompletion(getTaskRequest.getWaitForCompletion());
request.addParameters(params.asMap());
return request;
}

@ -69,12 +69,13 @@ final class WatcherRequestConverters {
.build();

Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request)
RequestConverters.Params params = new RequestConverters.Params()
.withIfSeqNo(putWatchRequest.ifSeqNo())
.withIfPrimaryTerm(putWatchRequest.ifPrimaryTerm());
if (putWatchRequest.isActive() == false) {
params.putParam("active", "false");
}
request.addParameters(params.asMap());
ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType());
BytesReference source = putWatchRequest.getSource();
request.setEntity(new NByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));

@ -118,7 +119,7 @@ final class WatcherRequestConverters {
.addPathPartAsIs("_execute").build();

Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
RequestConverters.Params params = new RequestConverters.Params();
if (executeWatchRequest.isDebug()) {
params.putParam("debug", "true");
}

@ -128,7 +129,7 @@ final class WatcherRequestConverters {
if (executeWatchRequest.recordExecution()) {
params.putParam("record_execution", "true");
}

request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(executeWatchRequest, XContentType.JSON));
return request;
}

@ -158,7 +159,7 @@ final class WatcherRequestConverters {
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_watcher", "stats");
String endpoint = builder.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
StringBuilder metric = new StringBuilder();
if (watcherStatsRequest.includeCurrentWatches()) {
metric.append("current_watches");

@ -172,6 +173,7 @@ final class WatcherRequestConverters {
if (metric.length() > 0) {
parameters.putParam("metric", metric.toString());
}
request.addParameters(parameters.asMap());
return request;
}
}

@ -46,8 +46,9 @@ final class XPackRequestConverters {

static Request usage(XPackUsageRequest usageRequest) {
Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
RequestConverters.Params parameters = new RequestConverters.Params(request);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(usageRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request;
}
}

@ -30,21 +30,19 @@ import java.util.List;

public class StartDataFrameTransformResponse extends AcknowledgedTasksResponse {

private static final String STARTED = "started";
private static final String ACKNOWLEDGED = "acknowledged";

private static final ConstructingObjectParser<StartDataFrameTransformResponse, Void> PARSER =
AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new, STARTED);
AcknowledgedTasksResponse.generateParser("start_data_frame_transform_response", StartDataFrameTransformResponse::new,
ACKNOWLEDGED);

public static StartDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}

public StartDataFrameTransformResponse(boolean started, @Nullable List<TaskOperationFailure> taskFailures,
public StartDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
super(started, taskFailures, nodeFailures);
super(acknowledged, taskFailures, nodeFailures);
}

public boolean isStarted() {
return isAcknowledged();
}
}

@ -30,21 +30,18 @@ import java.util.List;

public class StopDataFrameTransformResponse extends AcknowledgedTasksResponse {

private static final String STOPPED = "stopped";
private static final String ACKNOWLEDGED = "acknowledged";

private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER =
AcknowledgedTasksResponse.generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, STOPPED);
private static final ConstructingObjectParser<StopDataFrameTransformResponse, Void> PARSER = AcknowledgedTasksResponse
.generateParser("stop_data_frame_transform_response", StopDataFrameTransformResponse::new, ACKNOWLEDGED);

public static StopDataFrameTransformResponse fromXContent(final XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}

public StopDataFrameTransformResponse(boolean stopped, @Nullable List<TaskOperationFailure> taskFailures,
public StopDataFrameTransformResponse(boolean acknowledged, @Nullable List<TaskOperationFailure> taskFailures,
@Nullable List<? extends ElasticsearchException> nodeFailures) {
super(stopped, taskFailures, nodeFailures);
super(acknowledged, taskFailures, nodeFailures);
}

public boolean isStopped() {
return isAcknowledged();
}
}
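
Caller impact of the rename is small because the old accessors delegate; a hedged sketch (the transform id and the `client` variable are assumptions, an initialized `RestHighLevelClient`):

[source,java]
----
StartDataFrameTransformResponse response = client.dataFrame().startDataFrameTransform(
    new StartDataFrameTransformRequest("my-transform"), RequestOptions.DEFAULT);
// isStarted() and isAcknowledged() now report the same underlying flag,
// parsed from the "acknowledged" field of the response body.
boolean ok = response.isAcknowledged();
----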

@ -258,7 +258,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
StartDataFrameTransformRequest startRequest = new StartDataFrameTransformRequest(id);
StartDataFrameTransformResponse startResponse =
execute(startRequest, client::startDataFrameTransform, client::startDataFrameTransformAsync);
assertTrue(startResponse.isStarted());
assertTrue(startResponse.isAcknowledged());
assertThat(startResponse.getNodeFailures(), empty());
assertThat(startResponse.getTaskFailures(), empty());

@ -271,7 +271,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
StopDataFrameTransformRequest stopRequest = new StopDataFrameTransformRequest(id, Boolean.TRUE, null);
StopDataFrameTransformResponse stopResponse =
execute(stopRequest, client::stopDataFrameTransform, client::stopDataFrameTransformAsync);
assertTrue(stopResponse.isStopped());
assertTrue(stopResponse.isAcknowledged());
assertThat(stopResponse.getNodeFailures(), empty());
assertThat(stopResponse.getTaskFailures(), empty());
}

@ -358,7 +358,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
StartDataFrameTransformResponse startTransformResponse = execute(new StartDataFrameTransformRequest(id),
client::startDataFrameTransform,
client::startDataFrameTransformAsync);
assertThat(startTransformResponse.isStarted(), is(true));
assertThat(startTransformResponse.isAcknowledged(), is(true));
assertBusy(() -> {
GetDataFrameTransformStatsResponse response = execute(new GetDataFrameTransformStatsRequest(id),
client::getDataFrameTransformStats, client::getDataFrameTransformStatsAsync);

@ -1567,7 +1567,7 @@ public class RequestConvertersTests extends ESTestCase {
endpoint.add("_field_caps");

assertEquals(endpoint.toString(), request.getEndpoint());
assertEquals(4, request.getParameters().size());
assertEquals(5, request.getParameters().size());

// Note that we don't check the field param value explicitly, as field names are
// passed through

@ -1601,7 +1601,7 @@ public class RequestConvertersTests extends ESTestCase {
}
endpoint.add(RestRankEvalAction.ENDPOINT);
assertEquals(endpoint.toString(), request.getEndpoint());
assertEquals(3, request.getParameters().size());
assertEquals(4, request.getParameters().size());
assertEquals(expectedParams, request.getParameters());
assertToXContentBody(spec, request.getEntity());
}

@ -1928,7 +1928,8 @@ public class RequestConvertersTests extends ESTestCase {
Map<String, String> expectedParams) {

if (randomBoolean()) {
setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(),
true, false, false, randomBoolean()));
}
expectedParams.put("ignore_unavailable", Boolean.toString(getter.get().ignoreUnavailable()));
expectedParams.put("allow_no_indices", Boolean.toString(getter.get().allowNoIndices()));

@ -1941,11 +1942,13 @@ public class RequestConvertersTests extends ESTestCase {
} else {
expectedParams.put("expand_wildcards", "none");
}
expectedParams.put("ignore_throttled", Boolean.toString(getter.get().ignoreThrottled()));
}

static IndicesOptions setRandomIndicesOptions(IndicesOptions indicesOptions, Map<String, String> expectedParams) {
if (randomBoolean()) {
indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean());
indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(),
true, false, false, randomBoolean());
}
expectedParams.put("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable()));
expectedParams.put("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices()));

@ -1958,6 +1961,7 @@ public class RequestConvertersTests extends ESTestCase {
} else {
expectedParams.put("expand_wildcards", "none");
}
expectedParams.put("ignore_throttled", Boolean.toString(indicesOptions.ignoreThrottled()));
return indicesOptions;
}

@ -244,7 +244,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
request, RequestOptions.DEFAULT);
// end::start-data-frame-transform-execute

assertTrue(response.isStarted());
assertTrue(response.isAcknowledged());
}
{
// tag::stop-data-frame-transform-request

@ -263,7 +263,7 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
request, RequestOptions.DEFAULT);
// end::stop-data-frame-transform-execute

assertTrue(response.isStopped());
assertTrue(response.isAcknowledged());
}
{
// tag::start-data-frame-transform-execute-listener

@ -81,6 +81,10 @@ public final class Request {
}
}

public void addParameters(Map<String, String> paramSource){
paramSource.forEach(this::addParameter);
}
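
The new method is just a bulk variant of the existing `addParameter`; for example:

[source,java]
----
Request request = new Request("GET", "/_cluster/health");
Map<String, String> params = new HashMap<>();
params.put("level", "shards");
params.put("timeout", "30s");
request.addParameters(params); // calls addParameter(name, value) for each entry
----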

/**
* Query string parameters. The returned map is an unmodifiable view of the
* map in the request so calls to {@link #addParameter(String, String)}

@ -1,4 +1,3 @@
[role="xpack"]
[testenv="platinum"]
`max_read_request_operation_count`::
(integer) the maximum number of operations to pull per read from the remote

@ -41,7 +41,7 @@ When the {dataframe-transform} starts, you receive the following results:
[source,js]
----
{
"started" : true
"acknowledged" : true
}
----
// TESTRESPONSE

@ -61,7 +61,7 @@ When the {dataframe-transform} stops, you receive the following results:
[source,js]
----
{
"stopped" : true
"acknowledged" : true
}
----
// TESTRESPONSE

@ -16,7 +16,7 @@ PUT trips
},
"route_length_miles": {
"type": "alias",
"path": "distance" // <1>
"path": "distance" <1>
},
"transit_mode": {
"type": "keyword"

@ -193,7 +193,7 @@ phase. Instead, highlighting needs to be performed via

=============================================

[[limit-number-nested-fields]]
==== Limiting the number of `nested` fields

Indexing a document with 100 nested fields actually indexes 101 documents as each nested

@ -11,11 +11,13 @@

[float]
[[removed-global-ordinals-hash-and-global-ordinals-low-cardinality-terms-agg]]
==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed

These `execution_hint` are removed and should be replaced by `global_ordinals`.

[float]
[[search-max-buckets-cluster-setting]]
==== `search.max_buckets` in the cluster setting

The dynamic cluster setting named `search.max_buckets` now defaults

@ -23,12 +25,14 @@ to 10,000 (instead of unlimited in the previous version).
Requests that try to return more than the limit will fail with an exception.
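
As an illustration, the limit can be raised dynamically; a sketch using the Java high-level client (the setting name comes from the text above, the value and the `client` variable are assumptions):

[source,java]
----
ClusterUpdateSettingsRequest settingsRequest = new ClusterUpdateSettingsRequest();
settingsRequest.transientSettings(Settings.builder()
    .put("search.max_buckets", 20000) // raise the 10,000 default
    .build());
client.cluster().putSettings(settingsRequest, RequestOptions.DEFAULT);
----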

[float]
[[missing-option-removed-composite-agg]]
==== `missing` option of the `composite` aggregation has been removed

The `missing` option of the `composite` aggregation, deprecated in 6.x,
has been removed. `missing_bucket` should be used instead.

[float]
[[replace-params-agg-with-state-context-variable]]
==== Replaced `params._agg` with `state` context variable in scripted metric aggregations

The object used to share aggregation state between the scripts in a Scripted Metric

@ -36,12 +40,14 @@ Aggregation is now a variable called `state` available in the script context, ra
being provided via the `params` object as `params._agg`.

[float]
[[reduce-script-combine-script-params-mandatory]]
==== Make metric aggregation script parameters `reduce_script` and `combine_script` mandatory

The metric aggregation has been changed to require these two script parameters to ensure users are
explicitly defining how their data is processed.
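
A hedged sketch of both changes together, via the Java client builders (the field name and the Painless scripts are illustrative, not from this commit):

[source,java]
----
ScriptedMetricAggregationBuilder agg = AggregationBuilders.scriptedMetric("total_len")
    .initScript(new Script("state.lens = []"))   // `state` replaces params._agg
    .mapScript(new Script("state.lens.add(doc['title'].value.length())"))
    // combine_script and reduce_script are now mandatory:
    .combineScript(new Script("double s = 0; for (l in state.lens) { s += l } return s"))
    .reduceScript(new Script("double s = 0; for (x in states) { s += x } return s"));
----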

[float]
[[percentiles-percentile-ranks-return-null-instead-nan]]
==== `percentiles` and `percentile_ranks` now return `null` instead of `NaN`

The `percentiles` and `percentile_ranks` aggregations used to return `NaN` in

@ -49,6 +55,7 @@ the response if they were applied to an empty set of values. Because `NaN` is
not officially supported by JSON, it has been replaced with `null`.

[float]
[[stats-extended-stats-return-zero-instead-null]]
==== `stats` and `extended_stats` now return 0 instead of `null` for zero docs

When the `stats` and `extended_stats` aggregations collected zero docs (`doc_count: 0`),

@ -10,6 +10,7 @@
// end::notable-breaking-changes[]

[float]
[[limit-number-of-tokens-produced-by-analyze]]
==== Limiting the number of tokens produced by _analyze

To safeguard against out of memory errors, the number of tokens that can be produced

@ -27,6 +28,7 @@ limited to 1000000. This default limit can be changed
for a particular index with the index setting `index.highlight.max_analyzed_offset`.

[float]
[[delimited-payload-filter-renaming]]
==== `delimited_payload_filter` renaming

The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2.

@ -35,6 +37,7 @@ name in new indices created in 7.0 will throw an error. Use the new name `delimi
instead.

[float]
[[standard-filter-removed]]
==== `standard` filter has been removed

The `standard` token filter has been removed because it doesn't change anything in the stream.

@ -48,6 +51,7 @@ Indexes created using this analyzer will still be readable in elasticsearch 7.0,
but it will not be possible to create new indexes using it.

[float]
[[deprecated-ngram-edgengram-token-filter-cannot-be-used]]
==== The deprecated `nGram` and `edgeNGram` token filter cannot be used on new indices

The `nGram` and `edgeNGram` token filter names have been deprecated in an earlier 6.x version.

@ -76,6 +76,7 @@ pools. Note that `core` and `max` will be populated for scaling thread pools,
and `size` will be populated for fixed thread pools.

[float]
[[fields-param-removed-bulk-update-request]]
==== The parameter `fields` deprecated in 6.x has been removed from Bulk request
and Update request. The Update API returns `400 - Bad request` if request contains
unknown parameters (instead of ignored in the previous version).

@ -118,6 +119,7 @@ body. Specifying `fields` in the request body as opposed to a parameter was depr
in 6.4.0, and is now unsupported in 7.0.0.

[float]
[[copy-settings-deprecated-shrink-split-apis]]
==== `copy_settings` is deprecated on shrink and split APIs

Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink

@ -143,6 +145,7 @@ current user was not authorized for any alias. An empty response with
status 200 - OK is now returned instead at all times.

[float]
[[user-object-removed-put-user-api]]
==== Put User API response no longer has `user` object

The Put User API response was changed in 6.5.0 to add the `created` field

@ -150,6 +153,7 @@ outside of the user object where it previously had been. In 7.0.0 the user
object has been removed in favor of the top level `created` field.

[float]
[[source-include-exclude-params-removed]]
==== Source filtering url parameters `_source_include` and `_source_exclude` have been removed

The deprecated in 6.x url parameters are now removed. Use `_source_includes` and `_source_excludes` instead.

@ -168,6 +172,7 @@ removed.

[float]
[[deprecated-termvector-endpoint-removed]]
==== Deprecated `_termvector` endpoint removed

The `_termvector` endpoint was deprecated in 2.0 and has now been removed.

@ -186,6 +191,7 @@ using the `allow_restricted_indices` flag on the permission (as any other index
privilege).

[float]
[[remove-get-support-cache-clear-api]]
==== Removed support for `GET` on the `_cache/clear` API

The `_cache/clear` API no longer supports the `GET` HTTP verb. It must be called

@ -17,6 +17,7 @@ Due to cross-cluster search using `:` to separate a cluster and index name,
cluster names may no longer contain `:`.

[float]
[[new-default-wait-for-active-shards-param]]
==== New default for `wait_for_active_shards` parameter of the open index command

The default value for the `wait_for_active_shards` parameter of the open index API

@ -24,6 +25,7 @@ is changed from 0 to 1, which means that the command will now by default wait fo
primary shards of the opened index to be allocated.
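
A sketch of opting back into the old behaviour from the Java client (the index name is hypothetical):

[source,java]
----
OpenIndexRequest openRequest = new OpenIndexRequest("my-index");
// Restore the pre-7.0 default of not waiting for any active shards:
openRequest.waitForActiveShards(ActiveShardCount.from(0));
----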
|
||||
|
||||
[float]
|
||||
[[shard-preferences-removed]]
|
||||
==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed
|
||||
These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences.
|
||||
|
||||
|
|
|
@ -53,6 +53,7 @@ above. If you are preparing to upgrade from an earlier version, you must set
|
|||
`discovery.zen.ping.unicast.hosts` or `discovery.zen.hosts_provider`.
|
||||
|
||||
[float]
|
||||
[[new-name-no-master-block-setting]]
|
||||
==== New name for `no_master_block` setting
|
||||
|
||||
The `discovery.zen.no_master_block` setting is now known as
|
||||
|
|
|
@ -22,12 +22,14 @@ Due to cross-cluster search using `:` to separate a cluster and index name,
|
|||
index names may no longer contain `:`.
|
||||
|
||||
[float]
|
||||
[[index-unassigned-node-left-delayed-timeout-no-longer-negative]]
|
||||
==== `index.unassigned.node_left.delayed_timeout` may no longer be negative
|
||||
|
||||
Negative values were interpreted as zero in earlier versions but are no
|
||||
longer accepted.
|
||||
|
||||
[float]
|
||||
[[flush-force-merge-no-longer-refresh]]
|
||||
==== `_flush` and `_force_merge` will no longer refresh
|
||||
|
||||
In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`)
|
||||
|
@ -85,6 +87,7 @@ The following previously deprecated url parameter have been removed:
|
|||
* `field_data` - use `fielddata` instead
|
||||
|
||||
[float]
|
||||
[[network-breaker-inflight-requests-overhead-increased-to-2]]
|
||||
==== `network.breaker.inflight_requests.overhead` increased to 2
|
||||
|
||||
Previously the in flight requests circuit breaker considered only the raw byte representation.
|
||||
|
@ -108,11 +111,13 @@ there is less need for fielddata. Therefore, the default value of the setting
|
|||
heap size.
|
||||
|
||||
[float]
|
||||
[[fix-value-for-index-shard-check-on-startup-removed]]
|
||||
==== `fix` value for `index.shard.check_on_startup` is removed
|
||||
|
||||
Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported.
|
||||
|
||||
[float]
|
||||
[[elasticsearch-translog-removed]]
|
||||
==== `elasticsearch-translog` is removed
|
||||
|
||||
Use the `elasticsearch-shard` tool to remove corrupted translog data.
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
// end::notable-breaking-changes[]
|
||||
|
||||
[float]
|
||||
[[isshardsacked-removed]]
|
||||
==== `isShardsAcked` deprecated in `6.2` has been removed
|
||||
|
||||
`isShardsAcked` has been replaced by `isShardsAcknowledged` in
|
||||
|
@ -17,6 +18,7 @@
|
|||
`CreateIndexClusterStateUpdateResponse`.
|
||||
|
||||
[float]
|
||||
[[prepareexecute-removed-client-api]]
|
||||
==== `prepareExecute` removed from the client api
|
||||
|
||||
The `prepareExecute` method which created a request builder has been
|
||||
|
@ -36,18 +38,21 @@ was moved to `org.elasticsearch.search.aggregations.PipelineAggregationBuilders`
|
|||
|
||||
|
||||
[float]
|
||||
[[retry-withbackoff-methods-removed]]
|
||||
==== `Retry.withBackoff` methods with `Settings` removed
|
||||
|
||||
The variants of `Retry.withBackoff` that included `Settings` have been removed
|
||||
because `Settings` is no longer needed.
|
||||
|
||||
[float]
|
||||
[[client-termvector-removed]]
|
||||
==== Deprecated method `Client#termVector` removed
|
||||
|
||||
The client method `termVector`, deprecated in 2.0, has been removed. The method
|
||||
`termVectors` (plural) should be used instead.
|
||||
|
||||
[float]
|
||||
[[abstractlifecyclecomponent-constructor-removed]]
|
||||
==== Deprecated constructor `AbstractLifecycleComponent(Settings settings)` removed
|
||||
|
||||
The constructor `AbstractLifecycleComponent(Settings settings)`, deprecated in 6.7
|
||||
|
|
|
@ -82,6 +82,7 @@ compile time, ensure you have proper test coverage for this in your
|
|||
own code.
|
||||
|
||||
[float]
|
||||
[[parsing-gtm0-timezeone-jdk8-not-supported]]
|
||||
==== Parsing `GMT0` timezone with JDK8 is not supported
|
||||
|
||||
When you are running Elasticsearch 7 with Java 8, you are not able to parse
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
// end::notable-breaking-changes[]
|
||||
|
||||
[float]
|
||||
[[new-json-format-log-directory]]
|
||||
==== New JSON format log files in `log` directory
|
||||
|
||||
Elasticsearch now will produce additional log files in JSON format. They will be stored in `*.json` suffix files.
|
||||
|
@ -28,6 +29,7 @@ Following files should be expected now in log directory:
|
|||
Note: You can configure which of these files are written by editing `log4j2.properties`.
|
||||
|
||||
[float]
|
||||
[[log-files-ending-log-deprecated]]
|
||||
==== Log files ending with `*.log` deprecated
|
||||
Log files with the `.log` file extension using the old pattern layout format
|
||||
are now considered deprecated and the newly added JSON log file format with
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
// end::notable-breaking-changes[]
|
||||
|
||||
[float]
|
||||
[[maxretrytimeout-removed]]
|
||||
==== Support for `maxRetryTimeout` removed from RestClient
|
||||
|
||||
`RestClient` and `RestClientBuilder` no longer support the `maxRetryTimeout`
|
||||
|
|
|
@ -10,11 +10,13 @@
|
|||
// end::notable-breaking-changes[]
|
||||
|
||||
[float]
|
||||
[[all-meta-field-removed]]
|
||||
==== The `_all` meta field is removed
|
||||
|
||||
The `_all` field deprecated in 6 have now been removed.
|
||||
|
||||
[float]
|
||||
[[uid-meta-field-removed]]
|
||||
==== The `_uid` meta field is removed
|
||||
|
||||
This field used to index a composite key formed of the `_type` and the `_id`.
|
||||
|
@ -23,6 +25,7 @@ of `_id`.
|
|||
|
||||
//tag::notable-breaking-changes[]
|
||||
[float]
|
||||
[[default-mapping-not-allowed]]
|
||||
==== The `_default_` mapping is no longer allowed
|
||||
|
||||
The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed
|
||||
|
@ -31,11 +34,13 @@ an error.
|
|||
//end::notable-breaking-changes[]
|
||||
|
||||
[float]
|
||||
[[index-options-numeric-fields-removed]]
|
||||
==== `index_options` for numeric fields has been removed
|
||||
|
||||
The `index_options` field for numeric fields has been deprecated in 6 and has now been removed.
|
||||
|
||||
[float]
|
||||
[[limit-number-nested-json-objects]]
|
||||
==== Limiting the number of `nested` json objects
|
||||
|
||||
To safeguard against out of memory errors, the number of nested json objects within a single
|
||||
|
@ -43,11 +48,13 @@ document across all fields has been limited to 10000. This default limit can be
|
|||
the index setting `index.mapping.nested_objects.limit`.
|
||||
|
||||
[float]
|
||||
[[update-all-types-option-removed]]
|
||||
==== The `update_all_types` option has been removed
|
||||
|
||||
This option is useless now that all indices have at most one type.
|
||||
|
||||
[float]
|
||||
[[classic-similarity-removed]]
|
||||
==== The `classic` similarity has been removed
|
||||
|
||||
The `classic` similarity relied on coordination factors for scoring to be good
|
||||
|
@ -63,6 +70,7 @@ An error will now be thrown when unknown configuration options are provided
|
|||
to similarities. Such unknown parameters were ignored before.
|
||||
|
||||
[float]
|
||||
[[changed-default-geo-shape-index-strategy]]
|
||||
==== Changed default `geo_shape` indexing strategy
|
||||
|
||||
`geo_shape` types now default to using a vector indexing approach based on Lucene's new
|
||||
|
@ -76,6 +84,7 @@ should also be changed in the template to explicitly define `tree` to one of `ge
|
|||
or `quadtree`. This will ensure compatibility with previously created indexes.

[float]
[[deprecated-geo-shape-params]]
==== Deprecated `geo_shape` parameters

The following type parameters are deprecated for the `geo_shape` field type: `tree`,
@ -90,6 +99,7 @@ to 10 in the next major version. Completion fields that define more than 10
contexts in a mapping will log a deprecation warning in this version.

[float]
[[include-type-name-defaults-false]]
==== `include_type_name` now defaults to `false`
The default for `include_type_name` is now `false` for all APIs that accept
the parameter.
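Concretely, index creation and mapping requests are now typeless by default; a sketch (index and field names are placeholders):

[source,js]
----
PUT my_index
{
  "mappings": {
    "properties": {
      "title": { "type": "text" }
    }
  }
}
----
// CONSOLE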

@ -84,6 +84,7 @@ Tribe node functionality has been removed in favor of
can no longer provide their own discovery implementations.

[float]
[[watcher-hipchat-action-removed]]
==== Watcher 'hipchat' action removed

Hipchat has been deprecated and shut down as a service. The `hipchat` action for

@ -10,6 +10,7 @@
// end::notable-breaking-changes[]

[float]
[[remove-header-args]]
==== API methods accepting `Header` argument have been removed

All API methods accepting headers as a `Header` varargs argument, deprecated
@ -22,6 +23,7 @@ e.g. `client.index(indexRequest, new Header("name", "value"))` becomes
`client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());`
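A compile-ready sketch of the migration, assuming an existing `RestHighLevelClient` named `client` (index, field, and header values are placeholders):

[source,java]
----
IndexRequest indexRequest = new IndexRequest("my_index").source("field", "value");
RequestOptions options = RequestOptions.DEFAULT.toBuilder()
    .addHeader("name", "value")   // replaces the removed Header varargs
    .build();
client.index(indexRequest, options);
----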

[float]
[[cluster-health-api-default-cluster-level]]
==== Cluster Health API defaults to `cluster` level

The Cluster Health API used to default to `shards` level to ease migration

@ -32,6 +32,7 @@ To check if a document is missing a value, you can use

[float]
[[script-errors-return-400-error-codes]]
==== Script errors will return as `400` error codes

Malformed scripts, either in search templates, ingest pipelines or search

@ -77,6 +77,7 @@ PUT /_cluster/settings
// CONSOLE

[float]
[[search-api-returns-400-invalid-requests]]
==== Search API returns `400` for invalid requests

The Search API returns `400 - Bad request` while it would previously return
@ -91,6 +92,7 @@ The Search API returns `400 - Bad request` while it would previously return
* script compilation errors

[float]
[[scroll-queries-cannot-use-request-cache]]
==== Scroll queries cannot use the `request_cache` anymore

Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`)
@ -98,6 +100,7 @@ has been deprecated in 6 and will now return a `400 - Bad request`.
Scroll queries are not meant to be cached.

[float]
[[scroll-queries-cannot-use-rescore]]
==== Scroll queries cannot use `rescore` anymore

Including a rescore clause on a query that creates a scroll (`scroll=1m`) has
@ -117,6 +120,7 @@ removed.
* `jarowinkler` - replaced by `jaro_winkler`

[float]
[[popular-mode-suggesters]]
==== `popular` mode for Suggesters

The `popular` mode for Suggesters (`term` and `phrase`) now uses the doc frequency
@ -149,6 +153,7 @@ To safeguard against this, a hard limit of 1024 fields has been introduced for q
using the "all fields" mode ("default_field": "*") or other fieldname expansions (e.g. "foo*").

[float]
[[invalid-search-request-body]]
==== Invalid `_search` request body

Search requests with extra content after the main object will no longer be accepted
@ -176,6 +181,7 @@ For geo context the value of the `path` parameter is now validated against the m
and the context is only accepted if `path` points to a field with `geo_point` type.

[float]
[[semantics-changed-max-concurrent-shared-requests]]
==== Semantics changed for `max_concurrent_shard_requests`

`max_concurrent_shard_requests` used to limit the total number of concurrent shard
@ -183,6 +189,7 @@ requests a single high level search request can execute. In 7.0 this changed to
the max number of concurrent shard requests per node. The default is now `5`.
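As an illustrative sketch, the per-node limit can be tuned per request through the query parameter (the value is a placeholder):

[source,js]
----
GET /_search?max_concurrent_shard_requests=3
{
  "query": { "match_all": {} }
}
----
// CONSOLE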

[float]
[[max-score-set-to-null-when-untracked]]
==== `max_score` set to `null` when scores are not tracked

`max_score` used to be set to `0` whenever scores are not tracked. `null` is now used
@ -214,6 +221,7 @@ major version.

//tag::notable-breaking-changes[]
[float]
[[hits-total-now-object-search-response]]
==== `hits.total` is now an object in the search response

The total number of hits that match the search request is now returned as an object
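For example, a 7.0 response reports the total as a value/relation pair rather than a bare number (the figures are illustrative):

[source,js]
----
"hits": {
  "total": {
    "value": 2048,
    "relation": "eq"
  }
}
----
// NOTCONSOLE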

@ -245,6 +253,7 @@ will be removed in the next major version (8.0).
//end::notable-breaking-changes[]

[float]
[[hits-total-omitted-if-disabled]]
==== `hits.total` is omitted in the response if `track_total_hits` is disabled (false)

If `track_total_hits` is set to `false` in the search request, the search response
@ -254,6 +263,7 @@ to get the old format back (`"total": -1`).

//tag::notable-breaking-changes[]
[float]
[[track-total-hits-10000-default]]
==== `track_total_hits` defaults to 10,000

By default, search requests will count the total hits accurately up to `10,000`
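To opt back into exact counts, a request can set the flag explicitly; a sketch:

[source,js]
----
GET /_search
{
  "track_total_hits": true,
  "query": { "match_all": {} }
}
----
// CONSOLE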

@ -10,6 +10,7 @@
// end::notable-breaking-changes[]

[float]
[[default-node-name-now-hostname]]
==== The default for `node.name` is now the hostname

`node.name` now defaults to the hostname at the time when Elasticsearch

@ -16,6 +16,7 @@ Snapshot stats details are provided in a new structured way:
* In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied.

[float]
[[snapshot-stats-deprecated]]
==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed

* Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`.
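Under the new structure, the replacement values live in the nested `total` object; an illustrative fragment (the values are placeholders, and the exact field names are an assumption here):

[source,js]
----
"stats": {
  "total": {
    "file_count": 10,
    "size_in_bytes": 10485760
  }
}
----
// NOTCONSOLE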

@ -19,11 +19,13 @@ To learn about monitoring in general, see

//NOTE: The tagged regions are re-used in the Stack Overview.

. Enable the collection of monitoring data. Set
`xpack.monitoring.collection.enabled` to `true` on each node in the production
cluster. By default, it is disabled (`false`).
+
. Enable the collection of monitoring data. +
+
--
// tag::enable-collection[]
Set `xpack.monitoring.collection.enabled` to `true` on each node in the
production cluster. By default, it is disabled (`false`).

NOTE: You can specify this setting in either the `elasticsearch.yml` on each
node or across the cluster as a dynamic cluster setting. If {es}
{security-features} are enabled, you must have `monitor` cluster privileges to
@ -43,15 +45,17 @@ PUT _cluster/settings
}
----------------------------------
// CONSOLE

// end::enable-collection[]
For more information, see <<monitoring-settings>> and <<cluster-update-settings>>.
--

. Disable the default collection of {es} monitoring metrics. Set
`xpack.monitoring.elasticsearch.collection.enabled` to `false` on each node in
the production cluster.
+
. Disable the default collection of {es} monitoring metrics. +
+
--
// tag::disable-default-collection[]
Set `xpack.monitoring.elasticsearch.collection.enabled` to `false` on each node
in the production cluster.

NOTE: You can specify this setting in either the `elasticsearch.yml` on each
node or across the cluster as a dynamic cluster setting. If {es}
{security-features} are enabled, you must have `monitor` cluster privileges to
@ -70,7 +74,8 @@ PUT _cluster/settings
----------------------------------
// CONSOLE

Leave `xpack.monitoring.enabled` set to its default value (`true`).
Leave `xpack.monitoring.enabled` set to its default value (`true`).
// end::disable-default-collection[]
--

. {metricbeat-ref}/metricbeat-installation.html[Install {metricbeat}] on each
@ -52,12 +52,12 @@ GET /_search
  "query": { <1>
    "bool": { <2>
      "must": [
        { "match": { "title": "Search" }}, <2>
        { "match": { "content": "Elasticsearch" }} <2>
        { "match": { "title": "Search" }},
        { "match": { "content": "Elasticsearch" }}
      ],
      "filter": [ <3>
        { "term": { "status": "published" }}, <4>
        { "range": { "publish_date": { "gte": "2015-01-01" }}} <4>
        { "term": { "status": "published" }},
        { "range": { "publish_date": { "gte": "2015-01-01" }}}
      ]
    }
  }
@ -68,11 +68,16 @@ GET /_search
<2> The `bool` and two `match` clauses are used in query context,
which means that they are used to score how well each document
matches.
<3> The `filter` parameter indicates filter context.
<4> The `term` and `range` clauses are used in filter context.
They will filter out documents which do not match, but they will
<3> The `filter` parameter indicates filter context. Its `term` and
`range` clauses are used in filter context. They will filter out
documents which do not match, but they will
not affect the score for matching documents.

WARNING: Scores calculated for queries in query context are represented
as single precision floating point numbers; they have only
24 bits for significand's precision. Score calculations that exceed the
significand's precision will be converted to floats with loss of precision.

TIP: Use query clauses in query context for conditions which should affect the
score of matching documents (i.e. how well does the document match), and use
all other query clauses in filter context.

@ -1,51 +1,67 @@
[[query-dsl-wildcard-query]]
=== Wildcard Query
Returns documents that contain terms matching a wildcard pattern.

Matches documents that have fields matching a wildcard expression (*not
analyzed*). Supported wildcards are `*`, which matches any character
sequence (including the empty one), and `?`, which matches any single
character. Note that this query can be slow, as it needs to iterate over many
terms. In order to prevent extremely slow wildcard queries, a wildcard
term should not start with one of the wildcards `*` or `?`. The wildcard
query maps to Lucene `WildcardQuery`.
A wildcard operator is a placeholder that matches one or more characters. For
example, the `*` wildcard operator matches zero or more characters. You can
combine wildcard operators with other characters to create a wildcard pattern.

[[wildcard-query-ex-request]]
==== Example request

The following search returns documents where the `user` field contains a term
that begins with `ki` and ends with `y`. These matching terms can include `kiy`,
`kity`, or `kimchy`.

[source,js]
--------------------------------------------------
----
GET /_search
{
    "query": {
        "wildcard" : { "user" : "ki*y" }
        "wildcard": {
            "user": {
                "value": "ki*y",
                "boost": 1.0,
                "rewrite": "constant_score"
            }
        }
    }
}
--------------------------------------------------
----
// CONSOLE

A boost can also be associated with the query:
[[wildcard-top-level-params]]
==== Top-level parameters for `wildcard`
`<field>`::
Field you wish to search.

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "wildcard" : { "user" : { "value" : "ki*y", "boost" : 2.0 } }
    }
}
--------------------------------------------------
// CONSOLE
[[wildcard-query-field-params]]
==== Parameters for `<field>`
`value`::
Wildcard pattern for terms you wish to find in the provided `<field>`.
+
--
This parameter supports two wildcard operators:

Or :
* `?`, which matches any single character
* `*`, which can match zero or more characters, including an empty one

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "wildcard" : { "user" : { "wildcard" : "ki*y", "boost" : 2.0 } }
    }
}
--------------------------------------------------
// CONSOLE
WARNING: Avoid beginning patterns with `*` or `?`. This can increase
the iterations needed to find matching terms and slow search performance.
--

This multi term query allows to control how it gets rewritten using the
<<query-dsl-multi-term-rewrite,rewrite>>
parameter.
`boost`::
Floating point number used to decrease or increase the
<<query-filter-context, relevance scores>> of a query. Default is `1.0`.
Optional.
+
You can use the `boost` parameter to adjust relevance scores for searches
containing two or more queries.
+
Boost values are relative to the default value of `1.0`. A boost value between
`0` and `1.0` decreases the relevance score. A value greater than `1.0`
increases the relevance score.

`rewrite` (Expert)::
Method used to rewrite the query. For valid values and more information, see the
<<query-dsl-multi-term-rewrite, `rewrite` parameter>>. Optional.
@ -283,5 +283,6 @@ For example: `["elasticsearch_version_mismatch","xpack_license_expiration"]`.
:component: {monitoring}
:verifies:
:server!:
:ssl-context: monitoring

include::ssl-settings.asciidoc[]

@ -85,6 +85,7 @@ corresponding endpoints are whitelisted as well.
:component: {watcher}
:verifies:
:server!:
:ssl-context: watcher

include::ssl-settings.asciidoc[]

@ -1582,6 +1582,7 @@ a PKCS#12 container includes trusted certificate ("anchor") entries look for
:client-auth-default: none
:verifies!:
:server:
:ssl-context: security-http

include::ssl-settings.asciidoc[]

@ -1591,6 +1592,7 @@ include::ssl-settings.asciidoc[]
:client-auth-default!:
:verifies:
:server:
:ssl-context: security-transport

include::ssl-settings.asciidoc[]

@ -1,4 +1,3 @@

==== {component} TLS/SSL Settings
You can configure the following TLS/SSL settings. If the settings are not configured,
the {ref}/security-settings.html#ssl-tls-settings[Default TLS/SSL Settings]
@ -39,7 +38,13 @@ endif::verifies[]
Supported cipher suites can be found in Oracle's http://docs.oracle.com/javase/8/docs/technotes/guides/security/SunProviders.html[
Java Cryptography Architecture documentation]. Defaults to ``.

ifdef::asciidoctor[]
[#{ssl-context}-tls-ssl-key-trusted-certificate-settings]
===== {component} TLS/SSL Key and Trusted Certificate Settings
endif::[]
ifndef::asciidoctor[]
===== anchor:{ssl-context}-tls-ssl-key-trusted-certificate-settings[] {component} TLS/SSL Key and Trusted Certificate Settings
endif::[]

The following settings are used to specify a private key, certificate, and the
trusted certificates that should be used when communicating over an SSL/TLS connection.
@ -105,7 +110,13 @@ Password to the truststore.
+{ssl-prefix}.ssl.truststore.secure_password+ (<<secure-settings,Secure>>)::
Password to the truststore.

ifdef::asciidoctor[]
[#{ssl-context}-pkcs12-files]
===== PKCS#12 Files
endif::[]
ifndef::asciidoctor[]
===== anchor:{ssl-context}-pkcs12-files[] PKCS#12 Files
endif::[]

{es} can be configured to use PKCS#12 container files (`.p12` or `.pfx` files)
that contain the private key, certificate and certificates that should be trusted.
@ -143,7 +154,13 @@ Password to the PKCS#12 file.
+{ssl-prefix}.ssl.truststore.secure_password+ (<<secure-settings,Secure>>)::
Password to the PKCS#12 file.

ifdef::asciidoctor[]
[#{ssl-context}-pkcs11-tokens]
===== PKCS#11 Tokens
endif::[]
ifndef::asciidoctor[]
===== anchor:{ssl-context}-pkcs11-tokens[] PKCS#11 Tokens
endif::[]

{es} can be configured to use a PKCS#11 token that contains the private key,
certificate and certificates that should be trusted.
@ -56,13 +56,17 @@ public class WellKnownText {
    private static final String EOF = "END-OF-STREAM";
    private static final String EOL = "END-OF-LINE";

    public static String toWKT(Geometry geometry) {
    public WellKnownText() {
    }

    public String toWKT(Geometry geometry) {
        StringBuilder builder = new StringBuilder();
        toWKT(geometry, builder);
        return builder.toString();
    }

    public static void toWKT(Geometry geometry, StringBuilder sb) {
    public void toWKT(Geometry geometry, StringBuilder sb) {
        sb.append(getWKTName(geometry));
        sb.append(SPACE);
        if (geometry.isEmpty()) {
@ -216,7 +220,7 @@ public class WellKnownText {
        }
    }

    public static Geometry fromWKT(String wkt) throws IOException, ParseException {
    public Geometry fromWKT(String wkt) throws IOException, ParseException {
        StringReader reader = new StringReader(wkt);
        try {
            // setup the tokenizer; configured to read words w/o numbers
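With the move away from statics, callers create a `WellKnownText` instance and reuse it; a usage sketch grounded in the updated tests below (checked exceptions elided):

[source,java]
----
WellKnownText wkt = new WellKnownText();
String text = wkt.toWKT(new Point(10, 20));   // "point (20.0 10.0)"
Geometry parsed = wkt.fromWKT(text);          // round-trips back to the point
----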

@ -53,9 +53,10 @@ abstract class BaseGeometryTestCase<T extends Geometry> extends AbstractWireTest
    @SuppressWarnings("unchecked")
    @Override
    protected T copyInstance(T instance, Version version) throws IOException {
        String text = WellKnownText.toWKT(instance);
        WellKnownText wkt = new WellKnownText();
        String text = wkt.toWKT(instance);
        try {
            return (T) WellKnownText.fromWKT(text);
            return (T) wkt.fromWKT(text);
        } catch (ParseException e) {
            throw new ElasticsearchException(e);
        }

@ -36,14 +36,15 @@ public class CircleTests extends BaseGeometryTestCase<Circle> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        assertEquals("circle (20.0 10.0 15.0)", WellKnownText.toWKT(new Circle(10, 20, 15)));
        assertEquals(new Circle(10, 20, 15), WellKnownText.fromWKT("circle (20.0 10.0 15.0)"));
        WellKnownText wkt = new WellKnownText();
        assertEquals("circle (20.0 10.0 15.0)", wkt.toWKT(new Circle(10, 20, 15)));
        assertEquals(new Circle(10, 20, 15), wkt.fromWKT("circle (20.0 10.0 15.0)"));

        assertEquals("circle (20.0 10.0 15.0 25.0)", WellKnownText.toWKT(new Circle(10, 20, 25, 15)));
        assertEquals(new Circle(10, 20, 25, 15), WellKnownText.fromWKT("circle (20.0 10.0 15.0 25.0)"));
        assertEquals("circle (20.0 10.0 15.0 25.0)", wkt.toWKT(new Circle(10, 20, 25, 15)));
        assertEquals(new Circle(10, 20, 25, 15), wkt.fromWKT("circle (20.0 10.0 15.0 25.0)"));

        assertEquals("circle EMPTY", WellKnownText.toWKT(Circle.EMPTY));
        assertEquals(Circle.EMPTY, WellKnownText.fromWKT("circle EMPTY)"));
        assertEquals("circle EMPTY", wkt.toWKT(Circle.EMPTY));
        assertEquals(Circle.EMPTY, wkt.fromWKT("circle EMPTY)"));
    }

    public void testInitValidation() {
@ -35,14 +35,15 @@ public class GeometryCollectionTests extends BaseGeometryTestCase<GeometryCollec

    public void testBasicSerialization() throws IOException, ParseException {
        WellKnownText wkt = new WellKnownText();
        assertEquals("geometrycollection (point (20.0 10.0),point EMPTY)",
            WellKnownText.toWKT(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY))));
            wkt.toWKT(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY))));

        assertEquals(new GeometryCollection<Geometry>(Arrays.asList(new Point(10, 20), Point.EMPTY)),
            WellKnownText.fromWKT("geometrycollection (point (20.0 10.0),point EMPTY)"));
            wkt.fromWKT("geometrycollection (point (20.0 10.0),point EMPTY)"));

        assertEquals("geometrycollection EMPTY", WellKnownText.toWKT(GeometryCollection.EMPTY));
        assertEquals(GeometryCollection.EMPTY, WellKnownText.fromWKT("geometrycollection EMPTY)"));
        assertEquals("geometrycollection EMPTY", wkt.toWKT(GeometryCollection.EMPTY));
        assertEquals(GeometryCollection.EMPTY, wkt.fromWKT("geometrycollection EMPTY)"));
    }

    @SuppressWarnings("ConstantConditions")

@ -31,16 +31,17 @@ public class LineTests extends BaseGeometryTestCase<Line> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        assertEquals("linestring (3.0 1.0, 4.0 2.0)", WellKnownText.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4})));
        assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}), WellKnownText.fromWKT("linestring (3 1, 4 2)"));
        WellKnownText wkt = new WellKnownText();
        assertEquals("linestring (3.0 1.0, 4.0 2.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4})));
        assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}), wkt.fromWKT("linestring (3 1, 4 2)"));

        assertEquals("linestring (3.0 1.0 5.0, 4.0 2.0 6.0)", WellKnownText.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4},
        assertEquals("linestring (3.0 1.0 5.0, 4.0 2.0 6.0)", wkt.toWKT(new Line(new double[]{1, 2}, new double[]{3, 4},
            new double[]{5, 6})));
        assertEquals(new Line(new double[]{1, 2}, new double[]{3, 4}, new double[]{6, 5}),
            WellKnownText.fromWKT("linestring (3 1 6, 4 2 5)"));
            wkt.fromWKT("linestring (3 1 6, 4 2 5)"));

        assertEquals("linestring EMPTY", WellKnownText.toWKT(Line.EMPTY));
        assertEquals(Line.EMPTY, WellKnownText.fromWKT("linestring EMPTY)"));
        assertEquals("linestring EMPTY", wkt.toWKT(Line.EMPTY));
        assertEquals(Line.EMPTY, wkt.fromWKT("linestring EMPTY)"));
    }

    public void testInitValidation() {

@ -26,7 +26,7 @@ public class LinearRingTests extends ESTestCase {

    public void testBasicSerialization() {
        UnsupportedOperationException ex = expectThrows(UnsupportedOperationException.class,
            () -> WellKnownText.toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})));
            () -> new WellKnownText().toWKT(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})));
        assertEquals("line ring cannot be serialized using WKT", ex.getMessage());
    }
@ -40,12 +40,13 @@ public class MultiLineTests extends BaseGeometryTestCase<MultiLine> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        assertEquals("multilinestring ((3.0 1.0, 4.0 2.0))", WellKnownText.toWKT(
        WellKnownText wkt = new WellKnownText();
        assertEquals("multilinestring ((3.0 1.0, 4.0 2.0))", wkt.toWKT(
            new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4})))));
        assertEquals(new MultiLine(Collections.singletonList(new Line(new double[]{1, 2}, new double[]{3, 4}))),
            WellKnownText.fromWKT("multilinestring ((3 1, 4 2))"));
            wkt.fromWKT("multilinestring ((3 1, 4 2))"));

        assertEquals("multilinestring EMPTY", WellKnownText.toWKT(MultiLine.EMPTY));
        assertEquals(MultiLine.EMPTY, WellKnownText.fromWKT("multilinestring EMPTY)"));
        assertEquals("multilinestring EMPTY", wkt.toWKT(MultiLine.EMPTY));
        assertEquals(MultiLine.EMPTY, wkt.fromWKT("multilinestring EMPTY)"));
    }
}

@ -41,22 +41,23 @@ public class MultiPointTests extends BaseGeometryTestCase<MultiPoint> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        assertEquals("multipoint (2.0 1.0)", WellKnownText.toWKT(
        WellKnownText wkt = new WellKnownText();
        assertEquals("multipoint (2.0 1.0)", wkt.toWKT(
            new MultiPoint(Collections.singletonList(new Point(1, 2)))));
        assertEquals(new MultiPoint(Collections.singletonList(new Point(1, 2))),
            WellKnownText.fromWKT("multipoint (2 1)"));
            wkt.fromWKT("multipoint (2 1)"));

        assertEquals("multipoint (2.0 1.0, 3.0 4.0)",
            WellKnownText.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3)))));
            wkt.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3)))));
        assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2), new Point(4, 3))),
            WellKnownText.fromWKT("multipoint (2 1, 3 4)"));
            wkt.fromWKT("multipoint (2 1, 3 4)"));

        assertEquals("multipoint (2.0 1.0 10.0, 3.0 4.0 20.0)",
            WellKnownText.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20)))));
            wkt.toWKT(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20)))));
        assertEquals(new MultiPoint(Arrays.asList(new Point(1, 2, 10), new Point(4, 3, 20))),
            WellKnownText.fromWKT("multipoint (2 1 10, 3 4 20)"));
            wkt.fromWKT("multipoint (2 1 10, 3 4 20)"));

        assertEquals("multipoint EMPTY", WellKnownText.toWKT(MultiPoint.EMPTY));
        assertEquals(MultiPoint.EMPTY, WellKnownText.fromWKT("multipoint EMPTY)"));
        assertEquals("multipoint EMPTY", wkt.toWKT(MultiPoint.EMPTY));
        assertEquals(MultiPoint.EMPTY, wkt.fromWKT("multipoint EMPTY)"));
    }
}

@ -40,14 +40,15 @@ public class MultiPolygonTests extends BaseGeometryTestCase<MultiPolygon> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        WellKnownText wkt = new WellKnownText();
        assertEquals("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))",
            WellKnownText.toWKT(new MultiPolygon(Collections.singletonList(
            wkt.toWKT(new MultiPolygon(Collections.singletonList(
                new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))))));
        assertEquals(new MultiPolygon(Collections.singletonList(
            new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})))),
            WellKnownText.fromWKT("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))"));
            wkt.fromWKT("multipolygon (((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0)))"));

        assertEquals("multipolygon EMPTY", WellKnownText.toWKT(MultiPolygon.EMPTY));
        assertEquals(MultiPolygon.EMPTY, WellKnownText.fromWKT("multipolygon EMPTY)"));
        assertEquals("multipolygon EMPTY", wkt.toWKT(MultiPolygon.EMPTY));
        assertEquals(MultiPolygon.EMPTY, wkt.fromWKT("multipolygon EMPTY)"));
    }
}
@ -31,14 +31,15 @@ public class PointTests extends BaseGeometryTestCase<Point> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        assertEquals("point (20.0 10.0)", WellKnownText.toWKT(new Point(10, 20)));
        assertEquals(new Point(10, 20), WellKnownText.fromWKT("point (20.0 10.0)"));
        WellKnownText wkt = new WellKnownText();
        assertEquals("point (20.0 10.0)", wkt.toWKT(new Point(10, 20)));
        assertEquals(new Point(10, 20), wkt.fromWKT("point (20.0 10.0)"));

        assertEquals("point (20.0 10.0 100.0)", WellKnownText.toWKT(new Point(10, 20, 100)));
        assertEquals(new Point(10, 20, 100), WellKnownText.fromWKT("point (20.0 10.0 100.0)"));
        assertEquals("point (20.0 10.0 100.0)", wkt.toWKT(new Point(10, 20, 100)));
        assertEquals(new Point(10, 20, 100), wkt.fromWKT("point (20.0 10.0 100.0)"));

        assertEquals("point EMPTY", WellKnownText.toWKT(Point.EMPTY));
        assertEquals(Point.EMPTY, WellKnownText.fromWKT("point EMPTY)"));
        assertEquals("point EMPTY", wkt.toWKT(Point.EMPTY));
        assertEquals(Point.EMPTY, wkt.fromWKT("point EMPTY)"));
    }

    public void testInitValidation() {

@ -32,18 +32,19 @@ public class PolygonTests extends BaseGeometryTestCase<Polygon> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        WellKnownText wkt = new WellKnownText();
        assertEquals("polygon ((3.0 1.0, 4.0 2.0, 5.0 3.0, 3.0 1.0))",
            WellKnownText.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))));
            wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}))));
        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3})),
            WellKnownText.fromWKT("polygon ((3 1, 4 2, 5 3, 3 1))"));
            wkt.fromWKT("polygon ((3 1, 4 2, 5 3, 3 1))"));

        assertEquals("polygon ((3.0 1.0 5.0, 4.0 2.0 4.0, 5.0 3.0 3.0, 3.0 1.0 5.0))",
            WellKnownText.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5}))));
            wkt.toWKT(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5}))));
        assertEquals(new Polygon(new LinearRing(new double[]{1, 2, 3, 1}, new double[]{3, 4, 5, 3}, new double[]{5, 4, 3, 5})),
            WellKnownText.fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))"));
            wkt.fromWKT("polygon ((3 1 5, 4 2 4, 5 3 3, 3 1 5))"));

        assertEquals("polygon EMPTY", WellKnownText.toWKT(Polygon.EMPTY));
        assertEquals(Polygon.EMPTY, WellKnownText.fromWKT("polygon EMPTY)"));
        assertEquals("polygon EMPTY", wkt.toWKT(Polygon.EMPTY));
        assertEquals(Polygon.EMPTY, wkt.fromWKT("polygon EMPTY)"));
    }

    public void testInitValidation() {

@ -32,11 +32,12 @@ public class RectangleTests extends BaseGeometryTestCase<Rectangle> {
    }

    public void testBasicSerialization() throws IOException, ParseException {
        assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", WellKnownText.toWKT(new Rectangle(30, 40, 10, 20)));
        assertEquals(new Rectangle(30, 40, 10, 20), WellKnownText.fromWKT("bbox (10.0, 20.0, 40.0, 30.0)"));
        WellKnownText wkt = new WellKnownText();
        assertEquals("bbox (10.0, 20.0, 40.0, 30.0)", wkt.toWKT(new Rectangle(30, 40, 10, 20)));
        assertEquals(new Rectangle(30, 40, 10, 20), wkt.fromWKT("bbox (10.0, 20.0, 40.0, 30.0)"));

        assertEquals("bbox EMPTY", WellKnownText.toWKT(Rectangle.EMPTY));
        assertEquals(Rectangle.EMPTY, WellKnownText.fromWKT("bbox EMPTY)"));
        assertEquals("bbox EMPTY", wkt.toWKT(Rectangle.EMPTY));
        assertEquals(Rectangle.EMPTY, wkt.fromWKT("bbox EMPTY)"));
    }

    public void testInitValidation() {
@ -38,20 +38,37 @@ class ExpressionAggregationScript implements AggregationScript.LeafFactory {
    final Expression exprScript;
    final SimpleBindings bindings;
    final DoubleValuesSource source;
    final boolean needsScore;
    final ReplaceableConstDoubleValueSource specialValue; // _value

    ExpressionAggregationScript(Expression e, SimpleBindings b, ReplaceableConstDoubleValueSource v) {
    ExpressionAggregationScript(Expression e, SimpleBindings b, boolean n, ReplaceableConstDoubleValueSource v) {
        exprScript = e;
        bindings = b;
        source = exprScript.getDoubleValuesSource(bindings);
        needsScore = n;
        specialValue = v;
    }

    @Override
    public boolean needs_score() {
        return needsScore;
    }

    @Override
    public AggregationScript newInstance(final LeafReaderContext leaf) throws IOException {
        return new AggregationScript() {
            // Fake the scorer until setScorer is called.
            DoubleValues values = source.getValues(leaf, null);
            DoubleValues values = source.getValues(leaf, new DoubleValues() {
                @Override
                public double doubleValue() throws IOException {
                    return get_score().doubleValue();
                }

                @Override
                public boolean advanceExact(int doc) throws IOException {
                    return true;
                }
            });

            @Override
            public Object execute() {
@ -84,10 +101,4 @@ class ExpressionAggregationScript implements AggregationScript.LeafFactory {
            }
        };
    }

    @Override
    public boolean needs_score() {
        return false;
    }

}

@ -221,10 +221,14 @@ public class ExpressionScriptEngine implements ScriptEngine {
        // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings,
        // instead of complicating SimpleBindings (which should stay simple)
        SimpleBindings bindings = new SimpleBindings();
        boolean needsScores = false;
        ReplaceableConstDoubleValueSource specialValue = null;
        for (String variable : expr.variables) {
            try {
                if (variable.equals("_value")) {
                if (variable.equals("_score")) {
                    bindings.add(new SortField("_score", SortField.Type.SCORE));
                    needsScores = true;
                } else if (variable.equals("_value")) {
                    specialValue = new ReplaceableConstDoubleValueSource();
                    bindings.add("_value", specialValue);
                    // noop: _value is special for aggregations, and is handled in ExpressionScriptBindings
@ -237,6 +241,7 @@ public class ExpressionScriptEngine implements ScriptEngine {
                    // delegate valuesource creation based on field's type
                    // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods.
                    final ValueSource valueSource = getDocValueSource(variable, lookup);
                    needsScores |= valueSource.getSortField(false).needsScores();
                    bindings.add(variable, valueSource.asDoubleValuesSource());
                }
            } catch (Exception e) {
@ -244,7 +249,7 @@ public class ExpressionScriptEngine implements ScriptEngine {
                throw convertToScriptException("link error", expr.sourceText, variable, e);
            }
        }
        return new ExpressionAggregationScript(expr, bindings, specialValue);
        return new ExpressionAggregationScript(expr, bindings, needsScores, specialValue);
    }

    private FieldScript.LeafFactory newFieldScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) {
@ -28,8 +28,8 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
@ -120,7 +120,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
            client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"),
            client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"),
            client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye"));
        ScoreFunctionBuilder<?> score = ScoreFunctionBuilders.scriptFunction(
        ScriptScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction(
            new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
        SearchRequestBuilder req = client().prepareSearch().setIndices("test");
        req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
@ -132,6 +132,15 @@ public class MoreExpressionTests extends ESIntegTestCase {
        assertEquals("1", hits.getAt(0).getId());
        assertEquals("3", hits.getAt(1).getId());
        assertEquals("2", hits.getAt(2).getId());

        req = client().prepareSearch().setIndices("test");
        req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE));
        score = ScoreFunctionBuilders.scriptFunction(
            new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()));
        req.addAggregation(AggregationBuilders.max("max_score").script((score).getScript()));
        req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent
        rsp = req.get();
        assertSearchResponse(rsp);
    }

    public void testDateMethods() throws Exception {

@ -25,9 +25,25 @@ setup:
          rest_total_hits_as_int: true
          body:
            script_fields:
              my_field :
              my_field:
                script:
                  lang: expression
                  source: 'doc["age"].value + 19'

  - match: { hits.hits.0.fields.my_field.0: 42.0 }
  - match: { hits.hits.0.fields.my_field.0: 42.0 }

---
"Expressions aggregation score test":

  - do:
      search:
        rest_total_hits_as_int: true
        body:
          aggs:
            max_score:
              max:
                script:
                  lang: expression
                  source: '_score'

  - match: { aggregations.max_score.value: 1.0 }
@ -23,8 +23,8 @@ import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.FixedRecvByteBufAllocator;
@ -351,7 +351,7 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport {
    }

    @ChannelHandler.Sharable
    private static class ServerChannelExceptionHandler extends ChannelHandlerAdapter {
    private static class ServerChannelExceptionHandler extends ChannelInboundHandlerAdapter {

        private final Netty4HttpServerTransport transport;

@ -35,7 +35,6 @@ import org.elasticsearch.plugins.NetworkPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.netty4.Netty4Transport;
import org.elasticsearch.transport.netty4.Netty4Utils;

import java.util.Arrays;
import java.util.Collections;
@ -45,10 +44,6 @@ import java.util.function.Supplier;

public class Netty4Plugin extends Plugin implements NetworkPlugin {

    static {
        Netty4Utils.setup();
    }

    public static final String NETTY_TRANSPORT_NAME = "netty4";
    public static final String NETTY_HTTP_TRANSPORT_NAME = "netty4";

@ -25,8 +25,8 @@ import io.netty.channel.AdaptiveRecvByteBufAllocator;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.FixedRecvByteBufAllocator;
@ -315,7 +315,7 @@ public class Netty4Transport extends TcpTransport {
    }

    @ChannelHandler.Sharable
    private class ServerChannelExceptionHandler extends ChannelHandlerAdapter {
    private class ServerChannelExceptionHandler extends ChannelInboundHandlerAdapter {

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
@ -36,6 +36,9 @@ setup:

---
"pre_filter_shard_size with invalid parameter":
  - skip:
      version: "all"
      reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42679"
  - do:
      catch: /preFilterShardSize must be >= 1/
      search:
@ -45,6 +48,9 @@ setup:

---
"pre_filter_shard_size with shards that have no hit":
  - skip:
      version: "all"
      reason: "AwaitsFix https://github.com/elastic/elasticsearch/issues/42679"
  - do:
      index:
        index: index_1

@ -140,6 +140,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
    public static final Version V_7_1_0 = new Version(V_7_1_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
    public static final int V_7_1_1_ID = 7010199;
    public static final Version V_7_1_1 = new Version(V_7_1_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
    public static final int V_7_1_2_ID = 7010299;
    public static final Version V_7_1_2 = new Version(V_7_1_2_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
    public static final int V_7_2_0_ID = 7020099;
    public static final Version V_7_2_0 = new Version(V_7_2_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
    public static final int V_7_3_0_ID = 7030099;
@ -161,6 +163,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
            return V_7_3_0;
        case V_7_2_0_ID:
            return V_7_2_0;
        case V_7_1_2_ID:
            return V_7_1_2;
        case V_7_1_1_ID:
            return V_7_1_1;
        case V_7_1_0_ID:
@ -378,7 +378,9 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
    }

    /**
     * Indicates if the total hit count for the query should be tracked. Defaults to {@code true}
     * Indicates if the total hit count for the query should be tracked. Requests will count total hit count accurately
     * up to 10,000 by default, see {@link #setTrackTotalHitsUpTo(int)} to change this value or set to true/false to always/never
     * count accurately.
     */
    public SearchRequestBuilder setTrackTotalHits(boolean trackTotalHits) {
        sourceBuilder().trackTotalHits(trackTotalHits);
@ -386,7 +388,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
    }

    /**
     * Indicates if the total hit count for the query should be tracked. Defaults to {@code true}
     * Indicates the total hit count that should be tracked accurately or null if the value is unset. Defaults to 10,000.
     */
    public SearchRequestBuilder setTrackTotalHitsUpTo(int trackTotalHitsUpTo) {
        sourceBuilder().trackTotalHitsUpTo(trackTotalHitsUpTo);
@ -170,7 +170,7 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery
            new HandshakingTransportAddressConnector(settings, transportService), configuredHostsResolver);
        this.publicationHandler = new PublicationTransportHandler(transportService, namedWriteableRegistry,
            this::handlePublishRequest, this::handleApplyCommit);
        this.leaderChecker = new LeaderChecker(settings, transportService, getOnLeaderFailure());
        this.leaderChecker = new LeaderChecker(settings, transportService, this::onLeaderFailure);
        this.followersChecker = new FollowersChecker(settings, transportService, this::onFollowerCheckRequest, this::removeNode);
        this.nodeRemovalExecutor = new NodeRemovalClusterStateTaskExecutor(allocationService, logger);
        this.clusterApplier = clusterApplier;
@ -191,20 +191,14 @@ public class Coordinator extends AbstractLifecycleComponent implements Discovery
            StreamSupport.stream(peerFinder.getFoundPeers().spliterator(), false).collect(Collectors.toList()), getCurrentTerm());
    }

    private Runnable getOnLeaderFailure() {
        return new Runnable() {
            @Override
            public void run() {
                synchronized (mutex) {
                    becomeCandidate("onLeaderFailure");
                }
    private void onLeaderFailure(Exception e) {
        synchronized (mutex) {
            if (mode != Mode.CANDIDATE) {
                assert lastKnownLeader.isPresent();
                logger.info(new ParameterizedMessage("master node [{}] failed, restarting discovery", lastKnownLeader.get()), e);
            }

            @Override
            public String toString() {
                return "notification of leader failure";
            }
        };
            becomeCandidate("onLeaderFailure");
        }
    }

    private void removeNode(DiscoveryNode discoveryNode, String reason) {
@ -23,6 +23,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Nullable;
@ -35,6 +36,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.discovery.zen.MasterFaultDetection;
import org.elasticsearch.threadpool.ThreadPool.Names;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.NodeDisconnectedException;
import org.elasticsearch.transport.TransportConnectionListener;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
@ -50,6 +52,7 @@ import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

/**
 * The LeaderChecker is responsible for allowing followers to check that the currently elected leader is still connected and healthy. We are
@ -83,13 +86,13 @@ public class LeaderChecker {
    private final TimeValue leaderCheckTimeout;
    private final int leaderCheckRetryCount;
    private final TransportService transportService;
    private final Runnable onLeaderFailure;
    private final Consumer<Exception> onLeaderFailure;

    private AtomicReference<CheckScheduler> currentChecker = new AtomicReference<>();

    private volatile DiscoveryNodes discoveryNodes;

    public LeaderChecker(final Settings settings, final TransportService transportService, final Runnable onLeaderFailure) {
    public LeaderChecker(final Settings settings, final TransportService transportService, final Consumer<Exception> onLeaderFailure) {
        this.settings = settings;
        leaderCheckInterval = LEADER_CHECK_INTERVAL_SETTING.get(settings);
        leaderCheckTimeout = LEADER_CHECK_TIMEOUT_SETTING.get(settings);
@ -260,16 +263,19 @@ public class LeaderChecker {
            }

            if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) {
                logger.debug(new ParameterizedMessage("leader [{}] disconnected, failing immediately", leader), exp);
                leaderFailed();
                logger.debug(new ParameterizedMessage(
                    "leader [{}] disconnected during check", leader), exp);
                leaderFailed(new ConnectTransportException(leader, "disconnected during check", exp));
                return;
            }

            long failureCount = failureCountSinceLastSuccess.incrementAndGet();
            if (failureCount >= leaderCheckRetryCount) {
                logger.debug(new ParameterizedMessage("{} consecutive failures (limit [{}] is {}) so leader [{}] has failed",
                    failureCount, LEADER_CHECK_RETRY_COUNT_SETTING.getKey(), leaderCheckRetryCount, leader), exp);
                leaderFailed();
                logger.debug(new ParameterizedMessage(
                    "leader [{}] has failed {} consecutive checks (limit [{}] is {}); last failure was:",
                    leader, failureCount, LEADER_CHECK_RETRY_COUNT_SETTING.getKey(), leaderCheckRetryCount), exp);
                leaderFailed(new ElasticsearchException(
                    "node [" + leader + "] failed [" + failureCount + "] consecutive checks", exp));
                return;
            }

@ -285,9 +291,19 @@ public class LeaderChecker {
        });
    }

    void leaderFailed() {
    void leaderFailed(Exception e) {
        if (isClosed.compareAndSet(false, true)) {
            transportService.getThreadPool().generic().execute(onLeaderFailure);
            transportService.getThreadPool().generic().execute(new Runnable() {
                @Override
                public void run() {
                    onLeaderFailure.accept(e);
                }

                @Override
                public String toString() {
                    return "notification of leader failure: " + e.getMessage();
                }
            });
        } else {
            logger.trace("already closed, not failing leader");
        }
@ -295,7 +311,8 @@ public class LeaderChecker {

    void handleDisconnectedNode(DiscoveryNode discoveryNode) {
        if (discoveryNode.equals(leader)) {
            leaderFailed();
            logger.debug("leader [{}] disconnected", leader);
            leaderFailed(new NodeDisconnectedException(discoveryNode, "disconnected"));
        }
    }
@ -64,14 +64,20 @@ public final class GeoJson {
    private static final ParseField FIELD_ORIENTATION = new ParseField("orientation");
    private static final ParseField FIELD_RADIUS = new ParseField("radius");

    private GeoJson() {
    private final boolean rightOrientation;
    private final boolean coerce;
    private final boolean ignoreZValue;

    public GeoJson(boolean rightOrientation, boolean coerce, boolean ignoreZValue) {
        this.rightOrientation = rightOrientation;
        this.coerce = coerce;
        this.ignoreZValue = ignoreZValue;
    }

    public static Geometry fromXContent(XContentParser parser, boolean rightOrientation, boolean coerce, boolean ignoreZValue)
    public Geometry fromXContent(XContentParser parser)
        throws IOException {
        try (XContentSubParser subParser = new XContentSubParser(parser)) {
            return PARSER.apply(subParser, new ParserContext(rightOrientation, coerce, ignoreZValue));
            return PARSER.apply(subParser, this);
        }
    }

@ -197,26 +203,14 @@ public final class GeoJson {
        return builder.endObject();
    }

    private static class ParserContext {
        public final boolean defaultOrientation;
        public final boolean coerce;
        public final boolean ignoreZValue;

        ParserContext(boolean defaultOrientation, boolean coerce, boolean ignoreZValue) {
            this.defaultOrientation = defaultOrientation;
            this.coerce = coerce;
            this.ignoreZValue = ignoreZValue;
        }
    }

    private static ConstructingObjectParser<Geometry, ParserContext> PARSER =
    private static ConstructingObjectParser<Geometry, GeoJson> PARSER =
        new ConstructingObjectParser<>("geojson", true, (a, c) -> {
            String type = (String) a[0];
            CoordinateNode coordinates = (CoordinateNode) a[1];
            @SuppressWarnings("unchecked") List<Geometry> geometries = (List<Geometry>) a[2];
            Boolean orientation = orientationFromString((String) a[3]);
            DistanceUnit.Distance radius = (DistanceUnit.Distance) a[4];
            return createGeometry(type, geometries, coordinates, orientation, c.defaultOrientation, c.coerce, radius);
            return createGeometry(type, geometries, coordinates, orientation, c.rightOrientation, c.coerce, radius);
        });

    static {

@ -32,22 +32,26 @@ import java.text.ParseException;
 */
public final class GeometryParser {

    private GeometryParser() {
    private final GeoJson geoJsonParser;
    private final WellKnownText wellKnownTextParser;

    public GeometryParser(boolean rightOrientation, boolean coerce, boolean ignoreZValue) {
        geoJsonParser = new GeoJson(rightOrientation, coerce, ignoreZValue);
        wellKnownTextParser = new WellKnownText();
    }

    /**
     * Parses supplied XContent into Geometry
     */
    public static Geometry parse(XContentParser parser, boolean orientation, boolean coerce, boolean ignoreZValue) throws IOException,
    public Geometry parse(XContentParser parser) throws IOException,
        ParseException {
        if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
            return null;
        } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
            return GeoJson.fromXContent(parser, orientation, coerce, ignoreZValue);
            return geoJsonParser.fromXContent(parser);
        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
            // TODO: Add support for ignoreZValue and coerce to WKT
            return WellKnownText.fromWKT(parser.text());
            return wellKnownTextParser.fromWKT(parser.text());
        }
        throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates");
    }
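With the constructor-based configuration, callers build a parser once and reuse it for both GeoJSON and WKT input; a sketch, assuming an `XContentParser` named `parser` is in scope and the flag values are placeholders:

[source,java]
----
GeometryParser geometryParser = new GeometryParser(true, false, false); // right orientation, no coercion, reject z values
Geometry geometry = geometryParser.parse(parser); // dispatches to GeoJson or WellKnownText
----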
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.discovery;
|
|||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
|
@ -89,6 +90,13 @@ public class HandshakingTransportAddressConnector implements TransportAddressCon
|
|||
remoteNode = transportService.handshake(connection, probeHandshakeTimeout.millis());
|
||||
// success means (amongst other things) that the cluster names match
|
||||
logger.trace("[{}] handshake successful: {}", this, remoteNode);
|
||||
} catch (Exception e) {
|
||||
// we opened a connection and successfully performed a low-level handshake, so we were definitely talking to an
|
||||
// Elasticsearch node, but the high-level handshake failed indicating some kind of mismatched configurations
|
||||
// (e.g. cluster name) that the user should address
|
||||
logger.warn(new ParameterizedMessage("handshake failed for [{}]", this), e);
|
||||
listener.onFailure(e);
|
||||
return;
|
||||
} finally {
|
||||
IOUtils.closeWhileHandlingException(connection);
|
||||
}
|
||||
|
|
|
@@ -52,9 +52,12 @@ import static org.elasticsearch.cluster.coordination.LeaderChecker.LEADER_CHECK_
import static org.elasticsearch.node.Node.NODE_NAME_SETTING;
import static org.elasticsearch.transport.TransportService.HANDSHAKE_ACTION_NAME;
import static org.elasticsearch.transport.TransportService.NOOP_TRANSPORT_INTERCEPTOR;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.matchesRegex;
import static org.hamcrest.Matchers.nullValue;

public class LeaderCheckerTests extends ESTestCase {

@@ -146,7 +149,10 @@ public class LeaderCheckerTests extends ESTestCase {
        final AtomicBoolean leaderFailed = new AtomicBoolean();

        final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService,
            () -> assertTrue(leaderFailed.compareAndSet(false, true)));
            e -> {
                assertThat(e.getMessage(), matchesRegex("node \\[.*\\] failed \\[[1-9][0-9]*\\] consecutive checks"));
                assertTrue(leaderFailed.compareAndSet(false, true));
            });

        logger.info("--> creating first checker");
        leaderChecker.updateLeader(leader1);

@@ -247,7 +253,10 @@ public class LeaderCheckerTests extends ESTestCase {

        final AtomicBoolean leaderFailed = new AtomicBoolean();
        final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService,
            () -> assertTrue(leaderFailed.compareAndSet(false, true)));
            e -> {
                assertThat(e.getMessage(), anyOf(endsWith("disconnected"), endsWith("disconnected during check")));
                assertTrue(leaderFailed.compareAndSet(false, true));
            });

        leaderChecker.updateLeader(leader);
        {

@@ -316,7 +325,7 @@ public class LeaderCheckerTests extends ESTestCase {
        transportService.start();
        transportService.acceptIncomingRequests();

        final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService, () -> fail("shouldn't be checking anything"));
        final LeaderChecker leaderChecker = new LeaderChecker(settings, transportService, e -> fail("shouldn't be checking anything"));

        final DiscoveryNodes discoveryNodes
            = DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId()).build();

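The test changes above track a signature change in LeaderChecker: the leader-failure callback goes from a no-argument Runnable to a handler that receives the failure cause, which is why the updated tests can assert on e.getMessage(). A simplified sketch of the two callback shapes, using plain java.util.function types as stand-ins for the actual Elasticsearch signature:

import java.util.function.Consumer;

class LeaderFailureCallbackSketch {
    // Before: the callback learns only *that* the leader failed
    static final Runnable OLD_STYLE =
        () -> System.out.println("leader failed");

    // After: the callback also learns *why*, e.g.
    // "node [...] failed [3] consecutive checks" or "... disconnected"
    static final Consumer<Exception> NEW_STYLE =
        e -> System.out.println("leader failed: " + e.getMessage());
}
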
@@ -70,7 +70,7 @@ abstract class BaseGeoParsingTestCase extends ESTestCase {
    protected void assertGeometryEquals(org.elasticsearch.geo.geometry.Geometry expected, XContentBuilder geoJson) throws IOException {
        try (XContentParser parser = createParser(geoJson)) {
            parser.nextToken();
            assertEquals(expected, GeoJson.fromXContent(parser, true, false, false));
            assertEquals(expected, new GeoJson(true, false, false).fromXContent(parser));
        }
    }

@@ -72,7 +72,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
        Line expected = new Line(new double[] {0.0, 1.0}, new double[] { 100.0, 101.0});
        try (XContentParser parser = createParser(lineGeoJson)) {
            parser.nextToken();
            assertEquals(expected, GeoJson.fromXContent(parser, false, false, true));
            assertEquals(expected, new GeoJson(false, false, true).fromXContent(parser));
        }
    }

@@ -124,7 +124,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(pointGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -140,7 +140,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(lineGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -178,7 +178,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(multilinesGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -189,7 +189,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(multilinesGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, false, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(false, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -239,7 +239,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
        ));
        try (XContentParser parser = createParser(polygonGeoJson)) {
            parser.nextToken();
            assertEquals(expected, GeoJson.fromXContent(parser, true, false, true));
            assertEquals(expected, new GeoJson(true, false, true).fromXContent(parser));
        }
    }

@@ -259,7 +259,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(polygonGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, true));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, true).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -275,7 +275,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(invalidPoint1)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -288,7 +288,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(invalidPoint2)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -302,7 +302,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(invalidMultipoint1)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -315,7 +315,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(invalidMultipoint2)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -329,7 +329,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject();
        try (XContentParser parser = createParser(invalidMultipoint3)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -370,7 +370,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -391,7 +391,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            .endObject());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -406,7 +406,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -421,7 +421,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -436,7 +436,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -449,7 +449,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -460,7 +460,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -473,7 +473,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -710,7 +710,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(tooLittlePointGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }

@@ -723,7 +723,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {

        try (XContentParser parser = createParser(emptyPointGeoJson)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertNull(parser.nextToken());
        }
    }

@@ -749,7 +749,7 @@ public class GeoJsonParserTests extends BaseGeoParsingTestCase {
            parser.nextToken(); // foo
            parser.nextToken(); // start object
            parser.nextToken(); // start object
            expectThrows(XContentParseException.class, () -> GeoJson.fromXContent(parser, true, false, false));
            expectThrows(XContentParseException.class, () -> new GeoJson(true, false, false).fromXContent(parser));
            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // end of the document
            assertNull(parser.nextToken()); // no more elements afterwards
        }

@@ -49,6 +49,7 @@ public class GeoJsonSerializationTests extends ESTestCase {
    private static class GeometryWrapper implements ToXContentObject {

        private Geometry geometry;
        private static GeoJson PARSER = new GeoJson(true, false, true);

        GeometryWrapper(Geometry geometry) {
            this.geometry = geometry;

@@ -61,7 +62,7 @@ public class GeoJsonSerializationTests extends ESTestCase {

        public static GeometryWrapper fromXContent(XContentParser parser) throws IOException {
            parser.nextToken();
            return new GeometryWrapper(GeoJson.fromXContent(parser, true, false, true));
            return new GeometryWrapper(PARSER.fromXContent(parser));
        }

        @Override

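As in the wrapper above, moving the orientation/coerce/ignoreZValue flags onto the GeoJson instance lets a single configured parser be stored once (here in a static field) and reused, instead of repeating the booleans at every fromXContent call. A hedged sketch of that pattern; the import paths are assumed for illustration.

import java.io.IOException;

import org.elasticsearch.common.geo.GeoJson;             // package path assumed
import org.elasticsearch.common.xcontent.XContentParser; // package path assumed
import org.elasticsearch.geo.geometry.Geometry;

class GeoJsonParsingSketch {
    // Configure once: right orientation, no coercion, accept z values
    private static final GeoJson GEOJSON = new GeoJson(true, false, true);

    static Geometry read(XContentParser parser) throws IOException {
        return GEOJSON.fromXContent(parser); // flags no longer passed per call
    }
}
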
@@ -44,7 +44,7 @@ public class GeometryParserTests extends ESTestCase {

        try (XContentParser parser = createParser(pointGeoJson)) {
            parser.nextToken();
            assertEquals(new Point(0, 100), GeometryParser.parse(parser, true, randomBoolean(), randomBoolean()));
            assertEquals(new Point(0, 100), new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
        }

        XContentBuilder pointGeoJsonWithZ = XContentFactory.jsonBuilder()

@@ -55,13 +55,13 @@ public class GeometryParserTests extends ESTestCase {

        try (XContentParser parser = createParser(pointGeoJsonWithZ)) {
            parser.nextToken();
            assertEquals(new Point(0, 100, 10.0), GeometryParser.parse(parser, true, randomBoolean(), true));
            assertEquals(new Point(0, 100, 10.0), new GeometryParser(true, randomBoolean(), true).parse(parser));
        }


        try (XContentParser parser = createParser(pointGeoJsonWithZ)) {
            parser.nextToken();
            expectThrows(XContentParseException.class, () -> GeometryParser.parse(parser, true, randomBoolean(), false));
            expectThrows(XContentParseException.class, () -> new GeometryParser(true, randomBoolean(), false).parse(parser));
        }

        XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder()

@@ -81,13 +81,13 @@ public class GeometryParserTests extends ESTestCase {
        try (XContentParser parser = createParser(polygonGeoJson)) {
            parser.nextToken();
            // Coerce should automatically close the polygon
            assertEquals(p, GeometryParser.parse(parser, true, true, randomBoolean()));
            assertEquals(p, new GeometryParser(true, true, randomBoolean()).parse(parser));
        }

        try (XContentParser parser = createParser(polygonGeoJson)) {
            parser.nextToken();
            // No coerce - the polygon parsing should fail
            expectThrows(XContentParseException.class, () -> GeometryParser.parse(parser, true, false, randomBoolean()));
            expectThrows(XContentParseException.class, () -> new GeometryParser(true, false, randomBoolean()).parse(parser));
        }
    }

@@ -101,7 +101,7 @@ public class GeometryParserTests extends ESTestCase {
            parser.nextToken(); // Start object
            parser.nextToken(); // Field Name
            parser.nextToken(); // Field Value
            assertEquals(new Point(0, 100), GeometryParser.parse(parser, true, randomBoolean(), randomBoolean()));
            assertEquals(new Point(0, 100), new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
        }
    }

@@ -115,7 +115,7 @@ public class GeometryParserTests extends ESTestCase {
            parser.nextToken(); // Start object
            parser.nextToken(); // Field Name
            parser.nextToken(); // Field Value
            assertNull(GeometryParser.parse(parser, true, randomBoolean(), randomBoolean()));
            assertNull(new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
        }
    }

@@ -130,7 +130,7 @@ public class GeometryParserTests extends ESTestCase {
            parser.nextToken(); // Field Name
            parser.nextToken(); // Field Value
            ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
                () -> GeometryParser.parse(parser, true, randomBoolean(), randomBoolean()));
                () -> new GeometryParser(true, randomBoolean(), randomBoolean()).parse(parser));
            assertEquals("shape must be an object consisting of type and coordinates", ex.getMessage());
        }
    }

@@ -137,6 +137,7 @@ public class ConcurrentSeqNoVersioningIT extends AbstractDisruptionTestCase {

        assertAcked(prepareCreate("test")
            .setSettings(Settings.builder()
                .put(indexSettings())
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1 + randomInt(2))
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomInt(3))
            ));

@@ -96,33 +96,33 @@ public class StartDataFrameTransformAction extends Action<StartDataFrameTransfor
    }

    public static class Response extends BaseTasksResponse implements ToXContentObject {
        private final boolean started;
        private final boolean acknowledged;

        public Response(StreamInput in) throws IOException {
            super(in);
            started = in.readBoolean();
            acknowledged = in.readBoolean();
        }

        public Response(boolean started) {
        public Response(boolean acknowledged) {
            super(Collections.emptyList(), Collections.emptyList());
            this.started = started;
            this.acknowledged = acknowledged;
        }

        public boolean isStarted() {
            return started;
        public boolean isAcknowledged() {
            return acknowledged;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(started);
            out.writeBoolean(acknowledged);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            toXContentCommon(builder, params);
            builder.field("started", started);
            builder.field("acknowledged", acknowledged);
            builder.endObject();
            return builder;
        }

@@ -137,12 +137,12 @@ public class StartDataFrameTransformAction extends Action<StartDataFrameTransfor
                return false;
            }
            Response response = (Response) obj;
            return started == response.started;
            return acknowledged == response.acknowledged;
        }

        @Override
        public int hashCode() {
            return Objects.hash(started);
            return Objects.hash(acknowledged);
        }
    }
}

@@ -158,40 +158,40 @@ public class StopDataFrameTransformAction extends Action<StopDataFrameTransformA

    public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject {

        private final boolean stopped;
        private final boolean acknowledged;

        public Response(StreamInput in) throws IOException {
            super(in);
            stopped = in.readBoolean();
            acknowledged = in.readBoolean();
        }

        public Response(boolean stopped) {
        public Response(boolean acknowledged) {
            super(Collections.emptyList(), Collections.emptyList());
            this.stopped = stopped;
            this.acknowledged = acknowledged;
        }

        public Response(List<TaskOperationFailure> taskFailures,
                        List<? extends ElasticsearchException> nodeFailures,
                        boolean stopped) {
                        boolean acknowledged) {
            super(taskFailures, nodeFailures);
            this.stopped = stopped;
            this.acknowledged = acknowledged;
        }

        public boolean isStopped() {
            return stopped;
        public boolean isAcknowledged() {
            return acknowledged;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeBoolean(stopped);
            out.writeBoolean(acknowledged);
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            toXContentCommon(builder, params);
            builder.field("stopped", stopped);
            builder.field("acknowledged", acknowledged);
            builder.endObject();
            return builder;
        }

@@ -203,12 +203,12 @@ public class StopDataFrameTransformAction extends Action<StopDataFrameTransformA
            if (o == null || getClass() != o.getClass())
                return false;
            Response response = (Response) o;
            return stopped == response.stopped;
            return acknowledged == response.acknowledged;
        }

        @Override
        public int hashCode() {
            return Objects.hash(stopped);
            return Objects.hash(acknowledged);
        }
    }
}

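Both response classes above rename their action-specific status flag to the conventional acknowledged wording: the same boolean is written under a new name on the wire and in JSON, and the accessor becomes the common isAcknowledged(). The caller-side effect, sketched from the integration-test changes later in this diff:

// Before: each response exposed an action-specific getter and JSON field
// assertTrue(startDataFrameTransform(id, RequestOptions.DEFAULT).isStarted());     // {"started": true}
// assertTrue(stopDataFrameTransform(id, RequestOptions.DEFAULT).isStopped());      // {"stopped": true}

// After: both expose isAcknowledged() and the "acknowledged" field
// assertTrue(startDataFrameTransform(id, RequestOptions.DEFAULT).isAcknowledged()); // {"acknowledged": true}
// assertTrue(stopDataFrameTransform(id, RequestOptions.DEFAULT).isAcknowledged());  // {"acknowledged": true}
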
@@ -225,7 +225,6 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase {
        }
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/42084")
    public void testStateMachine() throws Exception {
        AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
        final ExecutorService executor = Executors.newFixedThreadPool(1);

@@ -236,10 +235,11 @@ public class AsyncTwoPhaseIndexerTests extends ESTestCase {
        assertThat(indexer.getState(), equalTo(IndexerState.STARTED));
        assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis()));
        assertThat(indexer.getState(), equalTo(IndexerState.INDEXING));
        assertTrue(awaitBusy(() -> indexer.getPosition() == 2));
        countDownLatch.countDown();

        assertThat(indexer.getPosition(), equalTo(2));
        assertTrue(awaitBusy(() -> isFinished.get()));
        assertThat(indexer.getPosition(), equalTo(3));

        assertFalse(isStopped.get());
        assertThat(indexer.getStep(), equalTo(6));
        assertThat(indexer.getStats().getNumInvocations(), equalTo(1L));

@@ -50,7 +50,7 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase {
            REVIEWS_INDEX_NAME);

        assertTrue(putDataFrameTransform(config, RequestOptions.DEFAULT).isAcknowledged());
        assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isStarted());
        assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged());

        waitUntilCheckpoint(config.getId(), 1L);

@@ -191,7 +191,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
            startTransformRequest.setOptions(expectWarnings(warnings));
        }
        Map<String, Object> startTransformResponse = entityAsMap(client().performRequest(startTransformRequest));
        assertThat(startTransformResponse.get("started"), equalTo(Boolean.TRUE));
        assertThat(startTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
    }

    protected void stopDataFrameTransform(String transformId, boolean force) throws Exception {

@@ -200,7 +200,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase {
        stopTransformRequest.addParameter(DataFrameField.FORCE.getPreferredName(), Boolean.toString(force));
        stopTransformRequest.addParameter(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), Boolean.toString(true));
        Map<String, Object> stopTransformResponse = entityAsMap(client().performRequest(stopTransformRequest));
        assertThat(stopTransformResponse.get("stopped"), equalTo(Boolean.TRUE));
        assertThat(stopTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
    }

    protected void startAndWaitForTransform(String transformId, String dataFrameIndex) throws Exception {

@@ -124,8 +124,8 @@ public class TransportStopDataFrameTransformAction extends
        }

        // if tasks is empty allMatch is 'vacuously satisfied'
        boolean allStopped = tasks.stream().allMatch(StopDataFrameTransformAction.Response::isStopped);
        return new StopDataFrameTransformAction.Response(allStopped);
        boolean allAcknowledged = tasks.stream().allMatch(StopDataFrameTransformAction.Response::isAcknowledged);
        return new StopDataFrameTransformAction.Response(allAcknowledged);
    }

    private ActionListener<StopDataFrameTransformAction.Response>

@@ -30,7 +30,6 @@ import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransform;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformConfig;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformState;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformStateAndStats;
import org.elasticsearch.xpack.core.dataframe.transforms.DataFrameTransformTaskState;
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.core.scheduler.SchedulerEngine;
import org.elasticsearch.xpack.dataframe.DataFrame;

@@ -223,18 +222,8 @@ public class DataFrameTransformPersistentTasksExecutor extends PersistentTasksEx
                           DataFrameTransformTask.ClientDataFrameIndexerBuilder indexerBuilder,
                           Long previousCheckpoint,
                           ActionListener<StartDataFrameTransformTaskAction.Response> listener) {
        // If we are stopped, and it is an initial run, this means we have never been started,
        // attempt to start the task

        buildTask.initializeIndexer(indexerBuilder);
        // TODO isInitialRun is false after relocation??
        if (buildTask.getState().getTaskState().equals(DataFrameTransformTaskState.STOPPED) && buildTask.isInitialRun()) {
            logger.info("Data frame transform [{}] created.", buildTask.getTransformId());
            buildTask.start(previousCheckpoint, listener);
        } else {
            logger.debug("No need to start task. Its current state is: {}", buildTask.getState().getIndexerState());
            listener.onResponse(new StartDataFrameTransformTaskAction.Response(true));
        }
        buildTask.start(previousCheckpoint, listener);
    }

    @Override

@@ -174,13 +174,8 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S
        }
    }

    public boolean isStopped() {
        IndexerState currentState = getIndexer() == null ? initialIndexerState : getIndexer().getState();
        return currentState.equals(IndexerState.STOPPED);
    }

    boolean isInitialRun() {
        return getIndexer() != null && getIndexer().initialRun();
    public void setTaskStateStopped() {
        taskState.set(DataFrameTransformTaskState.STOPPED);
    }

    /**

@@ -235,11 +230,9 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S

    public synchronized void stop() {
        if (getIndexer() == null) {
            return;
        }
        // taskState is initialized as STOPPED and is updated in tandem with the indexerState
        // Consequently, if it is STOPPED, we consider the whole task STOPPED.
        if (taskState.get() == DataFrameTransformTaskState.STOPPED) {
            // If there is no indexer the task has not been triggered
            // but it still needs to be stopped and removed
            shutdown();
            return;
        }

@@ -609,6 +602,8 @@ public class DataFrameTransformTask extends AllocatedPersistentTask implements S
        protected void onStop() {
            auditor.info(transformConfig.getId(), "Indexer has stopped");
            logger.info("Data frame transform [{}] indexer has stopped", transformConfig.getId());

            transformTask.setTaskStateStopped();
            transformsConfigManager.putOrUpdateTransformStats(
                new DataFrameTransformStateAndStats(transformId, transformTask.getState(), getStats(),
                    DataFrameTransformCheckpointingInfo.EMPTY), // TODO should this be null

@@ -531,9 +531,8 @@ public class TokenServiceTests extends ESTestCase {
        }

        try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) {
            // move to expiry
            clock.fastForwardSeconds(Math.toIntExact(defaultExpiration.getSeconds()) - fastForwardAmount);
            clock.rewind(TimeValue.timeValueNanos(clock.instant().getNano())); // trim off nanoseconds since don't store them in the index
            // move to expiry, stripping nanoseconds, as we don't store them in the security-tokens index
            clock.setTime(userToken.getExpirationTime().truncatedTo(ChronoUnit.MILLIS).atZone(clock.getZone()));
            PlainActionFuture<UserToken> future = new PlainActionFuture<>();
            tokenService.getAndValidateToken(requestContext, future);
            assertAuthentication(authentication, future.get().getAuthentication());

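The rewritten test above replaces relative clock arithmetic with an absolute jump to the token's expiration time, truncated to millisecond precision because, per the new comment, nanoseconds are not stored in the security-tokens index. A self-contained java.time illustration of that truncation:

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;

class TruncateExpirationSketch {
    public static void main(String[] args) {
        Instant expiration = Instant.parse("2019-05-28T12:34:56.123456789Z");
        // Drop sub-millisecond precision, mirroring what the index stores
        Instant stored = expiration.truncatedTo(ChronoUnit.MILLIS);
        System.out.println(stored);                        // 2019-05-28T12:34:56.123Z
        // A zoned value of this kind is what the test clock is then set to
        System.out.println(stored.atZone(ZoneOffset.UTC));
    }
}
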
@@ -54,6 +54,8 @@ import static org.elasticsearch.xpack.sql.jdbc.JdbcDateUtils.timeAsTime;
 */
final class TypeConverter {

    private static WellKnownText WKT = new WellKnownText();

    private TypeConverter() {}

    /**

@@ -246,7 +248,7 @@ final class TypeConverter {
            case GEO_POINT:
            case GEO_SHAPE:
                try {
                    return WellKnownText.fromWKT(v.toString());
                    return WKT.fromWKT(v.toString());
                } catch (IOException | ParseException ex) {
                    throw new SQLException("Cannot parse geo_shape", ex);
                }

@@ -51,6 +51,8 @@ public class JdbcAssert {

    private static final IntObjectHashMap<EsType> SQL_TO_TYPE = new IntObjectHashMap<>();

    private static final WellKnownText WKT = new WellKnownText();

    static {
        for (EsType type : EsType.values()) {
            SQL_TO_TYPE.putIfAbsent(type.getVendorTypeNumber().intValue(), type);

@@ -270,7 +272,7 @@ public class JdbcAssert {
            if (actualObject instanceof Geometry) {
                // We need to convert the expected object to libs/geo Geometry for comparision
                try {
                    expectedObject = WellKnownText.fromWKT(expectedObject.toString());
                    expectedObject = WKT.fromWKT(expectedObject.toString());
                } catch (IOException | ParseException ex) {
                    fail(ex.getMessage());
                }

@@ -49,6 +49,10 @@ public class GeoShape implements ToXContentFragment, NamedWriteable {

    private final Geometry shape;

    private static final GeometryParser GEOMETRY_PARSER = new GeometryParser(true, true, true);

    private static final WellKnownText WKT_PARSER = new WellKnownText();

    public GeoShape(double lon, double lat) {
        shape = new Point(lat, lon);
    }

@@ -72,17 +76,17 @@ public class GeoShape implements ToXContentFragment, NamedWriteable {

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(WellKnownText.toWKT(shape));
        out.writeString(WKT_PARSER.toWKT(shape));
    }

    @Override
    public String toString() {
        return WellKnownText.toWKT(shape);
        return WKT_PARSER.toWKT(shape);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.value(WellKnownText.toWKT(shape));
        return builder.value(WKT_PARSER.toWKT(shape));
    }

    public Geometry toGeometry() {

@@ -216,7 +220,7 @@ public class GeoShape implements ToXContentFragment, NamedWriteable {
        parser.nextToken(); // start object
        parser.nextToken(); // field name
        parser.nextToken(); // field value
        return GeometryParser.parse(parser, true, true, true);
        return GEOMETRY_PARSER.parse(parser);
    }
}

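The same static-to-instance move runs through TypeConverter, JdbcAssert and GeoShape above: toWKT/fromWKT become instance methods, so each caller holds a reusable WellKnownText object. A minimal round-trip sketch under those assumptions; the import paths and the exact WKT output format are assumed for illustration, and the Point constructor takes (lat, lon) as in GeoShape above.

import org.elasticsearch.geo.geometry.Geometry;     // package path assumed
import org.elasticsearch.geo.geometry.Point;        // package path assumed
import org.elasticsearch.geo.utils.WellKnownText;   // package path assumed

class WktRoundTripSketch {
    public static void main(String[] args) throws Exception {
        WellKnownText wkt = new WellKnownText(); // was: static WellKnownText.toWKT(...)
        Geometry point = new Point(41.12, -71.34);
        String text = wkt.toWKT(point);          // a POINT WKT string
        Geometry parsed = wkt.fromWKT(text);     // throws IOException | ParseException on bad input
        System.out.println(text + " -> " + parsed);
    }
}
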
@@ -42,7 +42,7 @@ teardown:
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }

---
"Test start missing transform":

@@ -56,7 +56,7 @@ teardown:
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }

  - do:
      catch: /Unable to start data frame transform \[airline-transform-start-stop\] as it is in state \[STARTED\]/

@@ -68,7 +68,7 @@ teardown:
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }
  - do:
      indices.get_mapping:
        index: airline-data-by-airline-start-stop

@@ -83,17 +83,20 @@ teardown:
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }
  - do:
      indices.get_mapping:
        index: airline-data-by-airline-start-stop
  - match: { airline-data-by-airline-start-stop.mappings: {} }
---
"Test start/stop/start transform":
  - skip:
      reason: "https://github.com/elastic/elasticsearch/issues/42650"
      version: "all"
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }

  - do:
      data_frame.get_data_frame_transform_stats:

@@ -107,20 +110,20 @@ teardown:
      data_frame.stop_data_frame_transform:
        transform_id: "airline-transform-start-stop"
        wait_for_completion: true
  - match: { stopped: true }
  - match: { acknowledged: true }

  - do:
      data_frame.get_data_frame_transform_stats:
        transform_id: "airline-transform-start-stop"
  - match: { count: 1 }
  - match: { transforms.0.id: "airline-transform-start-stop" }
#  - match: { transforms.0.state.indexer_state: "stopped" }
#  - match: { transforms.0.state.task_state: "stopped" }
  - match: { transforms.0.state.indexer_state: "stopped" }
  - match: { transforms.0.state.task_state: "stopped" }

  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }

  - do:
      data_frame.get_data_frame_transform_stats:

@@ -142,7 +145,7 @@ teardown:
  - do:
      data_frame.stop_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { stopped: true }
  - match: { acknowledged: true }

---
"Test start/stop only starts/stops specified transform":

@@ -161,7 +164,7 @@ teardown:
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { started: true }
  - match: { acknowledged: true }

  - do:
      data_frame.get_data_frame_transform_stats:

@@ -182,12 +185,12 @@ teardown:
  - do:
      data_frame.start_data_frame_transform:
        transform_id: "airline-transform-start-later"
  - match: { started: true }
  - match: { acknowledged: true }

  - do:
      data_frame.stop_data_frame_transform:
        transform_id: "airline-transform-start-stop"
  - match: { stopped: true }
  - match: { acknowledged: true }

  - do:
      data_frame.get_data_frame_transform_stats:

@@ -201,7 +204,7 @@ teardown:
      data_frame.stop_data_frame_transform:
        transform_id: "airline-transform-start-later"
        wait_for_completion: true
  - match: { stopped: true }
  - match: { acknowledged: true }

  - do:
      data_frame.delete_data_frame_transform: