Merge branch 'master' into index-lifecycle
commit cdc4f57a77
@ -337,7 +337,13 @@ class ClusterFormationTasks {
|
||||
if (node.nodeVersion.major >= 7) {
|
||||
esConfig['indices.breaker.total.use_real_memory'] = false
|
||||
}
|
||||
esConfig.putAll(node.config.settings)
|
||||
for (Map.Entry<String, Object> setting : node.config.settings) {
|
||||
if (setting.value == null) {
|
||||
esConfig.remove(setting.key)
|
||||
} else {
|
||||
esConfig.put(setting.key, setting.value)
|
||||
}
|
||||
}
|
||||
|
||||
Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
|
||||
writeConfig.doFirst {
|
||||
|
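Editor's note: the build-script hunk above does two things for nodes on 7.x — it disables the real-memory circuit breaker for test clusters, and it changes how node.config.settings are applied so that a null value now removes a default instead of overriding it. A minimal Java sketch of that merge rule (class and variable names are illustrative, not part of the build script):

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the settings-merge rule introduced above: a null
// value in the per-node overrides removes the default key instead of
// overriding it. Names here are made up for the example.
class SettingsMergeSketch {
    static Map<String, Object> merge(Map<String, Object> defaults, Map<String, Object> overrides) {
        Map<String, Object> merged = new HashMap<>(defaults);
        for (Map.Entry<String, Object> setting : overrides.entrySet()) {
            if (setting.getValue() == null) {
                merged.remove(setting.getKey());      // null unsets the default
            } else {
                merged.put(setting.getKey(), setting.getValue());
            }
        }
        return merged;
    }

    public static void main(String[] args) {
        Map<String, Object> defaults = new HashMap<>();
        defaults.put("indices.breaker.total.use_real_memory", false);
        Map<String, Object> overrides = new HashMap<>();
        overrides.put("indices.breaker.total.use_real_memory", null);
        System.out.println(merge(defaults, overrides));   // prints {}
    }
}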
@ -462,7 +462,6 @@
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]ExpectedShardSizeAllocationTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedNodeRoutingTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedShardsRoutingTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FilterRoutingTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]IndexBalanceTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]NodeVersionAllocationDeciderTests.java" checks="LineLength" />
|
||||
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]PreferLocalPrimariesToRelocatingPrimariesTests.java" checks="LineLength" />
|
||||
|
@ -1,5 +1,5 @@
|
||||
elasticsearch = 7.0.0-alpha1
|
||||
lucene = 8.0.0-snapshot-4d78db26be
|
||||
lucene = 8.0.0-snapshot-66c671ea80
|
||||
|
||||
# optional dependencies
|
||||
spatial4j = 0.7
|
||||
|
@ -55,7 +55,7 @@ public final class IngestClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::putPipeline, options,
|
||||
AcknowledgedResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -68,7 +68,7 @@ public final class IngestClient {
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::putPipeline, options,
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -82,7 +82,7 @@ public final class IngestClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::getPipeline, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options,
|
||||
GetPipelineResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -95,7 +95,7 @@ public final class IngestClient {
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::getPipeline, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options,
|
||||
GetPipelineResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -110,7 +110,7 @@ public final class IngestClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
|
||||
AcknowledgedResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -124,7 +124,7 @@ public final class IngestClient {
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -140,7 +140,7 @@ public final class IngestClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
|
||||
SimulatePipelineResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -157,7 +157,7 @@ public final class IngestClient {
|
||||
public void simulateAsync(SimulatePipelineRequest request,
|
||||
RequestOptions options,
|
||||
ActionListener<SimulatePipelineResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
|
||||
SimulatePipelineResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
}
|
||||
|
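Editor's note: the IngestClient hunks above only swap RequestConverters for the new IngestRequestConverters; the public API is unchanged. A hedged usage sketch of the synchronous put-pipeline call (pipeline id and body are hypothetical):

import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;

public class PutPipelineExample {
    // Creates (or replaces) an ingest pipeline through the high-level client.
    static boolean putPipeline(RestHighLevelClient client) throws java.io.IOException {
        String source = "{\"description\":\"rename a field\",\"processors\":"
            + "[{\"rename\":{\"field\":\"src\",\"target_field\":\"dest\"}}]}";
        PutPipelineRequest request =
            new PutPipelineRequest("my-pipeline", new BytesArray(source), XContentType.JSON);
        return client.ingest().putPipeline(request, RequestOptions.DEFAULT).isAcknowledged();
    }
}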
@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.action.ingest.DeletePipelineRequest;
|
||||
import org.elasticsearch.action.ingest.GetPipelineRequest;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class IngestRequestConverters {
|
||||
|
||||
static Request getPipeline(GetPipelineRequest getPipelineRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
.addCommaSeparatedPathParts(getPipelineRequest.getIds())
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||
parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
.addPathPart(putPipelineRequest.getId())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
|
||||
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||
parameters.withTimeout(putPipelineRequest.timeout());
|
||||
parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());
|
||||
|
||||
request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
.addPathPart(deletePipelineRequest.getId())
|
||||
.build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
|
||||
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||
parameters.withTimeout(deletePipelineRequest.timeout());
|
||||
parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException {
|
||||
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline");
|
||||
if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) {
|
||||
builder.addPathPart(simulatePipelineRequest.getId());
|
||||
}
|
||||
builder.addPathPartAsIs("_simulate");
|
||||
String endpoint = builder.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||
params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
|
||||
request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
}
|
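Editor's note: IngestRequestConverters itself is package-private, so the sketch below only illustrates the HTTP calls the converters above produce, using the public low-level Request class; ids, timeouts and bodies are hypothetical.

import org.elasticsearch.client.Request;

public class IngestEndpointsSketch {
    // GET /_ingest/pipeline/{ids} with an optional master_timeout, as built by getPipeline(...)
    static Request getPipeline() {
        Request request = new Request("GET", "/_ingest/pipeline/my-pipeline");
        request.addParameter("master_timeout", "30s");
        return request;
    }

    // POST /_ingest/pipeline/{id}/_simulate?verbose=..., as built by simulatePipeline(...)
    static Request simulatePipeline() {
        Request request = new Request("POST", "/_ingest/pipeline/my-pipeline/_simulate");
        request.addParameter("verbose", "true");
        request.setJsonEntity("{\"docs\":[{\"_source\":{\"src\":\"some value\"}}]}");
        return request;
    }
}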
@ -19,14 +19,18 @@
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.apache.http.entity.ByteArrayEntity;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.client.RequestConverters.EndpointBuilder;
|
||||
import org.elasticsearch.client.ml.CloseJobRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobRequest;
|
||||
import org.elasticsearch.client.ml.FlushJobRequest;
|
||||
import org.elasticsearch.client.ml.ForecastJobRequest;
|
||||
import org.elasticsearch.client.ml.GetBucketsRequest;
|
||||
import org.elasticsearch.client.ml.GetInfluencersRequest;
|
||||
import org.elasticsearch.client.ml.GetJobRequest;
|
||||
@ -34,13 +38,16 @@ import org.elasticsearch.client.ml.GetJobStatsRequest;
|
||||
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
|
||||
import org.elasticsearch.client.ml.GetRecordsRequest;
|
||||
import org.elasticsearch.client.ml.OpenJobRequest;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
|
||||
import static org.elasticsearch.client.RequestConverters.createContentType;
|
||||
import static org.elasticsearch.client.RequestConverters.createEntity;
|
||||
|
||||
final class MLRequestConverters {
|
||||
@ -147,6 +154,19 @@ final class MLRequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request forecastJob(ForecastJobRequest forecastJobRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("ml")
|
||||
.addPathPartAsIs("anomaly_detectors")
|
||||
.addPathPart(forecastJobRequest.getJobId())
|
||||
.addPathPartAsIs("_forecast")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
request.setEntity(createEntity(forecastJobRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
@ -202,6 +222,35 @@ final class MLRequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request postData(PostDataRequest postDataRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("ml")
|
||||
.addPathPartAsIs("anomaly_detectors")
|
||||
.addPathPart(postDataRequest.getJobId())
|
||||
.addPathPartAsIs("_data")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
|
||||
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||
if (postDataRequest.getResetStart() != null) {
|
||||
params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart());
|
||||
}
|
||||
if (postDataRequest.getResetEnd() != null) {
|
||||
params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd());
|
||||
}
|
||||
BytesReference content = postDataRequest.getContent();
|
||||
if (content != null) {
|
||||
BytesRef source = postDataRequest.getContent().toBytesRef();
|
||||
HttpEntity byteEntity = new ByteArrayEntity(source.bytes,
|
||||
source.offset,
|
||||
source.length,
|
||||
createContentType(postDataRequest.getXContentType()));
|
||||
request.setEntity(byteEntity);
|
||||
}
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
|
@ -19,6 +19,10 @@
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.ml.ForecastJobRequest;
|
||||
import org.elasticsearch.client.ml.ForecastJobResponse;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PostDataResponse;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
import org.elasticsearch.client.ml.CloseJobRequest;
|
||||
import org.elasticsearch.client.ml.CloseJobResponse;
|
||||
@ -358,6 +362,28 @@ public final class MachineLearningClient {
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a forecast of an existing, opened Machine Learning Job
|
||||
*
|
||||
* This predicts the future behavior of a time series by using its historical behavior.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
|
||||
* </p>
|
||||
* @param request ForecastJobRequest with forecasting options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return response containing forecast acknowledgement and new forecast's ID
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::forecastJob,
|
||||
options,
|
||||
ForecastJobResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
|
||||
*
|
||||
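Editor's note: the forecastJob javadoc above describes the synchronous call; a hedged example follows (job id and durations are hypothetical, and the ForecastJobRequest setters and response accessor are assumptions based on the client's ml request classes):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.common.unit.TimeValue;

import java.io.IOException;

public class ForecastJobExample {
    // Forecasts the next day of behaviour for an open anomaly detection job.
    static String forecast(RestHighLevelClient client) throws IOException {
        ForecastJobRequest request = new ForecastJobRequest("my-job");
        request.setDuration(TimeValue.timeValueHours(24));     // assumed setter
        request.setExpiresIn(TimeValue.timeValueHours(48));    // assumed setter
        ForecastJobResponse response = client.machineLearning().forecastJob(request, RequestOptions.DEFAULT);
        return response.getForecastId();                       // assumed accessor
    }
}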
@ -374,6 +400,28 @@ public final class MachineLearningClient {
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a forecast of an existing, opened Machine Learning Job asynchronously
|
||||
*
|
||||
* This predicts the future behavior of a time series by using its historical behavior.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-forecast.html">Forecast ML Job Documentation</a>
|
||||
* </p>
|
||||
* @param request ForecastJobRequest with forecasting options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener<ForecastJobResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::forecastJob,
|
||||
options,
|
||||
ForecastJobResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously
|
||||
*
|
||||
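Editor's note: the asynchronous variant takes an ActionListener instead of blocking. A minimal listener sketch, assuming the same hypothetical job as above:

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;

public class ForecastJobAsyncExample {
    static void forecastAsync(RestHighLevelClient client) {
        ForecastJobRequest request = new ForecastJobRequest("my-job"); // hypothetical job id
        client.machineLearning().forecastJobAsync(request, RequestOptions.DEFAULT,
            new ActionListener<ForecastJobResponse>() {
                @Override
                public void onResponse(ForecastJobResponse response) {
                    // handle the forecast id / acknowledgement here
                }

                @Override
                public void onFailure(Exception e) {
                    // handle the error here
                }
            });
    }
}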
@ -501,6 +549,52 @@ public final class MachineLearningClient {
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends data to an anomaly detection job for analysis.
|
||||
*
|
||||
* NOTE: The job must have a state of open to receive and process the data.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
|
||||
* </p>
|
||||
*
|
||||
* @param request PostDataRequest containing the data to post and some additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return response containing operational progress about the job
|
||||
* @throws IOException when there is a serialization issue sending the request or receiving the response
|
||||
*/
|
||||
public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request,
|
||||
MLRequestConverters::postData,
|
||||
options,
|
||||
PostDataResponse::fromXContent,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends data to an anomaly detection job for analysis, asynchronously
|
||||
*
|
||||
* NOTE: The job must have a state of open to receive and process the data.
|
||||
*
|
||||
* <p>
|
||||
* For additional info
|
||||
* see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html">ML POST Data documentation</a>
|
||||
* </p>
|
||||
*
|
||||
* @param request PostDataRequest containing the data to post and some additional options
|
||||
* @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener Listener to be notified upon request completion
|
||||
*/
|
||||
public void postDataAsync(PostDataRequest request, RequestOptions options, ActionListener<PostDataResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request,
|
||||
MLRequestConverters::postData,
|
||||
options,
|
||||
PostDataResponse::fromXContent,
|
||||
listener,
|
||||
Collections.emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the influencers for a Machine Learning Job.
|
||||
* <p>
|
||||
|
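Editor's note: postData streams documents into an open job. A hedged sketch of the synchronous call (the PostDataRequest constructor taking a job id, content type and body is assumed, and the document and accessors are illustrative):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class PostDataExample {
    static long postOneDocument(RestHighLevelClient client) throws IOException {
        String doc = "{\"time\":1535760000000,\"value\":42.0}";
        PostDataRequest request = new PostDataRequest("my-job", XContentType.JSON, new BytesArray(doc));
        PostDataResponse response = client.machineLearning().postData(request, RequestOptions.DEFAULT);
        return response.getDataCounts().getProcessedRecordCount();   // assumed accessors
    }
}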
@ -49,7 +49,7 @@ public final class MigrationClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public IndexUpgradeInfoResponse getAssistance(IndexUpgradeInfoRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::getMigrationAssistance, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, MigrationRequestConverters::getMigrationAssistance, options,
|
||||
IndexUpgradeInfoResponse::fromXContent, Collections.emptySet());
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,37 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
|
||||
public class MigrationRequestConverters {
|
||||
|
||||
static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
|
||||
RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack/migration/assistance")
|
||||
.addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
|
||||
String endpoint = endpointBuilder.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
RequestConverters.Params parameters = new RequestConverters.Params(request);
|
||||
parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions());
|
||||
return request;
|
||||
}
|
||||
}
|
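Editor's note: MigrationRequestConverters above maps the upgrade-info request onto GET _xpack/migration/assistance/{indices}. A hedged caller-side sketch (index patterns are hypothetical):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;

import java.io.IOException;

public class MigrationAssistanceExample {
    static IndexUpgradeInfoResponse checkIndices(RestHighLevelClient client) throws IOException {
        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest("logs-*", "metrics-*");
        return client.migration().getAssistance(request, RequestOptions.DEFAULT);
    }
}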
@ -30,17 +30,9 @@ import org.apache.http.entity.ContentType;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.action.DocWriteRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
|
||||
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
|
||||
@ -72,10 +64,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.ingest.DeletePipelineRequest;
|
||||
import org.elasticsearch.action.ingest.GetPipelineRequest;
|
||||
import org.elasticsearch.action.ingest.PutPipelineRequest;
|
||||
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
|
||||
import org.elasticsearch.action.search.ClearScrollRequest;
|
||||
import org.elasticsearch.action.search.MultiSearchRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
@ -111,15 +99,10 @@ import org.elasticsearch.index.reindex.AbstractBulkByScrollRequest;
|
||||
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
|
||||
import org.elasticsearch.index.reindex.ReindexRequest;
|
||||
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.XPackUsageRequest;
|
||||
import org.elasticsearch.client.indexlifecycle.ExplainLifecycleRequest;
|
||||
import org.elasticsearch.client.indexlifecycle.SetIndexLifecyclePolicyRequest;
|
||||
import org.elasticsearch.client.indexlifecycle.StartILMRequest;
|
||||
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
|
||||
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.rest.action.search.RestSearchAction;
|
||||
import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
|
||||
import org.elasticsearch.script.mustache.SearchTemplateRequest;
|
||||
@ -131,10 +114,8 @@ import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Locale;
|
||||
import java.util.StringJoiner;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
final class RequestConverters {
|
||||
static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
|
||||
@ -713,47 +694,6 @@ final class RequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getPipeline(GetPipelineRequest getPipelineRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
.addCommaSeparatedPathParts(getPipelineRequest.getIds())
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
.addPathPart(putPipelineRequest.getId())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withTimeout(putPipelineRequest.timeout());
|
||||
parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());
|
||||
|
||||
request.setEntity(createEntity(putPipelineRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_ingest/pipeline")
|
||||
.addPathPart(deletePipelineRequest.getId())
|
||||
.build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withTimeout(deletePipelineRequest.timeout());
|
||||
parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request reindex(ReindexRequest reindexRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
@ -901,126 +841,6 @@ final class RequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
|
||||
String[] repositories = getRepositoriesRequest.repositories() == null ? Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories();
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories).build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
|
||||
parameters.withLocal(getRepositoriesRequest.local());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
|
||||
parameters.withTimeout(putRepositoryRequest.timeout());
|
||||
parameters.withVerify(putRepositoryRequest.verify());
|
||||
|
||||
request.setEntity(createEntity(putRepositoryRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name()).build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
|
||||
parameters.withTimeout(deleteRepositoryRequest.timeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(verifyRepositoryRequest.name())
|
||||
.addPathPartAsIs("_verify")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
|
||||
parameters.withTimeout(verifyRepositoryRequest.timeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPart("_snapshot")
|
||||
.addPathPart(createSnapshotRequest.repository())
|
||||
.addPathPart(createSnapshotRequest.snapshot())
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
|
||||
params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
|
||||
request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) {
|
||||
EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(getSnapshotsRequest.repository());
|
||||
String endpoint;
|
||||
if (getSnapshotsRequest.snapshots().length == 0) {
|
||||
endpoint = endpointBuilder.addPathPart("_all").build();
|
||||
} else {
|
||||
endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build();
|
||||
}
|
||||
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
|
||||
parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
|
||||
parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(snapshotsStatusRequest.repository())
|
||||
.addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots())
|
||||
.addPathPartAsIs("_status")
|
||||
.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
|
||||
parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(restoreSnapshotRequest.repository())
|
||||
.addPathPart(restoreSnapshotRequest.snapshot())
|
||||
.addPathPartAsIs("_restore")
|
||||
.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
|
||||
parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion());
|
||||
request.setEntity(createEntity(restoreSnapshotRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
|
||||
.addPathPart(deleteSnapshotRequest.repository())
|
||||
.addPathPart(deleteSnapshotRequest.snapshot())
|
||||
.build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
@ -1050,20 +870,6 @@ final class RequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException {
|
||||
EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ingest/pipeline");
|
||||
if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) {
|
||||
builder.addPathPart(simulatePipelineRequest.getId());
|
||||
}
|
||||
builder.addPathPartAsIs("_simulate");
|
||||
String endpoint = builder.build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
|
||||
request.setEntity(createEntity(simulatePipelineRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getAlias(GetAliasesRequest getAliasesRequest) {
|
||||
String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
|
||||
String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
|
||||
@ -1085,6 +891,19 @@ final class RequestConverters {
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build();
|
||||
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
params.withTimeout(putStoredScriptRequest.timeout());
|
||||
params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout());
|
||||
if (Strings.hasText(putStoredScriptRequest.context())) {
|
||||
params.putParam("context", putStoredScriptRequest.context());
|
||||
}
|
||||
request.setEntity(createEntity(putStoredScriptRequest, REQUEST_BODY_CONTENT_TYPE));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request analyze(AnalyzeRequest request) throws IOException {
|
||||
EndpointBuilder builder = new EndpointBuilder();
|
||||
String index = request.index();
|
||||
@ -1109,63 +928,7 @@ final class RequestConverters {
|
||||
String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build();
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request);
|
||||
params.withTimeout(deleteStoredScriptRequest.timeout());
|
||||
params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request xPackInfo(XPackInfoRequest infoRequest) {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
|
||||
if (false == infoRequest.isVerbose()) {
|
||||
request.addParameter("human", "false");
|
||||
}
|
||||
if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) {
|
||||
request.addParameter("categories", infoRequest.getCategories().stream()
|
||||
.map(c -> c.toString().toLowerCase(Locale.ROOT))
|
||||
.collect(Collectors.joining(",")));
|
||||
}
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPart(putWatchRequest.getId())
|
||||
.build();
|
||||
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
Params params = new Params(request).withVersion(putWatchRequest.getVersion());
|
||||
if (putWatchRequest.isActive() == false) {
|
||||
params.putParam("active", "false");
|
||||
}
|
||||
ContentType contentType = createContentType(putWatchRequest.xContentType());
|
||||
BytesReference source = putWatchRequest.getSource();
|
||||
request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request xPackWatcherDeleteWatch(DeleteWatchRequest deleteWatchRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack")
|
||||
.addPathPartAsIs("watcher")
|
||||
.addPathPartAsIs("watch")
|
||||
.addPathPart(deleteWatchRequest.getId())
|
||||
.build();
|
||||
|
||||
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request xpackUsage(XPackUsageRequest usageRequest) {
|
||||
Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
|
||||
Params parameters = new Params(request);
|
||||
parameters.withMasterTimeout(usageRequest.masterNodeTimeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getLifecyclePolicy(GetLifecyclePolicyRequest getLifecyclePolicyRequest) {
|
||||
String endpoint = new EndpointBuilder()
|
||||
.addPathPartAsIs("_ilm")
|
||||
.addCommaSeparatedPathParts(getLifecyclePolicyRequest.getPolicyNames())
|
||||
@ -1265,19 +1028,10 @@ final class RequestConverters {
|
||||
params.withMasterTimeout(explainLifecycleRequest.masterNodeTimeout());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
|
||||
EndpointBuilder endpointBuilder = new EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack/migration/assistance")
|
||||
.addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
|
||||
String endpoint = endpointBuilder.build();
|
||||
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
|
||||
Params parameters = new Params(request);
|
||||
parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions());
|
||||
return request;
|
||||
}
|
||||
|
||||
static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
|
||||
BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
|
||||
return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
|
||||
}
|
||||
|
@ -28,6 +28,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
@ -121,36 +122,36 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
|
||||
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedAvg;
|
||||
import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedCardinality;
|
||||
import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds;
|
||||
import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid;
|
||||
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedMax;
|
||||
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedMin;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedAvg;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedCardinality;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedMax;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedMin;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedSum;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric;
|
||||
import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedSum;
|
||||
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
|
||||
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ParsedValueCount;
|
||||
import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
|
||||
@ -1062,6 +1063,35 @@ public class RestHighLevelClient implements Closeable {
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts a stored script using the Scripting API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html"> Scripting API
|
||||
* on elastic.co</a>
|
||||
* @param putStoredScriptRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse putScript(PutStoredScriptRequest putStoredScriptRequest,
|
||||
RequestOptions options) throws IOException {
|
||||
return performRequestAndParseEntity(putStoredScriptRequest, RequestConverters::putScript, options,
|
||||
AcknowledgedResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously puts a stored script using the Scripting API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html"> Scripting API
|
||||
* on elastic.co</a>
|
||||
* @param putStoredScriptRequest the request
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void putScriptAsync(PutStoredScriptRequest putStoredScriptRequest, RequestOptions options,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
performRequestAsyncAndParseEntity(putStoredScriptRequest, RequestConverters::putScript, options,
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Asynchronously executes a request using the Field Capabilities API.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-field-caps.html">Field Capabilities API
|
||||
|
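Editor's note: the new putScript methods above wrap PUT _scripts/{id}. A hedged synchronous example (script id and source are hypothetical, and the builder-style PutStoredScriptRequest setters are assumptions):

import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class PutScriptExample {
    static boolean storeScript(RestHighLevelClient client) throws IOException {
        String source = "{\"script\":{\"lang\":\"painless\","
            + "\"source\":\"Math.log(_score * 2) + params.my_modifier\"}}";
        PutStoredScriptRequest request = new PutStoredScriptRequest()
            .id("calculate-score")                                // assumed setter
            .content(new BytesArray(source), XContentType.JSON);  // assumed setter
        return client.putScript(request, RequestOptions.DEFAULT).isAcknowledged();
    }
}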
@ -20,8 +20,11 @@
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.PutUserRequest;
|
||||
import org.elasticsearch.client.security.PutUserResponse;
|
||||
import org.elasticsearch.client.security.EmptyResponse;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -66,4 +69,60 @@ public final class SecurityClient {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::putUser, options,
|
||||
PutUserResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable a native realm or built-in user synchronously.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request with the user to enable
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response from the enable user call
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public EmptyResponse enableUser(EnableUserRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::enableUser, options,
|
||||
EmptyResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable a native realm or built-in user asynchronously.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request with the user to enable
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void enableUserAsync(EnableUserRequest request, RequestOptions options,
|
||||
ActionListener<EmptyResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::enableUser, options,
|
||||
EmptyResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Disable a native realm or built-in user synchronously.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request with the user to disable
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @return the response from the disable user call
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public EmptyResponse disableUser(DisableUserRequest request, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::disableUser, options,
|
||||
EmptyResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Disable a native realm or built-in user asynchronously.
|
||||
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html">
|
||||
* the docs</a> for more.
|
||||
* @param request the request with the user to disable
|
||||
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void disableUserAsync(DisableUserRequest request, RequestOptions options,
|
||||
ActionListener<EmptyResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::disableUser, options,
|
||||
EmptyResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
}
|
||||
|
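Editor's note: enableUser and disableUser flip the enabled flag on a native or built-in user. A hedged example of the synchronous calls (the username is made up, and the EnableUserRequest/DisableUserRequest constructors taking a username plus refresh policy are assumptions based on the surrounding code):

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.RefreshPolicy;

import java.io.IOException;

public class UserEnableExample {
    static void toggleUser(RestHighLevelClient client) throws IOException {
        // Enable the user, then disable it again; both calls return an EmptyResponse.
        client.security().enableUser(
            new EnableUserRequest("jacknich", RefreshPolicy.getDefault()), RequestOptions.DEFAULT);
        client.security().disableUser(
            new DisableUserRequest("jacknich", RefreshPolicy.getDefault()), RequestOptions.DEFAULT);
    }
}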
@ -20,14 +20,17 @@
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.PutUserRequest;
|
||||
import org.elasticsearch.client.security.SetUserEnabledRequest;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
|
||||
import static org.elasticsearch.client.RequestConverters.createEntity;
|
||||
|
||||
public final class SecurityRequestConverters {
|
||||
final class SecurityRequestConverters {
|
||||
|
||||
private SecurityRequestConverters() {}
|
||||
|
||||
@ -42,4 +45,24 @@ public final class SecurityRequestConverters {
|
||||
params.withRefreshPolicy(putUserRequest.getRefreshPolicy());
|
||||
return request;
|
||||
}
|
||||
|
||||
static Request enableUser(EnableUserRequest enableUserRequest) {
|
||||
return setUserEnabled(enableUserRequest);
|
||||
}
|
||||
|
||||
static Request disableUser(DisableUserRequest disableUserRequest) {
|
||||
return setUserEnabled(disableUserRequest);
|
||||
}
|
||||
|
||||
private static Request setUserEnabled(SetUserEnabledRequest setUserEnabledRequest) {
|
||||
String endpoint = new RequestConverters.EndpointBuilder()
|
||||
.addPathPartAsIs("_xpack/security/user")
|
||||
.addPathPart(setUserEnabledRequest.getUsername())
|
||||
.addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable")
|
||||
.build();
|
||||
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
|
||||
RequestConverters.Params params = new RequestConverters.Params(request);
|
||||
params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy());
|
||||
return request;
|
||||
}
|
||||
}
|
||||
|
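Editor's note: the converter above routes both operations through setUserEnabled, which only differs in the trailing path part. The resulting low-level calls look roughly like this (username and refresh value are hypothetical):

import org.elasticsearch.client.Request;

public class SetUserEnabledEndpointSketch {
    static Request enableUser() {
        Request request = new Request("PUT", "/_xpack/security/user/jacknich/_enable");
        request.addParameter("refresh", "wait_for");   // from withRefreshPolicy(...)
        return request;
    }

    static Request disableUser() {
        return new Request("PUT", "/_xpack/security/user/jacknich/_disable");
    }
}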
@ -65,7 +65,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options,
|
||||
GetRepositoriesResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -80,7 +80,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options,
|
||||
ActionListener<GetRepositoriesResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options,
|
||||
GetRepositoriesResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -94,7 +94,7 @@ public final class SnapshotClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse createRepository(PutRepositoryRequest putRepositoryRequest, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options,
|
||||
AcknowledgedResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -108,7 +108,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void createRepositoryAsync(PutRepositoryRequest putRepositoryRequest, RequestOptions options,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options,
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -123,8 +123,8 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public AcknowledgedResponse deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options,
|
||||
AcknowledgedResponse::fromXContent, emptySet());
|
||||
return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository,
|
||||
options, AcknowledgedResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
@ -137,7 +137,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void deleteRepositoryAsync(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository, options,
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -152,8 +152,8 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public VerifyRepositoryResponse verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options,
|
||||
VerifyRepositoryResponse::fromXContent, emptySet());
|
||||
return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository,
|
||||
options, VerifyRepositoryResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
/**
|
||||
@ -166,7 +166,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options,
|
||||
ActionListener<VerifyRepositoryResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository, options,
|
||||
VerifyRepositoryResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -178,7 +178,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options,
|
||||
CreateSnapshotResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -190,7 +190,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
|
||||
ActionListener<CreateSnapshotResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options,
|
||||
CreateSnapshotResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -205,7 +205,7 @@ public final class SnapshotClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options,
|
||||
GetSnapshotsResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -219,7 +219,7 @@ public final class SnapshotClient {
|
||||
* @param listener the listener to be notified upon request completion
|
||||
*/
|
||||
public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options, ActionListener<GetSnapshotsResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options,
|
||||
GetSnapshotsResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -234,7 +234,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options)
|
||||
throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options,
|
||||
SnapshotsStatusResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -248,7 +248,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options,
|
||||
ActionListener<SnapshotsStatusResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options,
|
||||
SnapshotsStatusResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -263,7 +263,7 @@ public final class SnapshotClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public RestoreSnapshotResponse restore(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options,
|
||||
RestoreSnapshotResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -278,7 +278,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void restoreAsync(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options,
|
||||
ActionListener<RestoreSnapshotResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options,
|
||||
RestoreSnapshotResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
|
||||
@ -293,7 +293,7 @@ public final class SnapshotClient {
|
||||
* @throws IOException in case there is a problem sending the request or parsing back the response
|
||||
*/
|
||||
public AcknowledgedResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException {
|
||||
return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options,
|
||||
return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, SnapshotRequestConverters::deleteSnapshot, options,
|
||||
AcknowledgedResponse::fromXContent, emptySet());
|
||||
}
|
||||
|
||||
@ -308,7 +308,7 @@ public final class SnapshotClient {
|
||||
*/
|
||||
public void deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options,
|
||||
ActionListener<AcknowledgedResponse> listener) {
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options,
|
||||
restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, SnapshotRequestConverters::deleteSnapshot, options,
|
||||
AcknowledgedResponse::fromXContent, listener, emptySet());
|
||||
}
|
||||
}
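
A minimal usage sketch for the snapshot APIs above, assuming an existing RestHighLevelClient named client, an "fs" repository location whitelisted via path.repo, and hypothetical repository and snapshot names:

    PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest("my_repository");
    putRepositoryRequest.type("fs");
    putRepositoryRequest.settings(Settings.builder().put("location", "backups"));
    client.snapshot().createRepository(putRepositoryRequest, RequestOptions.DEFAULT);

    CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest("my_repository", "my_snapshot");
    createSnapshotRequest.waitForCompletion(true);
    CreateSnapshotResponse createSnapshotResponse = client.snapshot().create(createSnapshotRequest, RequestOptions.DEFAULT);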

@ -0,0 +1,162 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.common.Strings;

import java.io.IOException;

public class SnapshotRequestConverters {

    static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
        String[] repositories = getRepositoriesRequest.repositories() == null ? Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories();
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories)
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
        parameters.withLocal(getRepositoriesRequest.local());
        return request;
    }

    static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(putRepositoryRequest.timeout());
        parameters.withVerify(putRepositoryRequest.verify());

        request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(deleteRepositoryRequest.timeout());
        return request;
    }

    static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(verifyRepositoryRequest.name())
            .addPathPartAsIs("_verify")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
        parameters.withTimeout(verifyRepositoryRequest.timeout());
        return request;
    }

    static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot")
            .addPathPart(createSnapshotRequest.repository())
            .addPathPart(createSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request);
        params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout());
        params.withWaitForCompletion(createSnapshotRequest.waitForCompletion());
        request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) {
        RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(getSnapshotsRequest.repository());
        String endpoint;
        if (getSnapshotsRequest.snapshots().length == 0) {
            endpoint = endpointBuilder.addPathPart("_all").build();
        } else {
            endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build();
        }

        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout());
        parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable()));
        parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose()));

        return request;
    }

    static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(snapshotsStatusRequest.repository())
            .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots())
            .addPathPartAsIs("_status")
            .build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
        parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
        return request;
    }

    static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(restoreSnapshotRequest.repository())
            .addPathPart(restoreSnapshotRequest.snapshot())
            .addPathPartAsIs("_restore")
            .build();
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout());
        parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion());
        request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
        String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
            .addPathPart(deleteSnapshotRequest.repository())
            .addPathPart(deleteSnapshotRequest.snapshot())
            .build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout());
        return request;
    }
}
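
For illustration, a sketch of the endpoint getSnapshots builds when no snapshot names are set, where the request falls back to the "_all" path part; the repository name is hypothetical:

    GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest();
    getSnapshotsRequest.repository("my_repository");
    Request request = SnapshotRequestConverters.getSnapshots(getSnapshotsRequest);
    // expected endpoint: "/_snapshot/my_repository/_all", plus the ignore_unavailable and verbose parameters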

@ -47,7 +47,7 @@ public final class WatcherClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public PutWatchResponse putWatch(PutWatchRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options,
        return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::putWatch, options,
            PutWatchResponse::fromXContent, emptySet());
    }

@ -61,7 +61,7 @@ public final class WatcherClient {
     */
    public void putWatchAsync(PutWatchRequest request, RequestOptions options,
                              ActionListener<PutWatchResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::putWatch, options,
            PutWatchResponse::fromXContent, listener, emptySet());
    }

@ -75,7 +75,7 @@ public final class WatcherClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public DeleteWatchResponse deleteWatch(DeleteWatchRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options,
        return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::deleteWatch, options,
            DeleteWatchResponse::fromXContent, singleton(404));
    }

@ -88,7 +88,7 @@ public final class WatcherClient {
     * @param listener the listener to be notified upon request completion
     */
    public void deleteWatchAsync(DeleteWatchRequest request, RequestOptions options, ActionListener<DeleteWatchResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::deleteWatch, options,
            DeleteWatchResponse::fromXContent, listener, singleton(404));
    }
}
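
A usage sketch for the watcher calls above; the watch id and source are hypothetical, client is an existing RestHighLevelClient, and the xpack().watcher() accessor path is an assumption about how WatcherClient is exposed:

    BytesReference watchSource = new BytesArray(
        "{\"trigger\":{\"schedule\":{\"interval\":\"10h\"}},\"input\":{\"none\":{}},\"actions\":{}}");
    PutWatchRequest putWatchRequest = new PutWatchRequest("my_watch_id", watchSource, XContentType.JSON);
    PutWatchResponse putWatchResponse = client.xpack().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
    DeleteWatchResponse deleteWatchResponse =
        client.xpack().watcher().deleteWatch(new DeleteWatchRequest("my_watch_id"), RequestOptions.DEFAULT);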

@ -0,0 +1,62 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;

public class WatcherRequestConverters {

    static Request putWatch(PutWatchRequest putWatchRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("watcher")
            .addPathPartAsIs("watch")
            .addPathPart(putWatchRequest.getId())
            .build();

        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
        RequestConverters.Params params = new RequestConverters.Params(request).withVersion(putWatchRequest.getVersion());
        if (putWatchRequest.isActive() == false) {
            params.putParam("active", "false");
        }
        ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType());
        BytesReference source = putWatchRequest.getSource();
        request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType));
        return request;
    }

    static Request deleteWatch(DeleteWatchRequest deleteWatchRequest) {
        String endpoint = new RequestConverters.EndpointBuilder()
            .addPathPartAsIs("_xpack")
            .addPathPartAsIs("watcher")
            .addPathPartAsIs("watch")
            .addPathPart(deleteWatchRequest.getId())
            .build();

        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        return request;
    }
}
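
A sketch of the query-parameter handling above: when a watch is marked inactive, the converter adds active=false to the PUT request. The setActive setter is an assumption mirroring the isActive() accessor used here, and "{}" stands in for a real watch body since no request is actually sent:

    PutWatchRequest inactiveWatch = new PutWatchRequest("my_watch_id", new BytesArray("{}"), XContentType.JSON);
    inactiveWatch.setActive(false);   // assumed setter matching isActive()
    Request request = WatcherRequestConverters.putWatch(inactiveWatch);
    // expected: PUT "/_xpack/watcher/watch/my_watch_id" with parameter active=false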

@ -56,7 +56,7 @@ public final class XPackClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackInfo, options,
        return restHighLevelClient.performRequestAndParseEntity(request, XPackRequestConverters::info, options,
            XPackInfoResponse::fromXContent, emptySet());
    }

@ -70,7 +70,7 @@ public final class XPackClient {
     */
    public void infoAsync(XPackInfoRequest request, RequestOptions options,
                          ActionListener<XPackInfoResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, XPackRequestConverters::info, options,
            XPackInfoResponse::fromXContent, listener, emptySet());
    }

@ -81,7 +81,7 @@ public final class XPackClient {
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xpackUsage, options,
        return restHighLevelClient.performRequestAndParseEntity(request, XPackRequestConverters::usage, options,
            XPackUsageResponse::fromXContent, emptySet());
    }

@ -91,7 +91,7 @@ public final class XPackClient {
     * @param listener the listener to be notified upon request completion
     */
    public void usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener<XPackUsageResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options,
        restHighLevelClient.performRequestAsyncAndParseEntity(request, XPackRequestConverters::usage, options,
            XPackUsageResponse::fromXContent, listener, emptySet());
    }
}
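
A usage sketch, assuming an existing RestHighLevelClient named client with an xpack() accessor exposing the XPackClient above:

    XPackInfoRequest infoRequest = new XPackInfoRequest();
    infoRequest.setVerbose(true);
    infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.BUILD, XPackInfoRequest.Category.FEATURES));
    XPackInfoResponse infoResponse = client.xpack().info(infoRequest, RequestOptions.DEFAULT);
    XPackUsageResponse usageResponse = client.xpack().usage(new XPackUsageRequest(), RequestOptions.DEFAULT);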

@ -0,0 +1,51 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.XPackUsageRequest;

import java.util.EnumSet;
import java.util.Locale;
import java.util.stream.Collectors;

public class XPackRequestConverters {

    static Request info(XPackInfoRequest infoRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack");
        if (false == infoRequest.isVerbose()) {
            request.addParameter("human", "false");
        }
        if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) {
            request.addParameter("categories", infoRequest.getCategories().stream()
                .map(c -> c.toString().toLowerCase(Locale.ROOT))
                .collect(Collectors.joining(",")));
        }
        return request;
    }

    static Request usage(XPackUsageRequest usageRequest) {
        Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage");
        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withMasterTimeout(usageRequest.masterNodeTimeout());
        return request;
    }
}

@ -0,0 +1,140 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.Objects;

/**
 * Pojo for forecasting an existing and open Machine Learning Job
 */
public class ForecastJobRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField DURATION = new ParseField("duration");
    public static final ParseField EXPIRES_IN = new ParseField("expires_in");

    public static final ConstructingObjectParser<ForecastJobRequest, Void> PARSER =
        new ConstructingObjectParser<>("forecast_job_request", (a) -> new ForecastJobRequest((String)a[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareString(
            (request, val) -> request.setDuration(TimeValue.parseTimeValue(val, DURATION.getPreferredName())), DURATION);
        PARSER.declareString(
            (request, val) -> request.setExpiresIn(TimeValue.parseTimeValue(val, EXPIRES_IN.getPreferredName())), EXPIRES_IN);
    }

    private final String jobId;
    private TimeValue duration;
    private TimeValue expiresIn;

    /**
     * A new forecast request
     *
     * @param jobId the non-null, existing, and opened jobId to forecast
     */
    public ForecastJobRequest(String jobId) {
        this.jobId = jobId;
    }

    public String getJobId() {
        return jobId;
    }

    public TimeValue getDuration() {
        return duration;
    }

    /**
     * Set the forecast duration
     *
     * A period of time that indicates how far into the future to forecast.
     * The default value is 1 day. The forecast starts at the last record that was processed.
     *
     * @param duration TimeValue for the duration of the forecast
     */
    public void setDuration(TimeValue duration) {
        this.duration = duration;
    }

    public TimeValue getExpiresIn() {
        return expiresIn;
    }

    /**
     * Set the forecast expiration
     *
     * The period of time that forecast results are retained.
     * After a forecast expires, the results are deleted. The default value is 14 days.
     * If set to a value of 0, the forecast is never automatically deleted.
     *
     * @param expiresIn TimeValue for the forecast expiration
     */
    public void setExpiresIn(TimeValue expiresIn) {
        this.expiresIn = expiresIn;
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, duration, expiresIn);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ForecastJobRequest other = (ForecastJobRequest) obj;
        return Objects.equals(jobId, other.jobId)
            && Objects.equals(duration, other.duration)
            && Objects.equals(expiresIn, other.expiresIn);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        if (duration != null) {
            builder.field(DURATION.getPreferredName(), duration.getStringRep());
        }
        if (expiresIn != null) {
            builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep());
        }
        builder.endObject();
        return builder;
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }
}
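
A short usage sketch; the job id is hypothetical, and the matching forecastJob method on MachineLearningClient is assumed rather than shown in this diff:

    ForecastJobRequest forecastJobRequest = new ForecastJobRequest("total-requests");
    forecastJobRequest.setDuration(TimeValue.timeValueHours(48));   // look 2 days ahead instead of the 1 day default
    forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(72));  // keep the results for 3 days instead of 14
    ForecastJobResponse forecastJobResponse =
        client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT);
    String forecastId = forecastJobResponse.getForecastId();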

@ -0,0 +1,102 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

/**
 * Forecast response object
 */
public class ForecastJobResponse extends ActionResponse implements ToXContentObject {

    public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
    public static final ParseField FORECAST_ID = new ParseField("forecast_id");

    public static final ConstructingObjectParser<ForecastJobResponse, Void> PARSER =
        new ConstructingObjectParser<>("forecast_job_response",
            true,
            (a) -> new ForecastJobResponse((Boolean)a[0], (String)a[1]));

    static {
        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID);
    }

    public static ForecastJobResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }

    private final boolean acknowledged;
    private final String forecastId;

    public ForecastJobResponse(boolean acknowledged, String forecastId) {
        this.acknowledged = acknowledged;
        this.forecastId = forecastId;
    }

    /**
     * Forecast creation acknowledgement
     * @return {@code true} indicates success, {@code false} otherwise
     */
    public boolean isAcknowledged() {
        return acknowledged;
    }

    /**
     * The created forecast ID
     */
    public String getForecastId() {
        return forecastId;
    }

    @Override
    public int hashCode() {
        return Objects.hash(acknowledged, forecastId);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ForecastJobResponse other = (ForecastJobResponse) obj;
        return Objects.equals(acknowledged, other.acknowledged)
            && Objects.equals(forecastId, other.forecastId);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged);
        builder.field(FORECAST_ID.getPreferredName(), forecastId);
        builder.endObject();
        return builder;
    }
}

@ -0,0 +1,229 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * POJO for posting data to a Machine Learning job
 */
public class PostDataRequest extends ActionRequest implements ToXContentObject {

    public static final ParseField RESET_START = new ParseField("reset_start");
    public static final ParseField RESET_END = new ParseField("reset_end");
    public static final ParseField CONTENT_TYPE = new ParseField("content_type");

    public static final ConstructingObjectParser<PostDataRequest, Void> PARSER =
        new ConstructingObjectParser<>("post_data_request",
            (a) -> new PostDataRequest((String)a[0], XContentType.fromMediaTypeOrFormat((String)a[1]), new byte[0]));

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
        PARSER.declareString(ConstructingObjectParser.constructorArg(), CONTENT_TYPE);
        PARSER.declareStringOrNull(PostDataRequest::setResetEnd, RESET_END);
        PARSER.declareStringOrNull(PostDataRequest::setResetStart, RESET_START);
    }

    private final String jobId;
    private final XContentType xContentType;
    private final BytesReference content;
    private String resetStart;
    private String resetEnd;

    /**
     * Create a new PostDataRequest object
     *
     * @param jobId non-null jobId of the job to post data to
     * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported
     * @param content bulk serialized content in the format of the passed {@link XContentType}
     */
    public PostDataRequest(String jobId, XContentType xContentType, BytesReference content) {
        this.jobId = Objects.requireNonNull(jobId, "job_id must not be null");
        this.xContentType = Objects.requireNonNull(xContentType, "content_type must not be null");
        this.content = Objects.requireNonNull(content, "content must not be null");
    }

    /**
     * Create a new PostDataRequest object referencing the passed {@code byte[]} content
     *
     * @param jobId non-null jobId of the job to post data to
     * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported
     * @param content bulk serialized content in the format of the passed {@link XContentType}
     */
    public PostDataRequest(String jobId, XContentType xContentType, byte[] content) {
        this(jobId, xContentType, new BytesArray(content));
    }

    /**
     * Create a new PostDataRequest object referencing the passed {@link JsonBuilder} object
     *
     * @param jobId non-null jobId of the job to post data to
     * @param builder {@link JsonBuilder} object containing documents to be serialized and sent in {@link XContentType#JSON} format
     */
    public PostDataRequest(String jobId, JsonBuilder builder) {
        this(jobId, XContentType.JSON, builder.build());
    }

    public String getJobId() {
        return jobId;
    }

    public String getResetStart() {
        return resetStart;
    }

    /**
     * Specifies the start of the bucket resetting range
     *
     * @param resetStart String representation of a timestamp; may be epoch seconds, epoch millis, or an ISO 8601 string
     */
    public void setResetStart(String resetStart) {
        this.resetStart = resetStart;
    }

    public String getResetEnd() {
        return resetEnd;
    }

    /**
     * Specifies the end of the bucket resetting range
     *
     * @param resetEnd String representation of a timestamp; may be epoch seconds, epoch millis, or an ISO 8601 string
     */
    public void setResetEnd(String resetEnd) {
        this.resetEnd = resetEnd;
    }

    public BytesReference getContent() {
        return content;
    }

    public XContentType getXContentType() {
        return xContentType;
    }

    @Override
    public int hashCode() {
        // We leave out the content for server side parity
        return Objects.hash(jobId, resetStart, resetEnd, xContentType);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        // We leave out the content for server side parity
        PostDataRequest other = (PostDataRequest) obj;
        return Objects.equals(jobId, other.jobId) &&
            Objects.equals(resetStart, other.resetStart) &&
            Objects.equals(resetEnd, other.resetEnd) &&
            Objects.equals(xContentType, other.xContentType);
    }

    @Override
    public ActionRequestValidationException validate() {
        return null;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(Job.ID.getPreferredName(), jobId);
        builder.field(CONTENT_TYPE.getPreferredName(), xContentType.mediaType());
        if (resetEnd != null) {
            builder.field(RESET_END.getPreferredName(), resetEnd);
        }
        if (resetStart != null) {
            builder.field(RESET_START.getPreferredName(), resetStart);
        }
        builder.endObject();
        return builder;
    }

    /**
     * Class for incrementally building a bulk document request in {@link XContentType#JSON} format
     */
    public static class JsonBuilder {

        private final List<ByteBuffer> bytes = new ArrayList<>();

        /**
         * Add a document via a {@code byte[]} array
         *
         * @param doc {@code byte[]} array of a serialized JSON object
         */
        public JsonBuilder addDoc(byte[] doc) {
            bytes.add(ByteBuffer.wrap(doc));
            return this;
        }

        /**
         * Add a document via a serialized JSON String
         *
         * @param doc a serialized JSON String
         */
        public JsonBuilder addDoc(String doc) {
            bytes.add(ByteBuffer.wrap(doc.getBytes(StandardCharsets.UTF_8)));
            return this;
        }

        /**
         * Add a document via an object map
         *
         * @param doc document object to add to bulk request
         * @throws IOException on parsing/serialization errors
         */
        public JsonBuilder addDoc(Map<String, Object> doc) throws IOException {
            try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
                builder.map(doc);
                bytes.add(ByteBuffer.wrap(BytesReference.toBytes(BytesReference.bytes(builder))));
            }
            return this;
        }

        private BytesReference build() {
            ByteBuffer[] buffers = bytes.toArray(new ByteBuffer[bytes.size()]);
            return BytesReference.fromByteBuffers(buffers);
        }

    }
}
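
A usage sketch of the JsonBuilder helper; the job id and field names are hypothetical, and the matching postData method on MachineLearningClient is assumed rather than shown in this diff:

    PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
    Map<String, Object> record = new HashMap<>();
    record.put("total", 109);
    record.put("timestamp", "2018-09-08T00:00:00Z");
    jsonBuilder.addDoc(record);                                     // serialized through an XContentBuilder
    jsonBuilder.addDoc("{\"total\":123,\"timestamp\":\"2018-09-08T00:01:00Z\"}");

    PostDataRequest postDataRequest = new PostDataRequest("total-requests", jsonBuilder);
    PostDataResponse postDataResponse =
        client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
    DataCounts dataCounts = postDataResponse.getDataCounts();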

@ -0,0 +1,74 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.client.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.client.ml.job.process.DataCounts;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Objects;

/**
 * Response object when posting data to a Machine Learning Job
 */
public class PostDataResponse extends ActionResponse implements ToXContentObject {

    private DataCounts dataCounts;

    public static PostDataResponse fromXContent(XContentParser parser) throws IOException {
        return new PostDataResponse(DataCounts.PARSER.parse(parser, null));
    }

    public PostDataResponse(DataCounts counts) {
        this.dataCounts = counts;
    }

    public DataCounts getDataCounts() {
        return dataCounts;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
        return dataCounts.toXContent(builder, params);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(dataCounts);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }

        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }

        PostDataResponse other = (PostDataResponse) obj;
        return Objects.equals(dataCounts, other.dataCounts);
    }

}

@ -0,0 +1,30 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.security;

/**
 * Request object to disable a native realm or built-in user.
 */
public final class DisableUserRequest extends SetUserEnabledRequest {

    public DisableUserRequest(String username, RefreshPolicy refreshPolicy) {
        super(false, username, refreshPolicy);
    }
}

@ -0,0 +1,37 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.security;

import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

/**
 * Response for a request which simply returns an empty object.
 */
public final class EmptyResponse {

    private static final ObjectParser<EmptyResponse, Void> PARSER = new ObjectParser<>("empty_response", false, EmptyResponse::new);

    public static EmptyResponse fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}

@ -0,0 +1,30 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.security;

/**
 * Request object to enable a native realm or built-in user.
 */
public final class EnableUserRequest extends SetUserEnabledRequest {

    public EnableUserRequest(String username, RefreshPolicy refreshPolicy) {
        super(true, username, refreshPolicy);
    }
}

@ -0,0 +1,52 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.security;

import org.elasticsearch.client.Validatable;

import java.util.Objects;

/**
 * Abstract request object to enable or disable a built-in or native user.
 */
public abstract class SetUserEnabledRequest implements Validatable {

    private final boolean enabled;
    private final String username;
    private final RefreshPolicy refreshPolicy;

    SetUserEnabledRequest(boolean enabled, String username, RefreshPolicy refreshPolicy) {
        this.enabled = enabled;
        this.username = Objects.requireNonNull(username, "username is required");
        this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public String getUsername() {
        return username;
    }

    public RefreshPolicy getRefreshPolicy() {
        return refreshPolicy;
    }
}
|
@ -130,7 +130,8 @@ public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase {
|
||||
}
|
||||
|
||||
protected static void createPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
|
||||
assertOK(client().performRequest(RequestConverters.putPipeline(putPipelineRequest)));
|
||||
assertTrue(execute(
|
||||
putPipelineRequest, highLevelClient().ingest()::putPipeline, highLevelClient().ingest()::putPipelineAsync).isAcknowledged());
|
||||
}
|
||||
|
||||
protected static void clusterUpdateSettings(Settings persistentSettings,
|
||||
|
@ -0,0 +1,120 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.junit.Assert;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import java.util.StringJoiner;

public class IngestRequestConvertersTests extends ESTestCase {

    public void testPutPipeline() throws IOException {
        String pipelineId = "some_pipeline_id";
        PutPipelineRequest request = new PutPipelineRequest(
            "some_pipeline_id",
            new BytesArray("{}".getBytes(StandardCharsets.UTF_8)),
            XContentType.JSON
        );
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomMasterTimeout(request, expectedParams);
        RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request expectedRequest = IngestRequestConverters.putPipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        Assert.assertEquals(HttpPut.METHOD_NAME, expectedRequest.getMethod());
        Assert.assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testGetPipeline() {
        String pipelineId = "some_pipeline_id";
        Map<String, String> expectedParams = new HashMap<>();
        GetPipelineRequest request = new GetPipelineRequest("some_pipeline_id");
        RequestConvertersTests.setRandomMasterTimeout(request, expectedParams);
        Request expectedRequest = IngestRequestConverters.getPipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        Assert.assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod());
        Assert.assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testDeletePipeline() {
        String pipelineId = "some_pipeline_id";
        Map<String, String> expectedParams = new HashMap<>();
        DeletePipelineRequest request = new DeletePipelineRequest(pipelineId);
        RequestConvertersTests.setRandomMasterTimeout(request, expectedParams);
        RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
        Request expectedRequest = IngestRequestConverters.deletePipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        Assert.assertEquals(HttpDelete.METHOD_NAME, expectedRequest.getMethod());
        Assert.assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testSimulatePipeline() throws IOException {
        String pipelineId = ESTestCase.randomBoolean() ? "some_pipeline_id" : null;
        boolean verbose = ESTestCase.randomBoolean();
        String json = "{\"pipeline\":{" +
            "\"description\":\"_description\"," +
            "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," +
            "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}";
        SimulatePipelineRequest request = new SimulatePipelineRequest(
            new BytesArray(json.getBytes(StandardCharsets.UTF_8)),
            XContentType.JSON
        );
        request.setId(pipelineId);
        request.setVerbose(verbose);
        Map<String, String> expectedParams = new HashMap<>();
        expectedParams.put("verbose", Boolean.toString(verbose));

        Request expectedRequest = IngestRequestConverters.simulatePipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        if (pipelineId != null && !pipelineId.isEmpty())
            endpoint.add(pipelineId);
        endpoint.add("_simulate");
        Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        Assert.assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod());
        Assert.assertEquals(expectedParams, expectedRequest.getParameters());
        RequestConvertersTests.assertToXContentBody(request, expectedRequest.getEntity());
    }
}

@ -26,6 +26,7 @@ import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetJobRequest;
@ -33,6 +34,7 @@ import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
@ -43,12 +45,15 @@ import org.elasticsearch.client.ml.job.config.JobUpdateTests;
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;

@ -169,6 +174,21 @@ public class MLRequestConvertersTests extends ESTestCase {
                requestEntityToString(request));
    }

    public void testForecastJob() throws Exception {
        String jobId = randomAlphaOfLength(10);
        ForecastJobRequest forecastJobRequest = new ForecastJobRequest(jobId);

        forecastJobRequest.setDuration(TimeValue.timeValueHours(10));
        forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(12));
        Request request = MLRequestConverters.forecastJob(forecastJobRequest);
        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_forecast", request.getEndpoint());
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
            ForecastJobRequest parsedRequest = ForecastJobRequest.PARSER.apply(parser, null);
            assertThat(parsedRequest, equalTo(forecastJobRequest));
        }
    }

    public void testUpdateJob() throws Exception {
        String jobId = randomAlphaOfLength(10);
        JobUpdate updates = JobUpdateTests.createRandom(jobId);
@ -238,6 +258,34 @@ public class MLRequestConvertersTests extends ESTestCase {
        }
    }

    public void testPostData() throws Exception {
        String jobId = randomAlphaOfLength(10);
        PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
        Map<String, Object> obj = new HashMap<>();
        obj.put("foo", "bar");
        jsonBuilder.addDoc(obj);

        PostDataRequest postDataRequest = new PostDataRequest(jobId, jsonBuilder);
        Request request = MLRequestConverters.postData(postDataRequest);

        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_data", request.getEndpoint());
        assertEquals("{\"foo\":\"bar\"}", requestEntityToString(request));
        assertEquals(postDataRequest.getXContentType().mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
        assertFalse(request.getParameters().containsKey(PostDataRequest.RESET_END.getPreferredName()));
        assertFalse(request.getParameters().containsKey(PostDataRequest.RESET_START.getPreferredName()));

        PostDataRequest postDataRequest2 = new PostDataRequest(jobId, XContentType.SMILE, new byte[0]);
        postDataRequest2.setResetStart("2018-08-08T00:00:00Z");
        postDataRequest2.setResetEnd("2018-09-08T00:00:00Z");

        request = MLRequestConverters.postData(postDataRequest2);

        assertEquals(postDataRequest2.getXContentType().mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
        assertEquals("2018-09-08T00:00:00Z", request.getParameters().get(PostDataRequest.RESET_END.getPreferredName()));
        assertEquals("2018-08-08T00:00:00Z", request.getParameters().get(PostDataRequest.RESET_START.getPreferredName()));
    }

    public void testGetInfluencers() throws IOException {
        String jobId = randomAlphaOfLength(10);
        GetInfluencersRequest getInfluencersRequest = new GetInfluencersRequest(jobId);

@ -20,6 +20,10 @@ package org.elasticsearch.client;

import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.common.unit.TimeValue;
@ -41,13 +45,14 @@ import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.junit.After;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

@ -220,6 +225,52 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
        assertThat(exception.status().getStatus(), equalTo(404));
    }

    public void testForecastJob() throws Exception {
        String jobId = "ml-forecast-job-test";
        Job job = buildJob(jobId);
        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
        machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);

        PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
        for(int i = 0; i < 30; i++) {
            Map<String, Object> hashMap = new HashMap<>();
            hashMap.put("total", randomInt(1000));
            hashMap.put("timestamp", (i+1)*1000);
            builder.addDoc(hashMap);
        }
        PostDataRequest postDataRequest = new PostDataRequest(jobId, builder);
        machineLearningClient.postData(postDataRequest, RequestOptions.DEFAULT);
        machineLearningClient.flushJob(new FlushJobRequest(jobId), RequestOptions.DEFAULT);

        ForecastJobRequest request = new ForecastJobRequest(jobId);
        ForecastJobResponse response = execute(request, machineLearningClient::forecastJob, machineLearningClient::forecastJobAsync);

        assertTrue(response.isAcknowledged());
        assertNotNull(response.getForecastId());
    }

    public void testPostData() throws Exception {
        String jobId = randomValidJobId();
        Job job = buildJob(jobId);
        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
        machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);

        PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
        for(int i = 0; i < 10; i++) {
            Map<String, Object> hashMap = new HashMap<>();
            hashMap.put("total", randomInt(1000));
            hashMap.put("timestamp", (i+1)*1000);
            builder.addDoc(hashMap);
        }
        PostDataRequest postDataRequest = new PostDataRequest(jobId, builder);

        PostDataResponse response = execute(postDataRequest, machineLearningClient::postData, machineLearningClient::postDataAsync);
        assertEquals(10, response.getDataCounts().getInputRecordCount());
        assertEquals(0, response.getDataCounts().getOutOfOrderTimeStampCount());
    }

    public void testUpdateJob() throws Exception {
        String jobId = randomValidJobId();
        Job job = buildJob(jobId);
@ -256,8 +307,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
        builder.setAnalysisConfig(configBuilder);

        DataDescription.Builder dataDescription = new DataDescription.Builder();
        dataDescription.setTimeFormat(randomFrom(DataDescription.EPOCH_MS, DataDescription.EPOCH));
        dataDescription.setTimeField(randomAlphaOfLength(10));
        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);
        dataDescription.setTimeField("timestamp");
        builder.setDataDescription(dataDescription);

        return builder.build();

@ -0,0 +1,48 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.test.ESTestCase;

import java.util.HashMap;
import java.util.Map;

public class MigrationRequestConvertersTests extends ESTestCase {

    public static void testGetMigrationAssistance() {
        IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest();
        String expectedEndpoint = "/_xpack/migration/assistance";
        if (randomBoolean()) {
            String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
            upgradeInfoRequest.indices(indices);
            expectedEndpoint += "/" + String.join(",", indices);
        }
        Map<String, String> expectedParams = new HashMap<>();
        RequestConvertersTests.setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions,
            expectedParams);
        Request request = MigrationRequestConverters.getMigrationAssistance(upgradeInfoRequest);
        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals(expectedEndpoint, request.getEndpoint());
        assertNull(request.getEntity());
        assertEquals(expectedParams, request.getParameters());
    }
}

@ -29,17 +29,9 @@ import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
@ -74,10 +66,6 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;
@ -102,11 +90,9 @@ import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ToXContent;
@ -128,15 +114,10 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.index.reindex.RemoteInfo;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.client.indexlifecycle.ExplainLifecycleRequest;
import org.elasticsearch.client.indexlifecycle.SetIndexLifecyclePolicyRequest;
import org.elasticsearch.client.indexlifecycle.StartILMRequest;
import org.elasticsearch.client.indexlifecycle.StopILMRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
@ -155,15 +136,11 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -190,7 +167,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

public class RequestConvertersTests extends ESTestCase {

@ -1835,83 +1811,6 @@ public class RequestConvertersTests extends ESTestCase {
        assertToXContentBody(resizeRequest, request.getEntity());
    }

    public void testPutPipeline() throws IOException {
        String pipelineId = "some_pipeline_id";
        PutPipelineRequest request = new PutPipelineRequest(
            "some_pipeline_id",
            new BytesArray("{}".getBytes(StandardCharsets.UTF_8)),
            XContentType.JSON
        );
        Map<String, String> expectedParams = new HashMap<>();
        setRandomMasterTimeout(request, expectedParams);
        setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request expectedRequest = RequestConverters.putPipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        assertEquals(HttpPut.METHOD_NAME, expectedRequest.getMethod());
        assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testGetPipeline() {
        String pipelineId = "some_pipeline_id";
        Map<String, String> expectedParams = new HashMap<>();
        GetPipelineRequest request = new GetPipelineRequest("some_pipeline_id");
        setRandomMasterTimeout(request, expectedParams);
        Request expectedRequest = RequestConverters.getPipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod());
        assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testDeletePipeline() {
        String pipelineId = "some_pipeline_id";
        Map<String, String> expectedParams = new HashMap<>();
        DeletePipelineRequest request = new DeletePipelineRequest(pipelineId);
        setRandomMasterTimeout(request, expectedParams);
        setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
        Request expectedRequest = RequestConverters.deletePipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        assertEquals(HttpDelete.METHOD_NAME, expectedRequest.getMethod());
        assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testSimulatePipeline() throws IOException {
        String pipelineId = randomBoolean() ? "some_pipeline_id" : null;
        boolean verbose = randomBoolean();
        String json = "{\"pipeline\":{" +
            "\"description\":\"_description\"," +
            "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," +
            "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}";
        SimulatePipelineRequest request = new SimulatePipelineRequest(
            new BytesArray(json.getBytes(StandardCharsets.UTF_8)),
            XContentType.JSON
        );
        request.setId(pipelineId);
        request.setVerbose(verbose);
        Map<String, String> expectedParams = new HashMap<>();
        expectedParams.put("verbose", Boolean.toString(verbose));

        Request expectedRequest = RequestConverters.simulatePipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        if (pipelineId != null && !pipelineId.isEmpty())
            endpoint.add(pipelineId);
        endpoint.add("_simulate");
        assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod());
        assertEquals(expectedParams, expectedRequest.getParameters());
        assertToXContentBody(request, expectedRequest.getEntity());
    }

    public void testRollover() throws IOException {
        RolloverRequest rolloverRequest = new RolloverRequest(randomAlphaOfLengthBetween(3, 10),
            randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10));

@ -2006,229 +1905,6 @@ public class RequestConvertersTests extends ESTestCase {
        assertEquals(expectedParams, request.getParameters());
    }

    public void testGetRepositories() {
        Map<String, String> expectedParams = new HashMap<>();
        StringBuilder endpoint = new StringBuilder("/_snapshot");

        GetRepositoriesRequest getRepositoriesRequest = new GetRepositoriesRequest();
        setRandomMasterTimeout(getRepositoriesRequest, expectedParams);
        setRandomLocal(getRepositoriesRequest, expectedParams);

        if (randomBoolean()) {
            String[] entries = new String[] { "a", "b", "c" };
            getRepositoriesRequest.repositories(entries);
            endpoint.append("/" + String.join(",", entries));
        }

        Request request = RequestConverters.getRepositories(getRepositoriesRequest);
        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
    }

    public void testCreateRepository() throws IOException {
        String repository = randomIndicesNames(1, 1)[0];
        String endpoint = "/_snapshot/" + repository;
        Path repositoryLocation = PathUtils.get(".");
        PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(repository);
        putRepositoryRequest.type(FsRepository.TYPE);
        putRepositoryRequest.verify(randomBoolean());

        putRepositoryRequest.settings(
            Settings.builder()
                .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
                .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
                .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES)
                .build());

        Request request = RequestConverters.createRepository(putRepositoryRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
        assertToXContentBody(putRepositoryRequest, request.getEntity());
    }

    public void testDeleteRepository() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];

        StringBuilder endpoint = new StringBuilder("/_snapshot/" + repository);

        DeleteRepositoryRequest deleteRepositoryRequest = new DeleteRepositoryRequest();
        deleteRepositoryRequest.name(repository);
        setRandomMasterTimeout(deleteRepositoryRequest, expectedParams);
        setRandomTimeout(deleteRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request request = RequestConverters.deleteRepository(deleteRepositoryRequest);
        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
        assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testVerifyRepository() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String endpoint = "/_snapshot/" + repository + "/_verify";

        VerifyRepositoryRequest verifyRepositoryRequest = new VerifyRepositoryRequest(repository);
        setRandomMasterTimeout(verifyRepositoryRequest, expectedParams);
        setRandomTimeout(verifyRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request request = RequestConverters.verifyRepository(verifyRepositoryRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
    }

    public void testCreateSnapshot() throws IOException {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0];
        String endpoint = "/_snapshot/" + repository + "/" + snapshot;

        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
        setRandomMasterTimeout(createSnapshotRequest, expectedParams);
        Boolean waitForCompletion = randomBoolean();
        createSnapshotRequest.waitForCompletion(waitForCompletion);

        if (waitForCompletion) {
            expectedParams.put("wait_for_completion", waitForCompletion.toString());
        }

        Request request = RequestConverters.createSnapshot(createSnapshotRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertToXContentBody(createSnapshotRequest, request.getEntity());
    }

    public void testGetSnapshots() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
        String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2);

        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest();
        getSnapshotsRequest.repository(repository);
        getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0]));
        setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

        if (randomBoolean()) {
            boolean ignoreUnavailable = randomBoolean();
            getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
            expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
        } else {
            expectedParams.put("ignore_unavailable", Boolean.FALSE.toString());
        }

        if (randomBoolean()) {
            boolean verbose = randomBoolean();
            getSnapshotsRequest.verbose(verbose);
            expectedParams.put("verbose", Boolean.toString(verbose));
        } else {
            expectedParams.put("verbose", Boolean.TRUE.toString());
        }

        Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testGetAllSnapshots() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository);

        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository);
        setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

        boolean ignoreUnavailable = randomBoolean();
        getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));

        boolean verbose = randomBoolean();
        getSnapshotsRequest.verbose(verbose);
        expectedParams.put("verbose", Boolean.toString(verbose));

        Request request = RequestConverters.getSnapshots(getSnapshotsRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testSnapshotsStatus() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String[] snapshots = randomIndicesNames(1, 5);
        StringBuilder snapshotNames = new StringBuilder(snapshots[0]);
        for (int idx = 1; idx < snapshots.length; idx++) {
            snapshotNames.append(",").append(snapshots[idx]);
        }
        boolean ignoreUnavailable = randomBoolean();
        String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status";

        SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots);
        setRandomMasterTimeout(snapshotsStatusRequest, expectedParams);
        snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable);
        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));

        Request request = RequestConverters.snapshotsStatus(snapshotsStatusRequest);
        assertThat(request.getEndpoint(), equalTo(endpoint));
        assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
        assertThat(request.getParameters(), equalTo(expectedParams));
        assertThat(request.getEntity(), is(nullValue()));
    }

    public void testRestoreSnapshot() throws IOException {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s/_restore", repository, snapshot);

        RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repository, snapshot);
        setRandomMasterTimeout(restoreSnapshotRequest, expectedParams);
        if (randomBoolean()) {
            restoreSnapshotRequest.waitForCompletion(true);
            expectedParams.put("wait_for_completion", "true");
        }
        if (randomBoolean()) {
            String timeout = randomTimeValue();
            restoreSnapshotRequest.masterNodeTimeout(timeout);
            expectedParams.put("master_timeout", timeout);
        }

        Request request = RequestConverters.restoreSnapshot(restoreSnapshotRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertToXContentBody(restoreSnapshotRequest, request.getEntity());
    }

    public void testDeleteSnapshot() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = randomIndicesNames(1, 1)[0];
        String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s", repository, snapshot);

        DeleteSnapshotRequest deleteSnapshotRequest = new DeleteSnapshotRequest();
        deleteSnapshotRequest.repository(repository);
        deleteSnapshotRequest.snapshot(snapshot);
        setRandomMasterTimeout(deleteSnapshotRequest, expectedParams);

        Request request = RequestConverters.deleteSnapshot(deleteSnapshotRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testPutTemplateRequest() throws Exception {
        Map<String, String> names = new HashMap<>();
        names.put("log", "log");

@ -2322,6 +1998,42 @@ public class RequestConvertersTests extends ESTestCase {
        assertThat(request.getEntity(), nullValue());
    }

    public void testPutScript() throws Exception {
        PutStoredScriptRequest putStoredScriptRequest = new PutStoredScriptRequest();

        String id = randomAlphaOfLengthBetween(5, 10);
        putStoredScriptRequest.id(id);

        XContentType xContentType = randomFrom(XContentType.values());
        try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
            builder.startObject();
            builder.startObject("script")
                .field("lang", "painless")
                .field("source", "Math.log(_score * 2) + params.multiplier")
                .endObject();
            builder.endObject();

            putStoredScriptRequest.content(BytesReference.bytes(builder), xContentType);
        }

        Map<String, String> expectedParams = new HashMap<>();
        setRandomMasterTimeout(putStoredScriptRequest, expectedParams);
        setRandomTimeout(putStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        if (randomBoolean()) {
            String context = randomAlphaOfLengthBetween(5, 10);
            putStoredScriptRequest.context(context);
            expectedParams.put("context", context);
        }

        Request request = RequestConverters.putScript(putStoredScriptRequest);

        assertThat(request.getEndpoint(), equalTo("/_scripts/" + id));
        assertThat(request.getParameters(), equalTo(expectedParams));
        assertNotNull(request.getEntity());
        assertToXContentBody(putStoredScriptRequest, request.getEntity());
    }

    public void testAnalyzeRequest() throws Exception {
        AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest()
            .text("Here is some text")

@ -2506,83 +2218,6 @@ public class RequestConvertersTests extends ESTestCase {
            + "previous requests have content-type [" + xContentType + "]", exception.getMessage());
    }

    public void testXPackInfo() {
        XPackInfoRequest infoRequest = new XPackInfoRequest();
        Map<String, String> expectedParams = new HashMap<>();
        infoRequest.setVerbose(randomBoolean());
        if (false == infoRequest.isVerbose()) {
            expectedParams.put("human", "false");
        }
        int option = between(0, 2);
        switch (option) {
        case 0:
            infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
            break;
        case 1:
            infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES));
            expectedParams.put("categories", "features");
            break;
        case 2:
            infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD));
            expectedParams.put("categories", "build,features");
            break;
        default:
            throw new IllegalArgumentException("invalid option [" + option + "]");
        }

        Request request = RequestConverters.xPackInfo(infoRequest);
        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack", request.getEndpoint());
        assertNull(request.getEntity());
        assertEquals(expectedParams, request.getParameters());
    }

    public void testGetMigrationAssistance() {
        IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest();
        String expectedEndpoint = "/_xpack/migration/assistance";
        if (randomBoolean()) {
            String[] indices = randomIndicesNames(1, 5);
            upgradeInfoRequest.indices(indices);
            expectedEndpoint += "/" + String.join(",", indices);
        }
        Map<String, String> expectedParams = new HashMap<>();
        setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions, expectedParams);
        Request request = RequestConverters.getMigrationAssistance(upgradeInfoRequest);
        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
        assertEquals(expectedEndpoint, request.getEndpoint());
        assertNull(request.getEntity());
        assertEquals(expectedParams, request.getParameters());
    }

    public void testXPackPutWatch() throws Exception {
        PutWatchRequest putWatchRequest = new PutWatchRequest();
        String watchId = randomAlphaOfLength(10);
        putWatchRequest.setId(watchId);
        String body = randomAlphaOfLength(20);
        putWatchRequest.setSource(new BytesArray(body), XContentType.JSON);

        Map<String, String> expectedParams = new HashMap<>();
        if (randomBoolean()) {
            putWatchRequest.setActive(false);
            expectedParams.put("active", "false");
        }

        if (randomBoolean()) {
            long version = randomLongBetween(10, 100);
            putWatchRequest.setVersion(version);
            expectedParams.put("version", String.valueOf(version));
        }

        Request request = RequestConverters.xPackWatcherPutWatch(putWatchRequest);
        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint());
        assertEquals(expectedParams, request.getParameters());
        assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        request.getEntity().writeTo(bos);
        assertThat(bos.toString("UTF-8"), is(body));
    }

    public void testGetLifecyclePolicy() {
        String[] policies = rarely() ? null : randomIndicesNames(0, 10);
        GetLifecyclePolicyRequest req = new GetLifecyclePolicyRequest(policies);
@ -2694,17 +2329,6 @@ public class RequestConvertersTests extends ESTestCase {
        assertThat(request.getParameters(), equalTo(expectedParams));
    }

    public void testXPackDeleteWatch() {
        DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest();
        String watchId = randomAlphaOfLength(10);
        deleteWatchRequest.setId(watchId);

        Request request = RequestConverters.xPackWatcherDeleteWatch(deleteWatchRequest);
        assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint());
        assertThat(request.getEntity(), nullValue());
    }

    /**
     * Randomize the {@link FetchSourceContext} request parameters.
     */
@ -2767,8 +2391,8 @@ public class RequestConvertersTests extends ESTestCase {
        }
    }

    private static void setRandomIndicesOptions(Consumer<IndicesOptions> setter, Supplier<IndicesOptions> getter,
                                                Map<String, String> expectedParams) {
    static void setRandomIndicesOptions(Consumer<IndicesOptions> setter, Supplier<IndicesOptions> getter,
                                        Map<String, String> expectedParams) {

        if (randomBoolean()) {
            setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));

@ -672,7 +672,6 @@ public class RestHighLevelClientTests extends ESTestCase {
            "indices.get_upgrade",
            "indices.put_alias",
            "mtermvectors",
            "put_script",
            "reindex_rethrottle",
            "render_search_template",
            "scripts_painless_execute",

@ -20,6 +20,8 @@
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.client.security.DisableUserRequest;
import org.elasticsearch.client.security.EnableUserRequest;
import org.elasticsearch.client.security.PutUserRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.test.ESTestCase;
@ -53,12 +55,7 @@ public class SecurityRequestConvertersTests extends ESTestCase {
        }

        final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
        final Map<String, String> expectedParams;
        if (refreshPolicy != RefreshPolicy.NONE) {
            expectedParams = Collections.singletonMap("refresh", refreshPolicy.getValue());
        } else {
            expectedParams = Collections.emptyMap();
        }
        final Map<String, String> expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy);

        PutUserRequest putUserRequest = new PutUserRequest(username, password, roles, fullName, email, enabled, metadata, refreshPolicy);
        Request request = SecurityRequestConverters.putUser(putUserRequest);
@ -67,4 +64,36 @@ public class SecurityRequestConvertersTests extends ESTestCase {
        assertEquals(expectedParams, request.getParameters());
        assertToXContentBody(putUserRequest, request.getEntity());
    }

    public void testEnableUser() {
        final String username = randomAlphaOfLengthBetween(1, 12);
        final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
        final Map<String, String> expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy);
        EnableUserRequest enableUserRequest = new EnableUserRequest(username, refreshPolicy);
        Request request = SecurityRequestConverters.enableUser(enableUserRequest);
        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/security/user/" + username + "/_enable", request.getEndpoint());
        assertEquals(expectedParams, request.getParameters());
        assertNull(request.getEntity());
    }

    public void testDisableUser() {
        final String username = randomAlphaOfLengthBetween(1, 12);
        final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
        final Map<String, String> expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy);
        DisableUserRequest disableUserRequest = new DisableUserRequest(username, refreshPolicy);
        Request request = SecurityRequestConverters.disableUser(disableUserRequest);
        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
        assertEquals("/_xpack/security/user/" + username + "/_disable", request.getEndpoint());
        assertEquals(expectedParams, request.getParameters());
        assertNull(request.getEntity());
    }

    private static Map<String, String> getExpectedParamsFromRefreshPolicy(RefreshPolicy refreshPolicy) {
        if (refreshPolicy != RefreshPolicy.NONE) {
            return Collections.singletonMap("refresh", refreshPolicy.getValue());
        } else {
            return Collections.emptyMap();
        }
    }
}

@ -0,0 +1,277 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

public class SnapshotRequestConvertersTests extends ESTestCase {

    public void testGetRepositories() {
        Map<String, String> expectedParams = new HashMap<>();
        StringBuilder endpoint = new StringBuilder("/_snapshot");

        GetRepositoriesRequest getRepositoriesRequest = new GetRepositoriesRequest();
        RequestConvertersTests.setRandomMasterTimeout(getRepositoriesRequest, expectedParams);
        RequestConvertersTests.setRandomLocal(getRepositoriesRequest, expectedParams);

        if (randomBoolean()) {
            String[] entries = new String[] { "a", "b", "c" };
            getRepositoriesRequest.repositories(entries);
            endpoint.append("/" + String.join(",", entries));
        }

        Request request = SnapshotRequestConverters.getRepositories(getRepositoriesRequest);
        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
    }

    public void testCreateRepository() throws IOException {
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
        String endpoint = "/_snapshot/" + repository;
        Path repositoryLocation = PathUtils.get(".");
        PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(repository);
        putRepositoryRequest.type(FsRepository.TYPE);
        putRepositoryRequest.verify(randomBoolean());

        putRepositoryRequest.settings(
            Settings.builder()
                .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
                .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
                .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES)
                .build());

        Request request = SnapshotRequestConverters.createRepository(putRepositoryRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
        RequestConvertersTests.assertToXContentBody(putRepositoryRequest, request.getEntity());
    }

    public void testDeleteRepository() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];

        StringBuilder endpoint = new StringBuilder("/_snapshot/" + repository);

        DeleteRepositoryRequest deleteRepositoryRequest = new DeleteRepositoryRequest();
        deleteRepositoryRequest.name(repository);
        RequestConvertersTests.setRandomMasterTimeout(deleteRepositoryRequest, expectedParams);
        RequestConvertersTests.setRandomTimeout(deleteRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request request = SnapshotRequestConverters.deleteRepository(deleteRepositoryRequest);
        assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
        assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testVerifyRepository() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
        String endpoint = "/_snapshot/" + repository + "/_verify";

        VerifyRepositoryRequest verifyRepositoryRequest = new VerifyRepositoryRequest(repository);
        RequestConvertersTests.setRandomMasterTimeout(verifyRepositoryRequest, expectedParams);
        RequestConvertersTests.setRandomTimeout(verifyRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request request = SnapshotRequestConverters.verifyRepository(verifyRepositoryRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
    }

    public void testCreateSnapshot() throws IOException {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
        String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0];
        String endpoint = "/_snapshot/" + repository + "/" + snapshot;

        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot);
        RequestConvertersTests.setRandomMasterTimeout(createSnapshotRequest, expectedParams);
        Boolean waitForCompletion = randomBoolean();
        createSnapshotRequest.waitForCompletion(waitForCompletion);

        if (waitForCompletion) {
            expectedParams.put("wait_for_completion", waitForCompletion.toString());
        }

        Request request = SnapshotRequestConverters.createSnapshot(createSnapshotRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        RequestConvertersTests.assertToXContentBody(createSnapshotRequest, request.getEntity());
    }

    public void testGetSnapshots() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
        String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
        String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2);

        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest();
        getSnapshotsRequest.repository(repository);
        getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0]));
        RequestConvertersTests.setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

        if (randomBoolean()) {
            boolean ignoreUnavailable = randomBoolean();
            getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
            expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
        } else {
            expectedParams.put("ignore_unavailable", Boolean.FALSE.toString());
        }

        if (randomBoolean()) {
            boolean verbose = randomBoolean();
            getSnapshotsRequest.verbose(verbose);
            expectedParams.put("verbose", Boolean.toString(verbose));
        } else {
            expectedParams.put("verbose", Boolean.TRUE.toString());
        }

        Request request = SnapshotRequestConverters.getSnapshots(getSnapshotsRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testGetAllSnapshots() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];

        String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository);

        GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository);
        RequestConvertersTests.setRandomMasterTimeout(getSnapshotsRequest, expectedParams);

        boolean ignoreUnavailable = randomBoolean();
        getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable);
        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));

        boolean verbose = randomBoolean();
        getSnapshotsRequest.verbose(verbose);
        expectedParams.put("verbose", Boolean.toString(verbose));

        Request request = SnapshotRequestConverters.getSnapshots(getSnapshotsRequest);
        assertThat(endpoint, equalTo(request.getEndpoint()));
        assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
        assertThat(expectedParams, equalTo(request.getParameters()));
        assertNull(request.getEntity());
    }

    public void testSnapshotsStatus() {
        Map<String, String> expectedParams = new HashMap<>();
        String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
        String[] snapshots = RequestConvertersTests.randomIndicesNames(1, 5);
        StringBuilder snapshotNames = new StringBuilder(snapshots[0]);
        for (int idx = 1; idx < snapshots.length; idx++) {
            snapshotNames.append(",").append(snapshots[idx]);
        }
        boolean ignoreUnavailable = randomBoolean();
        String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status";

        SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots);
        RequestConvertersTests.setRandomMasterTimeout(snapshotsStatusRequest, expectedParams);
|
||||
snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable);
|
||||
expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
|
||||
|
||||
Request request = SnapshotRequestConverters.snapshotsStatus(snapshotsStatusRequest);
|
||||
assertThat(request.getEndpoint(), equalTo(endpoint));
|
||||
assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
|
||||
assertThat(request.getParameters(), equalTo(expectedParams));
|
||||
assertThat(request.getEntity(), is(nullValue()));
|
||||
}
|
||||
|
||||
public void testRestoreSnapshot() throws IOException {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
|
||||
String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
|
||||
String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s/_restore", repository, snapshot);
|
||||
|
||||
RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repository, snapshot);
|
||||
RequestConvertersTests.setRandomMasterTimeout(restoreSnapshotRequest, expectedParams);
|
||||
if (randomBoolean()) {
|
||||
restoreSnapshotRequest.waitForCompletion(true);
|
||||
expectedParams.put("wait_for_completion", "true");
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
String timeout = randomTimeValue();
|
||||
restoreSnapshotRequest.masterNodeTimeout(timeout);
|
||||
expectedParams.put("master_timeout", timeout);
|
||||
}
|
||||
|
||||
Request request = SnapshotRequestConverters.restoreSnapshot(restoreSnapshotRequest);
|
||||
assertThat(endpoint, equalTo(request.getEndpoint()));
|
||||
assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(expectedParams, equalTo(request.getParameters()));
|
||||
RequestConvertersTests.assertToXContentBody(restoreSnapshotRequest, request.getEntity());
|
||||
}
|
||||
|
||||
public void testDeleteSnapshot() {
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0];
|
||||
String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT);
|
||||
|
||||
String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s", repository, snapshot);
|
||||
|
||||
DeleteSnapshotRequest deleteSnapshotRequest = new DeleteSnapshotRequest();
|
||||
deleteSnapshotRequest.repository(repository);
|
||||
deleteSnapshotRequest.snapshot(snapshot);
|
||||
RequestConvertersTests.setRandomMasterTimeout(deleteSnapshotRequest, expectedParams);
|
||||
|
||||
Request request = SnapshotRequestConverters.deleteSnapshot(deleteSnapshotRequest);
|
||||
assertThat(endpoint, equalTo(request.getEndpoint()));
|
||||
assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod()));
|
||||
assertThat(expectedParams, equalTo(request.getParameters()));
|
||||
assertNull(request.getEntity());
|
||||
}
|
||||
}
|
@ -1,4 +1,5 @@
|
||||
package org.elasticsearch.client;/*
|
||||
package org.elasticsearch.client;
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
@ -17,27 +18,27 @@ package org.elasticsearch.client;/*
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.elasticsearch.ElasticsearchStatusException;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.StoredScriptSource;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
|
||||
|
||||
final String id = "calculate-score";
|
||||
private static final String id = "calculate-score";
|
||||
|
||||
public void testGetStoredScript() throws Exception {
|
||||
final StoredScriptSource scriptSource =
|
||||
@ -45,13 +46,9 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
|
||||
"Math.log(_score * 2) + params.my_modifier",
|
||||
Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));
|
||||
|
||||
final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
|
||||
// TODO: change to HighLevel PutStoredScriptRequest when it will be ready
|
||||
// so far - using low-level REST API
|
||||
Request putRequest = new Request("PUT", "/_scripts/calculate-score");
|
||||
putRequest.setJsonEntity("{\"script\":" + script + "}");
|
||||
Response putResponse = adminClient().performRequest(putRequest);
|
||||
assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
|
||||
PutStoredScriptRequest request =
|
||||
new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource);
|
||||
assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync));
|
||||
|
||||
GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score");
|
||||
getRequest.masterNodeTimeout("50s");
|
||||
@ -68,22 +65,14 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
|
||||
"Math.log(_score * 2) + params.my_modifier",
|
||||
Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));
|
||||
|
||||
final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
|
||||
// TODO: change to HighLevel PutStoredScriptRequest when it will be ready
|
||||
// so far - using low-level REST API
|
||||
Request putRequest = new Request("PUT", "/_scripts/" + id);
|
||||
putRequest.setJsonEntity("{\"script\":" + script + "}");
|
||||
Response putResponse = adminClient().performRequest(putRequest);
|
||||
assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
|
||||
PutStoredScriptRequest request =
|
||||
new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource);
|
||||
assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync));
|
||||
|
||||
DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id);
|
||||
deleteRequest.masterNodeTimeout("50s");
|
||||
deleteRequest.timeout("50s");
|
||||
|
||||
AcknowledgedResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript,
|
||||
highLevelClient()::deleteScriptAsync);
|
||||
|
||||
assertThat(deleteResponse.isAcknowledged(), equalTo(true));
|
||||
assertAcked(execute(deleteRequest, highLevelClient()::deleteScript, highLevelClient()::deleteScriptAsync));
|
||||
|
||||
GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id);
|
||||
|
||||
@ -92,4 +81,21 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
|
||||
highLevelClient()::getScriptAsync));
|
||||
assertThat(statusException.status(), equalTo(RestStatus.NOT_FOUND));
|
||||
}
|
||||
|
||||
public void testPutScript() throws Exception {
|
||||
final StoredScriptSource scriptSource =
|
||||
new StoredScriptSource("painless",
|
||||
"Math.log(_score * 2) + params.my_modifier",
|
||||
Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));
|
||||
|
||||
PutStoredScriptRequest request =
|
||||
new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource);
|
||||
assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync));
|
||||
|
||||
Map<String, Object> script = getAsMap("/_scripts/" + id);
|
||||
assertThat(extractValue("_id", script), equalTo(id));
|
||||
assertThat(extractValue("found", script), equalTo(true));
|
||||
assertThat(extractValue("script.lang", script), equalTo("painless"));
|
||||
assertThat(extractValue("script.source", script), equalTo("Math.log(_score * 2) + params.my_modifier"));
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpDelete;
|
||||
import org.apache.http.client.methods.HttpPut;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
|
||||
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
public class WatcherRequestConvertersTests extends ESTestCase {
|
||||
|
||||
public void testPutWatch() throws Exception {
|
||||
PutWatchRequest putWatchRequest = new PutWatchRequest();
|
||||
String watchId = randomAlphaOfLength(10);
|
||||
putWatchRequest.setId(watchId);
|
||||
String body = randomAlphaOfLength(20);
|
||||
putWatchRequest.setSource(new BytesArray(body), XContentType.JSON);
|
||||
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
if (randomBoolean()) {
|
||||
putWatchRequest.setActive(false);
|
||||
expectedParams.put("active", "false");
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
long version = randomLongBetween(10, 100);
|
||||
putWatchRequest.setVersion(version);
|
||||
expectedParams.put("version", String.valueOf(version));
|
||||
}
|
||||
|
||||
Request request = WatcherRequestConverters.putWatch(putWatchRequest);
|
||||
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint());
|
||||
assertEquals(expectedParams, request.getParameters());
|
||||
assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream();
|
||||
request.getEntity().writeTo(bos);
|
||||
assertThat(bos.toString("UTF-8"), is(body));
|
||||
}
|
||||
|
||||
public void testDeleteWatch() {
|
||||
DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest();
|
||||
String watchId = randomAlphaOfLength(10);
|
||||
deleteWatchRequest.setId(watchId);
|
||||
|
||||
Request request = WatcherRequestConverters.deleteWatch(deleteWatchRequest);
|
||||
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
|
||||
assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint());
|
||||
assertThat(request.getEntity(), nullValue());
|
||||
}
|
||||
}
|
@ -0,0 +1,63 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client;
|
||||
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.junit.Assert;
|
||||
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class XPackRequestConvertersTests extends ESTestCase {
|
||||
|
||||
public void testXPackInfo() {
|
||||
XPackInfoRequest infoRequest = new XPackInfoRequest();
|
||||
Map<String, String> expectedParams = new HashMap<>();
|
||||
infoRequest.setVerbose(ESTestCase.randomBoolean());
|
||||
if (false == infoRequest.isVerbose()) {
|
||||
expectedParams.put("human", "false");
|
||||
}
|
||||
int option = ESTestCase.between(0, 2);
|
||||
switch (option) {
|
||||
case 0:
|
||||
infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
|
||||
break;
|
||||
case 1:
|
||||
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES));
|
||||
expectedParams.put("categories", "features");
|
||||
break;
|
||||
case 2:
|
||||
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD));
|
||||
expectedParams.put("categories", "build,features");
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid option [" + option + "]");
|
||||
}
|
||||
|
||||
Request request = XPackRequestConverters.info(infoRequest);
|
||||
Assert.assertEquals(HttpGet.METHOD_NAME, request.getMethod());
|
||||
Assert.assertEquals("/_xpack", request.getEndpoint());
|
||||
Assert.assertNull(request.getEntity());
|
||||
Assert.assertEquals(expectedParams, request.getParameters());
|
||||
}
|
||||
}
|
@ -35,6 +35,8 @@ import org.elasticsearch.client.ml.DeleteJobRequest;
|
||||
import org.elasticsearch.client.ml.DeleteJobResponse;
|
||||
import org.elasticsearch.client.ml.FlushJobRequest;
|
||||
import org.elasticsearch.client.ml.FlushJobResponse;
|
||||
import org.elasticsearch.client.ml.ForecastJobRequest;
|
||||
import org.elasticsearch.client.ml.ForecastJobResponse;
|
||||
import org.elasticsearch.client.ml.GetBucketsRequest;
|
||||
import org.elasticsearch.client.ml.GetBucketsResponse;
|
||||
import org.elasticsearch.client.ml.GetInfluencersRequest;
|
||||
@ -49,6 +51,8 @@ import org.elasticsearch.client.ml.GetRecordsRequest;
|
||||
import org.elasticsearch.client.ml.GetRecordsResponse;
|
||||
import org.elasticsearch.client.ml.OpenJobRequest;
|
||||
import org.elasticsearch.client.ml.OpenJobResponse;
|
||||
import org.elasticsearch.client.ml.PostDataRequest;
|
||||
import org.elasticsearch.client.ml.PostDataResponse;
|
||||
import org.elasticsearch.client.ml.PutJobRequest;
|
||||
import org.elasticsearch.client.ml.PutJobResponse;
|
||||
import org.elasticsearch.client.ml.UpdateJobRequest;
|
||||
@ -58,6 +62,7 @@ import org.elasticsearch.client.ml.job.config.DataDescription;
|
||||
import org.elasticsearch.client.ml.job.config.DetectionRule;
|
||||
import org.elasticsearch.client.ml.job.config.Detector;
|
||||
import org.elasticsearch.client.ml.job.config.Job;
|
||||
import org.elasticsearch.client.ml.job.process.DataCounts;
|
||||
import org.elasticsearch.client.ml.job.config.JobUpdate;
|
||||
import org.elasticsearch.client.ml.job.config.ModelPlotConfig;
|
||||
import org.elasticsearch.client.ml.job.config.Operator;
|
||||
@ -691,6 +696,73 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
public void testForecastJob() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
Job job = MachineLearningIT.buildJob("forecasting-my-first-machine-learning-job");
|
||||
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
|
||||
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
|
||||
|
||||
PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
|
||||
for(int i = 0; i < 30; i++) {
|
||||
Map<String, Object> hashMap = new HashMap<>();
|
||||
hashMap.put("total", randomInt(1000));
|
||||
hashMap.put("timestamp", (i+1)*1000);
|
||||
builder.addDoc(hashMap);
|
||||
}
|
||||
PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
|
||||
client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
|
||||
client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);
|
||||
|
||||
{
|
||||
//tag::x-pack-ml-forecast-job-request
|
||||
ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); //<1>
|
||||
//end::x-pack-ml-forecast-job-request
|
||||
|
||||
//tag::x-pack-ml-forecast-job-request-options
|
||||
forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); //<1>
|
||||
forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); //<2>
|
||||
//end::x-pack-ml-forecast-job-request-options
|
||||
|
||||
//tag::x-pack-ml-forecast-job-execute
|
||||
ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT);
|
||||
//end::x-pack-ml-forecast-job-execute
|
||||
|
||||
//tag::x-pack-ml-forecast-job-response
|
||||
boolean isAcknowledged = forecastJobResponse.isAcknowledged(); //<1>
|
||||
String forecastId = forecastJobResponse.getForecastId(); //<2>
|
||||
//end::x-pack-ml-forecast-job-response
|
||||
assertTrue(isAcknowledged);
|
||||
assertNotNull(forecastId);
|
||||
}
|
||||
{
|
||||
//tag::x-pack-ml-forecast-job-listener
|
||||
ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() {
|
||||
@Override
|
||||
public void onResponse(ForecastJobResponse forecastJobResponse) {
|
||||
//<1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::x-pack-ml-forecast-job-listener
|
||||
ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job");
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-ml-forecast-job-execute-async
|
||||
client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); //<1>
|
||||
// end::x-pack-ml-forecast-job-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetOverallBuckets() throws IOException, InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
@ -882,6 +954,73 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
public void testPostData() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
Job job = MachineLearningIT.buildJob("test-post-data");
|
||||
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
|
||||
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
|
||||
|
||||
{
|
||||
//tag::x-pack-ml-post-data-request
|
||||
PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); //<1>
|
||||
Map<String, Object> mapData = new HashMap<>();
|
||||
mapData.put("total", 109);
|
||||
jsonBuilder.addDoc(mapData); //<2>
|
||||
jsonBuilder.addDoc("{\"total\":1000}"); //<3>
|
||||
PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); //<4>
|
||||
//end::x-pack-ml-post-data-request
|
||||
|
||||
|
||||
//tag::x-pack-ml-post-data-request-options
|
||||
postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); //<1>
|
||||
postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); //<2>
|
||||
//end::x-pack-ml-post-data-request-options
|
||||
postDataRequest.setResetEnd(null);
|
||||
postDataRequest.setResetStart(null);
|
||||
|
||||
//tag::x-pack-ml-post-data-execute
|
||||
PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
|
||||
//end::x-pack-ml-post-data-execute
|
||||
|
||||
//tag::x-pack-ml-post-data-response
|
||||
DataCounts dataCounts = postDataResponse.getDataCounts(); //<1>
|
||||
//end::x-pack-ml-post-data-response
|
||||
assertEquals(2, dataCounts.getInputRecordCount());
|
||||
|
||||
}
|
||||
{
|
||||
//tag::x-pack-ml-post-data-listener
|
||||
ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() {
|
||||
@Override
|
||||
public void onResponse(PostDataResponse postDataResponse) {
|
||||
//<1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::x-pack-ml-post-data-listener
|
||||
PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
|
||||
Map<String, Object> mapData = new HashMap<>();
|
||||
mapData.put("total", 109);
|
||||
jsonBuilder.addDoc(mapData);
|
||||
PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); //<1>
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-ml-post-data-execute-async
|
||||
client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); //<1>
|
||||
// end::x-pack-ml-post-data-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetInfluencers() throws IOException, InterruptedException {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
|
@ -24,9 +24,12 @@ import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.security.DisableUserRequest;
|
||||
import org.elasticsearch.client.security.EnableUserRequest;
|
||||
import org.elasticsearch.client.security.PutUserRequest;
|
||||
import org.elasticsearch.client.security.PutUserResponse;
|
||||
import org.elasticsearch.client.security.RefreshPolicy;
|
||||
import org.elasticsearch.client.security.EmptyResponse;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
@ -38,16 +41,16 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
//tag::x-pack-put-user-execute
|
||||
//tag::put-user-execute
|
||||
char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' };
|
||||
PutUserRequest request =
|
||||
new PutUserRequest("example", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE);
|
||||
PutUserResponse response = client.security().putUser(request, RequestOptions.DEFAULT);
|
||||
//end::x-pack-put-user-execute
|
||||
//end::put-user-execute
|
||||
|
||||
//tag::x-pack-put-user-response
|
||||
//tag::put-user-response
|
||||
boolean isCreated = response.isCreated(); // <1>
|
||||
//end::x-pack-put-user-response
|
||||
//end::put-user-response
|
||||
|
||||
assertTrue(isCreated);
|
||||
}
|
||||
@ -56,7 +59,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' };
|
||||
PutUserRequest request = new PutUserRequest("example2", password, Collections.singletonList("superuser"), null, null, true,
|
||||
null, RefreshPolicy.NONE);
|
||||
// tag::x-pack-put-user-execute-listener
|
||||
// tag::put-user-execute-listener
|
||||
ActionListener<PutUserResponse> listener = new ActionListener<PutUserResponse>() {
|
||||
@Override
|
||||
public void onResponse(PutUserResponse response) {
|
||||
@ -68,15 +71,104 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::x-pack-put-user-execute-listener
|
||||
// end::put-user-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::x-pack-put-user-execute-async
|
||||
// tag::put-user-execute-async
|
||||
client.security().putUserAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::x-pack-put-user-execute-async
|
||||
// end::put-user-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testEnableUser() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||
PutUserRequest putUserRequest = new PutUserRequest("enable_user", password, Collections.singletonList("superuser"), null,
|
||||
null, true, null, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putUserResponse.isCreated());
|
||||
|
||||
{
|
||||
//tag::enable-user-execute
|
||||
EnableUserRequest request = new EnableUserRequest("enable_user", RefreshPolicy.NONE);
|
||||
EmptyResponse response = client.security().enableUser(request, RequestOptions.DEFAULT);
|
||||
//end::enable-user-execute
|
||||
|
||||
assertNotNull(response);
|
||||
}
|
||||
|
||||
{
|
||||
//tag::enable-user-execute-listener
|
||||
EnableUserRequest request = new EnableUserRequest("enable_user", RefreshPolicy.NONE);
|
||||
ActionListener<EmptyResponse> listener = new ActionListener<EmptyResponse>() {
|
||||
@Override
|
||||
public void onResponse(EmptyResponse setUserEnabledResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::enable-user-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::enable-user-execute-async
|
||||
client.security().enableUserAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::enable-user-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
}
|
||||
|
||||
public void testDisableUser() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||
PutUserRequest putUserRequest = new PutUserRequest("disable_user", password, Collections.singletonList("superuser"), null,
|
||||
null, true, null, RefreshPolicy.IMMEDIATE);
|
||||
PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT);
|
||||
assertTrue(putUserResponse.isCreated());
|
||||
{
|
||||
//tag::disable-user-execute
|
||||
DisableUserRequest request = new DisableUserRequest("disable_user", RefreshPolicy.NONE);
|
||||
EmptyResponse response = client.security().disableUser(request, RequestOptions.DEFAULT);
|
||||
//end::disable-user-execute
|
||||
|
||||
assertNotNull(response);
|
||||
}
|
||||
|
||||
{
|
||||
//tag::disable-user-execute-listener
|
||||
DisableUserRequest request = new DisableUserRequest("disable_user", RefreshPolicy.NONE);
|
||||
ActionListener<EmptyResponse> listener = new ActionListener<EmptyResponse>() {
|
||||
@Override
|
||||
public void onResponse(EmptyResponse setUserEnabledResponse) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
//end::disable-user-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::disable-user-execute-async
|
||||
client.security().disableUserAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::disable-user-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
@ -17,21 +17,21 @@ package org.elasticsearch.client.documentation;/*
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.LatchedActionListener;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
|
||||
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
|
||||
import org.elasticsearch.action.support.master.AcknowledgedResponse;
|
||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||
import org.elasticsearch.client.Request;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.StoredScriptSource;
|
||||
@ -42,7 +42,8 @@ import java.util.Map;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
||||
@ -187,14 +188,124 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
public void testPutScript() throws Exception {
|
||||
RestHighLevelClient client = highLevelClient();
|
||||
|
||||
{
|
||||
// tag::put-stored-script-request
|
||||
PutStoredScriptRequest request = new PutStoredScriptRequest();
|
||||
request.id("id"); // <1>
|
||||
request.content(new BytesArray(
|
||||
"{\n" +
|
||||
"\"script\": {\n" +
|
||||
"\"lang\": \"painless\",\n" +
|
||||
"\"source\": \"Math.log(_score * 2) + params.multiplier\"" +
|
||||
"}\n" +
|
||||
"}\n"
|
||||
), XContentType.JSON); // <2>
|
||||
// end::put-stored-script-request
|
||||
|
||||
// tag::put-stored-script-context
|
||||
request.context("context"); // <1>
|
||||
// end::put-stored-script-context
|
||||
|
||||
// tag::put-stored-script-timeout
|
||||
request.timeout(TimeValue.timeValueMinutes(2)); // <1>
|
||||
request.timeout("2m"); // <2>
|
||||
// end::put-stored-script-timeout
|
||||
|
||||
// tag::put-stored-script-masterTimeout
|
||||
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
|
||||
request.masterNodeTimeout("1m"); // <2>
|
||||
// end::put-stored-script-masterTimeout
|
||||
}
|
||||
|
||||
{
|
||||
PutStoredScriptRequest request = new PutStoredScriptRequest();
|
||||
request.id("id");
|
||||
|
||||
// tag::put-stored-script-content-painless
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startObject("script");
|
||||
{
|
||||
builder.field("lang", "painless");
|
||||
builder.field("source", "Math.log(_score * 2) + params.multiplier");
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
request.content(BytesReference.bytes(builder), XContentType.JSON); // <1>
|
||||
// end::put-stored-script-content-painless
|
||||
|
||||
|
||||
// tag::put-stored-script-execute
|
||||
AcknowledgedResponse putStoredScriptResponse = client.putScript(request, RequestOptions.DEFAULT);
|
||||
// end::put-stored-script-execute
|
||||
|
||||
// tag::put-stored-script-response
|
||||
boolean acknowledged = putStoredScriptResponse.isAcknowledged(); // <1>
|
||||
// end::put-stored-script-response
|
||||
|
||||
assertTrue(acknowledged);
|
||||
|
||||
// tag::put-stored-script-execute-listener
|
||||
ActionListener<AcknowledgedResponse> listener =
|
||||
new ActionListener<AcknowledgedResponse>() {
|
||||
@Override
|
||||
public void onResponse(AcknowledgedResponse response) {
|
||||
// <1>
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
// <2>
|
||||
}
|
||||
};
|
||||
// end::put-stored-script-execute-listener
|
||||
|
||||
// Replace the empty listener by a blocking listener in test
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
listener = new LatchedActionListener<>(listener, latch);
|
||||
|
||||
// tag::put-stored-script-execute-async
|
||||
client.putScriptAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||
// end::put-stored-script-execute-async
|
||||
|
||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||
}
|
||||
|
||||
{
|
||||
PutStoredScriptRequest request = new PutStoredScriptRequest();
|
||||
request.id("id");
|
||||
|
||||
// tag::put-stored-script-content-mustache
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
{
|
||||
builder.startObject("script");
|
||||
{
|
||||
builder.field("lang", "mustache");
|
||||
builder.field("source", "{\"query\":{\"match\":{\"title\":\"{{query_string}}\"}}}");
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
request.content(BytesReference.bytes(builder), XContentType.JSON); // <1>
|
||||
// end::put-stored-script-content-mustache
|
||||
|
||||
client.putScript(request, RequestOptions.DEFAULT);
|
||||
|
||||
Map<String, Object> script = getAsMap("/_scripts/id");
|
||||
assertThat(extractValue("script.lang", script), equalTo("mustache"));
|
||||
assertThat(extractValue("script.source", script), equalTo("{\"query\":{\"match\":{\"title\":\"{{query_string}}\"}}}"));
|
||||
}
|
||||
}
|
||||
|
||||
private void putStoredScript(String id, StoredScriptSource scriptSource) throws IOException {
|
||||
final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
|
||||
// TODO: change to HighLevel PutStoredScriptRequest when it will be ready
|
||||
// so far - using low-level REST API
|
||||
Request request = new Request("PUT", "/_scripts/" + id);
|
||||
request.setJsonEntity("{\"script\":" + script + "}");
|
||||
Response putResponse = adminClient().performRequest(request);
|
||||
assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
|
||||
assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
|
||||
PutStoredScriptRequest request =
|
||||
new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource);
|
||||
assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync));
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,51 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class ForecastJobRequestTests extends AbstractXContentTestCase<ForecastJobRequest> {
|
||||
|
||||
@Override
|
||||
protected ForecastJobRequest createTestInstance() {
|
||||
ForecastJobRequest request = new ForecastJobRequest(randomAlphaOfLengthBetween(1, 20));
|
||||
|
||||
if (randomBoolean()) {
|
||||
request.setExpiresIn(TimeValue.timeValueHours(randomInt(10)));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
request.setDuration(TimeValue.timeValueHours(randomIntBetween(24, 72)));
|
||||
}
|
||||
return request;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ForecastJobRequest doParseInstance(XContentParser parser) throws IOException {
|
||||
return ForecastJobRequest.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
}
|
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class ForecastJobResponseTests extends AbstractXContentTestCase<ForecastJobResponse> {
|
||||
|
||||
@Override
|
||||
protected ForecastJobResponse createTestInstance() {
|
||||
return new ForecastJobResponse(randomBoolean(),randomAlphaOfLength(10));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ForecastJobResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return ForecastJobResponse.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
@ -0,0 +1,90 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
|
||||
public class PostDataRequestTests extends AbstractXContentTestCase<PostDataRequest> {
|
||||
|
||||
@Override
|
||||
protected PostDataRequest createTestInstance() {
|
||||
String jobId = randomAlphaOfLength(10);
|
||||
XContentType contentType = randomFrom(XContentType.JSON, XContentType.SMILE);
|
||||
|
||||
PostDataRequest request = new PostDataRequest(jobId, contentType, new byte[0]);
|
||||
if (randomBoolean()) {
|
||||
request.setResetEnd(randomAlphaOfLength(10));
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
request.setResetStart(randomAlphaOfLength(10));
|
||||
}
|
||||
|
||||
return request;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PostDataRequest doParseInstance(XContentParser parser) {
|
||||
return PostDataRequest.PARSER.apply(parser, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void testJsonBuilder() throws IOException {
|
||||
|
||||
String jobId = randomAlphaOfLength(10);
|
||||
PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
|
||||
|
||||
Map<String, Object> obj1 = new HashMap<>();
|
||||
obj1.put("entry1", "value1");
|
||||
obj1.put("entry2", "value2");
|
||||
builder.addDoc(obj1);
|
||||
|
||||
builder.addDoc("{\"entry3\":\"value3\"}");
|
||||
builder.addDoc("{\"entry4\":\"value4\"}".getBytes(StandardCharsets.UTF_8));
|
||||
|
||||
PostDataRequest request = new PostDataRequest(jobId, builder);
|
||||
|
||||
assertEquals("{\"entry1\":\"value1\",\"entry2\":\"value2\"}{\"entry3\":\"value3\"}{\"entry4\":\"value4\"}",
|
||||
request.getContent().utf8ToString());
|
||||
assertEquals(XContentType.JSON, request.getXContentType());
|
||||
assertEquals(jobId, request.getJobId());
|
||||
}
|
||||
|
||||
public void testFromByteArray() {
|
||||
String jobId = randomAlphaOfLength(10);
|
||||
PostDataRequest request = new PostDataRequest(jobId,
|
||||
XContentType.JSON,
|
||||
"{\"others\":{\"foo\":100}}".getBytes(StandardCharsets.UTF_8));
|
||||
|
||||
assertEquals("{\"others\":{\"foo\":100}}", request.getContent().utf8ToString());
|
||||
assertEquals(XContentType.JSON, request.getXContentType());
|
||||
assertEquals(jobId, request.getJobId());
|
||||
}
|
||||
}
|
@ -0,0 +1,43 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.client.ml;
|
||||
|
||||
import org.elasticsearch.client.ml.job.process.DataCountsTests;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.test.AbstractXContentTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class PostDataResponseTests extends AbstractXContentTestCase<PostDataResponse> {
|
||||
|
||||
@Override
|
||||
protected PostDataResponse createTestInstance() {
|
||||
return new PostDataResponse(DataCountsTests.createTestInstance(randomAlphaOfLength(10)));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PostDataResponse doParseInstance(XContentParser parser) throws IOException {
|
||||
return PostDataResponse.fromXContent(parser);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean supportsUnknownFields() {
|
||||
return true;
|
||||
}
|
||||
}
|
@ -0,0 +1,51 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.security;
|
||||
|
||||
import org.elasticsearch.common.xcontent.DeprecationHandler;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentParseException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
||||
public class EmptyResponseTests extends ESTestCase {
|
||||
|
||||
public void testParseFromXContent() throws IOException {
|
||||
try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
|
||||
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{}")) {
|
||||
|
||||
EmptyResponse response = EmptyResponse.fromXContent(parser);
|
||||
assertNotNull(response);
|
||||
}
|
||||
|
||||
try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
|
||||
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{\"foo\": \"bar\"}")) {
|
||||
|
||||
XContentParseException exception =
|
||||
expectThrows(XContentParseException.class, () -> EmptyResponse.fromXContent(parser));
|
||||
assertThat(exception.getMessage(), containsString("field [foo]"));
|
||||
}
|
||||
}
|
||||
}
|
@ -1,2 +1,23 @@
|
||||
// This file is intentionally blank. All configuration of the
|
||||
// distribution is done in the parent project.
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
integTestRunner {
|
||||
systemProperty 'tests.logfile',
|
||||
"${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }.log"
|
||||
}
|
||||
|
@ -0,0 +1,43 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.unconfigurednodename;
|
||||
|
||||
import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.BufferedReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
|
||||
public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
|
||||
@Override
|
||||
protected BufferedReader openReader(Path logFile) throws IOException {
|
||||
return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
|
||||
try {
|
||||
return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -20,6 +20,7 @@
|
||||
package org.elasticsearch.test.rest;
|
||||
|
||||
import org.apache.http.util.EntityUtils;
|
||||
import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
|
||||
import org.elasticsearch.action.ActionFuture;
|
||||
import org.elasticsearch.action.support.PlainActionFuture;
|
||||
import org.elasticsearch.client.Response;
|
||||
@ -37,7 +38,8 @@ import java.util.Map;
|
||||
/**
|
||||
* Tests that wait for refresh is fired if the index is closed.
|
||||
*/
|
||||
public class WaitForRefreshAndCloseTests extends ESRestTestCase {
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533")
|
||||
public class WaitForRefreshAndCloseIT extends ESRestTestCase {
|
||||
@Before
|
||||
public void setupIndex() throws IOException {
|
||||
try {
|
@ -0,0 +1,4 @@
|
||||
grant {
|
||||
// Needed to read the log file
|
||||
permission java.io.FilePermission "${tests.logfile}", "read";
|
||||
};
|
76
docs/java-rest/high-level/ml/forecast-job.asciidoc
Normal file
76
docs/java-rest/high-level/ml/forecast-job.asciidoc
Normal file
@ -0,0 +1,76 @@
|
||||
[[java-rest-high-x-pack-ml-forecast-job]]
|
||||
=== Forecast Job API
|
||||
|
||||
The Forecast Job API provides the ability to forecast a {ml} job's behavior based
|
||||
on historical data.
|
||||
It accepts a `ForecastJobRequest` object and responds
|
||||
with a `ForecastJobResponse` object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-forecast-job-request]]
|
||||
==== Forecast Job Request
|
||||
|
||||
A `ForecastJobRequest` object gets created with an existing non-null `jobId`.
|
||||
All other fields are optional for the request.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request]
|
||||
--------------------------------------------------
|
||||
<1> Constructing a new request referencing an existing `jobId`
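
As a minimal sketch (assuming a job with the given ID already exists), the request can be built directly from the job ID; `"my-job-id"` is a placeholder:

["source","java"]
--------------------------------------------------
// Build a forecast request for an existing job; the job ID is illustrative.
ForecastJobRequest forecastJobRequest = new ForecastJobRequest("my-job-id");
--------------------------------------------------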
|
||||
|
||||
==== Optional Arguments
|
||||
|
||||
The following arguments are optional.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request-options]
|
||||
--------------------------------------------------
|
||||
<1> Set when the forecast for the job should expire
|
||||
<2> Set how far into the future the forecast should predict
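
For illustration, a hedged sketch of configuring these options; the setter names `setExpiresIn` and `setDuration` are assumptions inferred from the callouts above rather than taken from this document:

["source","java"]
--------------------------------------------------
// Assumed setter names; both take a TimeValue in this sketch.
forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // when the forecast should expire
forecastJobRequest.setDuration(TimeValue.timeValueHours(24));  // how far ahead to forecast
--------------------------------------------------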
|
||||
|
||||
[[java-rest-high-x-pack-ml-forecast-job-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-forecast-job-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `ForecastJobRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `ForecastJobResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-forecast-job-response]]
|
||||
==== Forecast Job Response
|
||||
|
||||
A `ForecastJobResponse` contains an acknowledgement and the forecast ID
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-response]
|
||||
--------------------------------------------------
|
||||
<1> `isAcknowledged()` indicates if the forecast was successful
|
||||
<2> `getForecastId()` provides the ID of the forecast that was created
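
A short sketch of reading those two fields from the response (the variable name is illustrative):

["source","java"]
--------------------------------------------------
boolean acknowledged = forecastJobResponse.isAcknowledged(); // true if the forecast was started
String forecastId = forecastJobResponse.getForecastId();     // ID of the newly created forecast
--------------------------------------------------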
|
86
docs/java-rest/high-level/ml/post-data.asciidoc
Normal file
@ -0,0 +1,86 @@
|
||||
[[java-rest-high-x-pack-ml-post-data]]
|
||||
=== Post Data API
|
||||
|
||||
The Post Data API provides the ability to post data to an open
|
||||
{ml} job in the cluster.
|
||||
It accepts a `PostDataRequest` object and responds
|
||||
with a `PostDataResponse` object.
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-request]]
|
||||
==== Post Data Request
|
||||
|
||||
A `PostDataRequest` object gets created with an existing non-null `jobId`
|
||||
and the `XContentType` being sent. Individual docs can be added
|
||||
incrementally via the `PostDataRequest.JsonBuilder#addDoc` method.
|
||||
These are then serialized and sent in bulk when passed to the `PostDataRequest`.
|
||||
|
||||
Alternatively, the serialized bulk content can be set manually, along with its `XContentType`
|
||||
through one of the other `PostDataRequest` constructors.
|
||||
|
||||
Only `XContentType.JSON` and `XContentType.SMILE` are supported.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request]
|
||||
--------------------------------------------------
|
||||
<1> Create a new `PostDataRequest.JsonBuilder` object for incrementally adding documents
|
||||
<2> Add a new document as a `Map<String, Object>` object
|
||||
<3> Add a new document as a serialized JSON formatted String.
|
||||
<4> Constructing a new request referencing an opened `jobId`, and a JsonBuilder
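
Putting the pieces above together, a minimal sketch might look like this (the job ID and document contents are placeholders):

["source","java"]
--------------------------------------------------
// Incrementally collect documents, then wrap them in a request for an open job.
PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();

Map<String, Object> mapDoc = new HashMap<>();
mapDoc.put("total", 109);
jsonBuilder.addDoc(mapDoc);               // document as a Map

jsonBuilder.addDoc("{\"total\":1000}");   // document as a serialized JSON string

PostDataRequest postDataRequest = new PostDataRequest("my-job-id", jsonBuilder);
--------------------------------------------------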
|
||||
|
||||
==== Optional Arguments
|
||||
|
||||
The following arguments are optional.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request-options]
|
||||
--------------------------------------------------
|
||||
<1> Set the start of the bucket resetting time
|
||||
<2> Set the end of the bucket resetting time
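
A hedged sketch of these options; the setter names `setResetStart` and `setResetEnd` are assumptions based on the callouts above, and the timestamps are placeholders:

["source","java"]
--------------------------------------------------
// Assumed setters for the bucket resetting window.
postDataRequest.setResetStart("2018-08-31T16:35:07+00:00");
postDataRequest.setResetEnd("2018-08-31T17:35:07+00:00");
--------------------------------------------------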
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-execution]]
|
||||
==== Execution
|
||||
|
||||
The request can be executed through the `MachineLearningClient` contained
|
||||
in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-execution-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The request can also be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PostDataRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
The method does not block and returns immediately. The passed `ActionListener` is used
|
||||
to notify the caller of completion. A typical `ActionListener` for `PostDataResponse` may
|
||||
look like
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-listener]
|
||||
--------------------------------------------------
|
||||
<1> `onResponse` is called back when the action is completed successfully
|
||||
<2> `onFailure` is called back when some unexpected error occurs
|
||||
|
||||
[[java-rest-high-x-pack-ml-post-data-response]]
|
||||
==== Post Data Response
|
||||
|
||||
A `PostDataResponse` contains current data processing statistics.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-response]
|
||||
--------------------------------------------------
|
||||
<1> `getDataCounts()` returns a `DataCounts` object containing the current
|
||||
data processing counts.
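
For example, a sketch of pulling a count out of the response; `getProcessedRecordCount()` is shown only as an illustrative `DataCounts` getter, not an exhaustive list:

["source","java"]
--------------------------------------------------
DataCounts dataCounts = postDataResponse.getDataCounts();
long processed = dataCounts.getProcessedRecordCount(); // illustrative getter
--------------------------------------------------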
|
106
docs/java-rest/high-level/script/put_script.asciidoc
Normal file
@ -0,0 +1,106 @@
|
||||
[[java-rest-high-put-stored-script]]
|
||||
=== Put Stored Script API
|
||||
|
||||
[[java-rest-high-put-stored-script-request]]
|
||||
==== Put Stored Script Request
|
||||
|
||||
A `PutStoredScriptRequest` requires an `id` and `content`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-request]
|
||||
--------------------------------------------------
|
||||
<1> The id of the script
|
||||
<2> The content of the script
|
||||
|
||||
[[java-rest-high-put-stored-script-content]]
|
||||
==== Content
|
||||
The content of a script can be written in different languages and provided in
|
||||
different ways:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-content-painless]
|
||||
--------------------------------------------------
|
||||
<1> Specify a painless script provided as an `XContentBuilder` object.
|
||||
Note that the builder needs to be passed as a `BytesReference` object
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-content-mustache]
|
||||
--------------------------------------------------
|
||||
<1> Specify a mustache script provided as an `XContentBuilder` object.
|
||||
Note that the value of source can be provided directly as a JSON string
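
As a combined sketch of the two approaches above (the script id and source are placeholders, and the `id`/`content` method names are taken from the callouts above), the content can be assembled with an `XContentBuilder` and handed to the request as a `BytesReference`:

["source","java"]
--------------------------------------------------
PutStoredScriptRequest request = new PutStoredScriptRequest();
request.id("calculate-score");

// Build the {"script": {"lang": ..., "source": ...}} body and pass it as bytes.
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject();
{
    builder.startObject("script");
    {
        builder.field("lang", "painless");
        builder.field("source", "Math.log(_score * 2) + params.my_modifier");
    }
    builder.endObject();
}
builder.endObject();
request.content(BytesReference.bytes(builder), XContentType.JSON);
--------------------------------------------------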
|
||||
|
||||
==== Optional arguments
|
||||
The following arguments can optionally be provided:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-context]
|
||||
--------------------------------------------------
|
||||
<1> The context the script should be executed in.
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for all the nodes to acknowledge the script creation as a `TimeValue`
|
||||
<2> Timeout to wait for all the nodes to acknowledge the script creation as a `String`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-masterTimeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to connect to the master node as a `TimeValue`
|
||||
<2> Timeout to connect to the master node as a `String`
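
A brief sketch of both timeout flavours; the method names `timeout` and `masterNodeTimeout` are assumptions inferred from the callouts above, and the values are placeholders:

["source","java"]
--------------------------------------------------
request.timeout(TimeValue.timeValueMinutes(2));           // or request.timeout("2m")
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // or request.masterNodeTimeout("1m")
--------------------------------------------------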
|
||||
|
||||
[[java-rest-high-put-stored-script-sync]]
|
||||
==== Synchronous Execution
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-put-stored-script-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
The asynchronous execution of a put stored script request requires both the `PutStoredScriptRequest`
|
||||
instance and an `ActionListener` instance to be passed to the asynchronous method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PutStoredScriptRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
|
||||
[[java-rest-high-put-stored-script-listener]]
|
||||
===== Action Listener
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once it is
|
||||
completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for `AcknowledgedResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
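
Such a listener could be sketched, under the usual `ActionListener` contract, as:

["source","java"]
--------------------------------------------------
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
    @Override
    public void onResponse(AcknowledgedResponse response) {
        // called when the stored script was put successfully
    }

    @Override
    public void onFailure(Exception e) {
        // called when the put stored script request failed
    }
};
--------------------------------------------------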
|
||||
|
||||
[[java-rest-high-put-stored-script-response]]
|
||||
==== Put Stored Script Response
|
||||
|
||||
The returned `AcknowledgedResponse` allows you to retrieve information about the
|
||||
executed operation as follows:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-response]
|
||||
--------------------------------------------------
|
||||
<1> Indicates whether all of the nodes have acknowledged the request
|
46
docs/java-rest/high-level/security/disable-user.asciidoc
Normal file
@ -0,0 +1,46 @@
|
||||
[[java-rest-high-security-disable-user]]
|
||||
=== Disable User API
|
||||
|
||||
[[java-rest-high-security-disable-user-execution]]
|
||||
==== Execution
|
||||
|
||||
Disabling a user can be performed using the `security().disableUser()`
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[disable-user-execute]
|
||||
--------------------------------------------------
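
A hedged sketch of such a call; the `DisableUserRequest` constructor arguments, the `RefreshPolicy` value, and the `client` variable (a `RestHighLevelClient`) are assumptions for illustration only:

["source","java"]
--------------------------------------------------
// Assumed request shape: user name plus a refresh policy.
DisableUserRequest request = new DisableUserRequest("jacknich", RefreshPolicy.NONE);
EmptyResponse response = client.security().disableUser(request, RequestOptions.DEFAULT);
--------------------------------------------------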
|
||||
|
||||
[[java-rest-high-security-disable-user-response]]
|
||||
==== Response
|
||||
|
||||
The returned `EmptyResponse` does not contain any fields. The return of this
|
||||
response indicates a successful request.
|
||||
|
||||
[[java-rest-high-security-disable-user-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[disable-user-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `DisableUser` request to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
has completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for an `EmptyResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[disable-user-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
46
docs/java-rest/high-level/security/enable-user.asciidoc
Normal file
@ -0,0 +1,46 @@
|
||||
[[java-rest-high-security-enable-user]]
|
||||
=== Enable User API
|
||||
|
||||
[[java-rest-high-security-enable-user-execution]]
|
||||
==== Execution
|
||||
|
||||
Enabling a disabled user can be performed using the `security().enableUser()`
|
||||
method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[enable-user-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-security-enable-user-response]]
|
||||
==== Response
|
||||
|
||||
The returned `EmptyResponse` does not contain any fields. The return of this
|
||||
response indicates a successful request.
|
||||
|
||||
[[java-rest-high-security-enable-user-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[enable-user-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `EnableUser` request to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
has completed the `ActionListener` is called back using the `onResponse` method
|
||||
if the execution successfully completed or using the `onFailure` method if
|
||||
it failed.
|
||||
|
||||
A typical listener for an `EmptyResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[enable-user-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
@ -1,7 +1,7 @@
|
||||
[[java-rest-high-x-pack-security-put-user]]
|
||||
=== X-Pack Put User API
|
||||
[[java-rest-high-security-put-user]]
|
||||
=== Put User API
|
||||
|
||||
[[java-rest-high-x-pack-security-put-user-execution]]
|
||||
[[java-rest-high-security-put-user-execution]]
|
||||
==== Execution
|
||||
|
||||
Creating and updating a user can be performed using the `security().putUser()`
|
||||
@ -9,10 +9,10 @@ method:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-execute]
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-execute]
|
||||
--------------------------------------------------
|
||||
|
||||
[[java-rest-high-x-pack-security-put-user-response]]
|
||||
[[java-rest-high-security-put-user-response]]
|
||||
==== Response
|
||||
|
||||
The returned `PutUserResponse` contains a single field, `created`. This field
|
||||
@ -20,21 +20,21 @@ serves as an indication if a user was created or if an existing entry was update
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-response]
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-response]
|
||||
--------------------------------------------------
|
||||
<1> `created` is a boolean indicating whether the user was created or updated
|
||||
|
||||
[[java-rest-high-x-pack-security-put-user-async]]
|
||||
[[java-rest-high-security-put-user-async]]
|
||||
==== Asynchronous Execution
|
||||
|
||||
This request can be executed asynchronously:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-execute-async]
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-execute-async]
|
||||
--------------------------------------------------
|
||||
<1> The `PutUserResponse` to execute and the `ActionListener` to use when
|
||||
the execution completes
|
||||
<1> The `PutUserRequest` to execute and the `ActionListener` to use when
|
||||
the execution completes.
|
||||
|
||||
The asynchronous method does not block and returns immediately. Once the request
|
||||
has completed the `ActionListener` is called back using the `onResponse` method
|
||||
@ -45,8 +45,8 @@ A typical listener for a `PutUserResponse` looks like:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-execute-listener]
|
||||
include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-execute-listener]
|
||||
--------------------------------------------------
|
||||
<1> Called when the execution is successfully completed. The response is
|
||||
provided as an argument
|
||||
<2> Called in case of failure. The raised exception is provided as an argument
|
||||
provided as an argument.
|
||||
<2> Called in case of failure. The raised exception is provided as an argument.
|
@ -189,9 +189,11 @@ include::tasks/cancel_tasks.asciidoc[]
|
||||
The Java High Level REST Client supports the following Scripts APIs:
|
||||
|
||||
* <<java-rest-high-get-stored-script>>
|
||||
* <<java-rest-high-put-stored-script>>
|
||||
* <<java-rest-high-delete-stored-script>>
|
||||
|
||||
include::script/get_script.asciidoc[]
|
||||
include::script/put_script.asciidoc[]
|
||||
include::script/delete_script.asciidoc[]
|
||||
|
||||
== Licensing APIs
|
||||
@ -218,9 +220,11 @@ The Java High Level REST Client supports the following Machine Learning APIs:
|
||||
* <<java-rest-high-x-pack-ml-flush-job>>
|
||||
* <<java-rest-high-x-pack-ml-update-job>>
|
||||
* <<java-rest-high-x-pack-ml-get-job-stats>>
|
||||
* <<java-rest-high-x-pack-ml-forecast-job>>
|
||||
* <<java-rest-high-x-pack-ml-get-buckets>>
|
||||
* <<java-rest-high-x-pack-ml-get-overall-buckets>>
|
||||
* <<java-rest-high-x-pack-ml-get-records>>
|
||||
* <<java-rest-high-x-pack-ml-post-data>>
|
||||
* <<java-rest-high-x-pack-ml-get-influencers>>
|
||||
|
||||
include::ml/put-job.asciidoc[]
|
||||
@ -231,9 +235,11 @@ include::ml/close-job.asciidoc[]
|
||||
include::ml/update-job.asciidoc[]
|
||||
include::ml/flush-job.asciidoc[]
|
||||
include::ml/get-job-stats.asciidoc[]
|
||||
include::ml/forecast-job.asciidoc[]
|
||||
include::ml/get-buckets.asciidoc[]
|
||||
include::ml/get-overall-buckets.asciidoc[]
|
||||
include::ml/get-records.asciidoc[]
|
||||
include::ml/post-data.asciidoc[]
|
||||
include::ml/get-influencers.asciidoc[]
|
||||
|
||||
== Migration APIs
|
||||
@ -244,6 +250,18 @@ The Java High Level REST Client supports the following Migration APIs:
|
||||
|
||||
include::migration/get-assistance.asciidoc[]
|
||||
|
||||
== Security APIs
|
||||
|
||||
The Java High Level REST Client supports the following Security APIs:
|
||||
|
||||
* <<java-rest-high-security-put-user>>
|
||||
* <<java-rest-high-security-enable-user>>
|
||||
* <<java-rest-high-security-disable-user>>
|
||||
|
||||
include::security/put-user.asciidoc[]
|
||||
include::security/enable-user.asciidoc[]
|
||||
include::security/disable-user.asciidoc[]
|
||||
|
||||
== Watcher APIs
|
||||
|
||||
The Java High Level REST Client supports the following Watcher APIs:
|
||||
|
@ -179,9 +179,9 @@ specific index module:
|
||||
|
||||
`index.blocks.write`::
|
||||
|
||||
Set to `true` to disable data write operations against the index. Unlike `read_only',
|
||||
this setting does not affect metadata. For instance, you can close an index with a `write`
|
||||
block, but not an index with a `read_only` block.
|
||||
Set to `true` to disable data write operations against the index. Unlike `read_only`,
|
||||
this setting does not affect metadata. For instance, you can close an index with a `write`
|
||||
block, but not an index with a `read_only` block.
|
||||
|
||||
`index.blocks.metadata`::
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
[[breaking_70_api_changes]]
|
||||
=== Breaking API changes in 7.0
|
||||
=== API changes
|
||||
|
||||
==== Camel case and underscore parameters deprecated in 6.x have been removed
|
||||
A number of duplicate parameters deprecated in 6.x have been removed from
|
||||
|
@ -13,7 +13,7 @@ The `prepareExecute` method which created a request builder has been
|
||||
removed from the client api. Instead, construct a builder for the
|
||||
appropriate request directly.
|
||||
|
||||
=== Some Aggregation classes have moved packages
|
||||
==== Some Aggregation classes have moved packages
|
||||
|
||||
* All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages
|
||||
were moved to a single `org.elasticsearch.search.aggregations.metrics` package.
|
||||
|
@ -31,7 +31,7 @@ the index setting `index.mapping.nested_objects.limit`.
|
||||
|
||||
This option is useless now that all indices have at most one type.
|
||||
|
||||
=== The `classic` similarity has been removed
|
||||
==== The `classic` similarity has been removed
|
||||
|
||||
The `classic` similarity relied on coordination factors for scoring to be good
|
||||
in presence of stopwords in the query. This feature has been removed from
|
||||
@ -39,7 +39,7 @@ Lucene, which means that the `classic` similarity now produces scores of lower
|
||||
quality. It is advised to switch to `BM25` instead, which is widely accepted
|
||||
as a better alternative.
|
||||
|
||||
=== Similarities fail when unsupported options are provided
|
||||
==== Similarities fail when unsupported options are provided
|
||||
|
||||
An error will now be thrown when unknown configuration options are provided
|
||||
to similarities. Such unknown parameters were ignored before.
|
||||
|
@ -11,7 +11,7 @@ cluster where {xpack} is installed, then you must download and configure the
|
||||
|
||||
. Add the {xpack} transport JAR file to your *CLASSPATH*. You can download the {xpack}
|
||||
distribution and extract the JAR file manually or you can get it from the
|
||||
https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.jar[Elasticsearch Maven repository].
|
||||
https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.jar[Elasticsearc Maven repository].
|
||||
As with any dependency, you will also need its transitive dependencies. Refer to the
|
||||
https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.pom[X-Pack POM file
|
||||
for your version] when downloading for offline usage.
|
||||
|
@ -2,7 +2,7 @@
|
||||
================================================
|
||||
|
||||
When you extract the zip or tarball packages, the `elasticsearch-n.n.n`
|
||||
directory contains the Elasticsearch `config`, `data`, `logs` and
|
||||
directory contains the Elasticsearh `config`, `data`, `logs` and
|
||||
`plugins` directories.
|
||||
|
||||
We recommend moving these directories out of the Elasticsearch directory
|
||||
|
@ -1 +0,0 @@
|
||||
5f469e925dde5dff81b9d56f465a8babb56cd26b
|
@ -0,0 +1 @@
|
||||
58b9db095c569b4c4da491810f14e1429878b594
|
@ -26,7 +26,7 @@ import org.apache.lucene.expressions.js.VariableContext;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.queries.function.ValueSource;
|
||||
import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.apache.lucene.search.Scorable;
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
@ -336,7 +336,7 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) {
|
||||
public void setScorer(Scorable scorer) {
|
||||
script.setScorer(scorer);
|
||||
}
|
||||
|
||||
|
@ -61,12 +61,19 @@ public final class Whitelist {
|
||||
/** The {@link List} of all the whitelisted Painless classes. */
|
||||
public final List<WhitelistClass> whitelistClasses;
|
||||
|
||||
/** The {@link List} of all the whitelisted static Painless methods. */
|
||||
public final List<WhitelistMethod> whitelistImportedMethods;
|
||||
|
||||
/** The {@link List} of all the whitelisted Painless bindings. */
|
||||
public final List<WhitelistBinding> whitelistBindings;
|
||||
|
||||
/** Standard constructor. All values must be not {@code null}. */
|
||||
public Whitelist(ClassLoader classLoader, List<WhitelistClass> whitelistClasses, List<WhitelistBinding> whitelistBindings) {
|
||||
/** Standard constructor. All values must be not {@code null}. */
|
||||
public Whitelist(ClassLoader classLoader, List<WhitelistClass> whitelistClasses,
|
||||
List<WhitelistMethod> whitelistImportedMethods, List<WhitelistBinding> whitelistBindings) {
|
||||
|
||||
this.classLoader = Objects.requireNonNull(classLoader);
|
||||
this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses));
|
||||
this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistImportedMethods));
|
||||
this.whitelistBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistBindings));
|
||||
}
|
||||
}
|
||||
|
@ -133,6 +133,7 @@ public final class WhitelistLoader {
|
||||
*/
|
||||
public static Whitelist loadFromResourceFiles(Class<?> resource, String... filepaths) {
|
||||
List<WhitelistClass> whitelistClasses = new ArrayList<>();
|
||||
List<WhitelistMethod> whitelistStatics = new ArrayList<>();
|
||||
List<WhitelistBinding> whitelistBindings = new ArrayList<>();
|
||||
|
||||
// Execute a single pass through the whitelist text files. This will gather all the
|
||||
@ -192,18 +193,18 @@ public final class WhitelistLoader {
|
||||
whitelistConstructors = new ArrayList<>();
|
||||
whitelistMethods = new ArrayList<>();
|
||||
whitelistFields = new ArrayList<>();
|
||||
} else if (line.startsWith("static ")) {
|
||||
} else if (line.startsWith("static_import ")) {
|
||||
// Ensure the final token of the line is '{'.
|
||||
if (line.endsWith("{") == false) {
|
||||
throw new IllegalArgumentException(
|
||||
"invalid static definition: failed to parse static opening bracket [" + line + "]");
|
||||
"invalid static import definition: failed to parse static import opening bracket [" + line + "]");
|
||||
}
|
||||
|
||||
if (parseType != null) {
|
||||
throw new IllegalArgumentException("invalid definition: cannot embed static definition [" + line + "]");
|
||||
throw new IllegalArgumentException("invalid definition: cannot embed static import definition [" + line + "]");
|
||||
}
|
||||
|
||||
parseType = "static";
|
||||
parseType = "static_import";
|
||||
|
||||
// Handle the end of a definition and reset all previously gathered values.
|
||||
// Expects the following format: '}' '\n'
|
||||
@ -229,9 +230,9 @@ public final class WhitelistLoader {
|
||||
// Reset the parseType.
|
||||
parseType = null;
|
||||
|
||||
// Handle static definition types.
|
||||
// Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' 'bound_to' ID '\n'
|
||||
} else if ("static".equals(parseType)) {
|
||||
// Handle static import definition types.
|
||||
// Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' ( 'from_class' | 'bound_to' ) ID '\n'
|
||||
} else if ("static_import".equals(parseType)) {
|
||||
// Mark the origin of this parsable object.
|
||||
String origin = "[" + filepath + "]:[" + number + "]";
|
||||
|
||||
@ -240,7 +241,7 @@ public final class WhitelistLoader {
|
||||
|
||||
if (parameterStartIndex == -1) {
|
||||
throw new IllegalArgumentException(
|
||||
"illegal static definition: start of method parameters not found [" + line + "]");
|
||||
"illegal static import definition: start of method parameters not found [" + line + "]");
|
||||
}
|
||||
|
||||
String[] tokens = line.substring(0, parameterStartIndex).trim().split("\\s+");
|
||||
@ -261,7 +262,7 @@ public final class WhitelistLoader {
|
||||
|
||||
if (parameterEndIndex == -1) {
|
||||
throw new IllegalArgumentException(
|
||||
"illegal static definition: end of method parameters not found [" + line + "]");
|
||||
"illegal static import definition: end of method parameters not found [" + line + "]");
|
||||
}
|
||||
|
||||
String[] canonicalTypeNameParameters =
|
||||
@ -272,39 +273,37 @@ public final class WhitelistLoader {
|
||||
canonicalTypeNameParameters = new String[0];
|
||||
}
|
||||
|
||||
// Parse the static type and class.
|
||||
// Parse the static import type and class.
|
||||
tokens = line.substring(parameterEndIndex + 1).trim().split("\\s+");
|
||||
|
||||
String staticType;
|
||||
String staticImportType;
|
||||
String targetJavaClassName;
|
||||
|
||||
// Based on the number of tokens, look up the type and class.
|
||||
if (tokens.length == 2) {
|
||||
staticType = tokens[0];
|
||||
staticImportType = tokens[0];
|
||||
targetJavaClassName = tokens[1];
|
||||
} else {
|
||||
throw new IllegalArgumentException("invalid static definition: unexpected format [" + line + "]");
|
||||
throw new IllegalArgumentException("invalid static import definition: unexpected format [" + line + "]");
|
||||
}
|
||||
|
||||
// Check the static type is valid.
|
||||
if ("bound_to".equals(staticType) == false) {
|
||||
throw new IllegalArgumentException(
|
||||
"invalid static definition: unexpected static type [" + staticType + "] [" + line + "]");
|
||||
// Add a static import method or binding depending on the static import type.
|
||||
if ("from_class".equals(staticImportType)) {
|
||||
whitelistStatics.add(new WhitelistMethod(origin, targetJavaClassName,
|
||||
methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters)));
|
||||
} else if ("bound_to".equals(staticImportType)) {
|
||||
whitelistBindings.add(new WhitelistBinding(origin, targetJavaClassName,
|
||||
methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters)));
|
||||
} else {
|
||||
throw new IllegalArgumentException("invalid static import definition: " +
|
||||
"unexpected static import type [" + staticImportType + "] [" + line + "]");
|
||||
}
|
||||
|
||||
whitelistBindings.add(new WhitelistBinding(origin, targetJavaClassName,
|
||||
methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters)));
|
||||
|
||||
// Handle class definition types.
|
||||
} else if ("class".equals(parseType)) {
|
||||
// Mark the origin of this parsable object.
|
||||
String origin = "[" + filepath + "]:[" + number + "]";
|
||||
|
||||
// Ensure we have a defined class before adding any constructors, methods, augmented methods, or fields.
|
||||
if (parseType == null) {
|
||||
throw new IllegalArgumentException("invalid definition: expected one of ['class', 'static'] [" + line + "]");
|
||||
}
|
||||
|
||||
// Handle the case for a constructor definition.
|
||||
// Expects the following format: '(' ( ID ( ',' ID )* )? ')' '\n'
|
||||
if (line.startsWith("(")) {
|
||||
@ -393,7 +392,7 @@ public final class WhitelistLoader {
|
||||
|
||||
ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>)resource::getClassLoader);
|
||||
|
||||
return new Whitelist(loader, whitelistClasses, whitelistBindings);
|
||||
return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistBindings);
|
||||
}
|
||||
|
||||
private WhitelistLoader() {}
|
||||
|
@ -24,6 +24,21 @@ import java.util.function.Function;
|
||||
|
||||
/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */
|
||||
public class FeatureTest {
|
||||
/** static method that returns true */
|
||||
public static boolean overloadedStatic() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/** static method that returns what you ask it */
|
||||
public static boolean overloadedStatic(boolean whatToReturn) {
|
||||
return whatToReturn;
|
||||
}
|
||||
|
||||
/** static method only whitelisted as a static */
|
||||
public static float staticAddFloatsTest(float x, float y) {
|
||||
return x + y;
|
||||
}
|
||||
|
||||
private int x;
|
||||
private int y;
|
||||
public int z;
|
||||
@ -58,21 +73,12 @@ public class FeatureTest {
|
||||
this.y = y;
|
||||
}
|
||||
|
||||
/** static method that returns true */
|
||||
public static boolean overloadedStatic() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/** static method that returns what you ask it */
|
||||
public static boolean overloadedStatic(boolean whatToReturn) {
|
||||
return whatToReturn;
|
||||
}
|
||||
|
||||
/** method taking two functions! */
|
||||
public Object twoFunctionsOfX(Function<Object,Object> f, Function<Object,Object> g) {
|
||||
return f.apply(g.apply(x));
|
||||
}
|
||||
|
||||
/** method to take in a list */
|
||||
public void listInput(List<Object> list) {
|
||||
|
||||
}
|
||||
|
@ -37,16 +37,23 @@ public final class PainlessLookup {
|
||||
private final Map<String, Class<?>> canonicalClassNamesToClasses;
|
||||
private final Map<Class<?>, PainlessClass> classesToPainlessClasses;
|
||||
|
||||
private final Map<String, PainlessMethod> painlessMethodKeysToImportedPainlessMethods;
|
||||
private final Map<String, PainlessBinding> painlessMethodKeysToPainlessBindings;
|
||||
|
||||
PainlessLookup(Map<String, Class<?>> canonicalClassNamesToClasses, Map<Class<?>, PainlessClass> classesToPainlessClasses,
|
||||
Map<String, PainlessMethod> painlessMethodKeysToImportedPainlessMethods,
|
||||
Map<String, PainlessBinding> painlessMethodKeysToPainlessBindings) {
|
||||
|
||||
Objects.requireNonNull(canonicalClassNamesToClasses);
|
||||
Objects.requireNonNull(classesToPainlessClasses);
|
||||
|
||||
Objects.requireNonNull(painlessMethodKeysToImportedPainlessMethods);
|
||||
Objects.requireNonNull(painlessMethodKeysToPainlessBindings);
|
||||
|
||||
this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses);
|
||||
this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses);
|
||||
|
||||
this.painlessMethodKeysToImportedPainlessMethods = Collections.unmodifiableMap(painlessMethodKeysToImportedPainlessMethods);
|
||||
this.painlessMethodKeysToPainlessBindings = Collections.unmodifiableMap(painlessMethodKeysToPainlessBindings);
|
||||
}
|
||||
|
||||
@ -167,6 +174,14 @@ public final class PainlessLookup {
|
||||
return painlessField;
|
||||
}
|
||||
|
||||
public PainlessMethod lookupImportedPainlessMethod(String methodName, int arity) {
|
||||
Objects.requireNonNull(methodName);
|
||||
|
||||
String painlessMethodKey = buildPainlessMethodKey(methodName, arity);
|
||||
|
||||
return painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey);
|
||||
}
|
||||
|
||||
public PainlessBinding lookupPainlessBinding(String methodName, int arity) {
|
||||
Objects.requireNonNull(methodName);
|
||||
|
||||
|
@ -243,6 +243,14 @@ public final class PainlessLookupBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
for (WhitelistMethod whitelistStatic : whitelist.whitelistImportedMethods) {
|
||||
origin = whitelistStatic.origin;
|
||||
painlessLookupBuilder.addImportedPainlessMethod(
|
||||
whitelist.classLoader, whitelistStatic.augmentedCanonicalClassName,
|
||||
whitelistStatic.methodName, whitelistStatic.returnCanonicalTypeName,
|
||||
whitelistStatic.canonicalTypeNameParameters);
|
||||
}
|
||||
|
||||
for (WhitelistBinding whitelistBinding : whitelist.whitelistBindings) {
|
||||
origin = whitelistBinding.origin;
|
||||
painlessLookupBuilder.addPainlessBinding(
|
||||
@ -261,12 +269,14 @@ public final class PainlessLookupBuilder {
|
||||
private final Map<String, Class<?>> canonicalClassNamesToClasses;
|
||||
private final Map<Class<?>, PainlessClassBuilder> classesToPainlessClassBuilders;
|
||||
|
||||
private final Map<String, PainlessMethod> painlessMethodKeysToImportedPainlessMethods;
|
||||
private final Map<String, PainlessBinding> painlessMethodKeysToPainlessBindings;
|
||||
|
||||
public PainlessLookupBuilder() {
|
||||
canonicalClassNamesToClasses = new HashMap<>();
|
||||
classesToPainlessClassBuilders = new HashMap<>();
|
||||
|
||||
painlessMethodKeysToImportedPainlessMethods = new HashMap<>();
|
||||
painlessMethodKeysToPainlessBindings = new HashMap<>();
|
||||
}
|
||||
|
||||
@ -513,8 +523,9 @@ public final class PainlessLookupBuilder {
|
||||
addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters);
|
||||
}
|
||||
|
||||
public void addPainlessMethod(Class<?> targetClass, Class<?> augmentedClass, String methodName,
|
||||
Class<?> returnType, List<Class<?>> typeParameters) {
|
||||
public void addPainlessMethod(Class<?> targetClass, Class<?> augmentedClass,
|
||||
String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
|
||||
|
||||
Objects.requireNonNull(targetClass);
|
||||
Objects.requireNonNull(methodName);
|
||||
Objects.requireNonNull(returnType);
|
||||
@ -573,6 +584,12 @@ public final class PainlessLookupBuilder {
|
||||
} else {
|
||||
try {
|
||||
javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
|
||||
|
||||
if (Modifier.isStatic(javaMethod.getModifiers()) == false) {
|
||||
throw new IllegalArgumentException("method [[" + targetCanonicalClassName + "], [" + methodName + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "] with augmented class " +
|
||||
"[" + typeToCanonicalTypeName(augmentedClass) + "] must be static");
|
||||
}
|
||||
} catch (NoSuchMethodException nsme) {
|
||||
throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " +
|
||||
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " +
|
||||
@ -620,7 +637,7 @@ public final class PainlessLookupBuilder {
|
||||
"with the same arity and different return type or type parameters");
|
||||
}
|
||||
} else {
|
||||
PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey);
|
||||
PainlessMethod painlessMethod = painlessClassBuilder.methods.get(painlessMethodKey);
|
||||
|
||||
if (painlessMethod == null) {
|
||||
MethodHandle methodHandle;
|
||||
@ -788,6 +805,146 @@ public final class PainlessLookupBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
public void addImportedPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName,
|
||||
String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {
|
||||
|
||||
Objects.requireNonNull(classLoader);
|
||||
Objects.requireNonNull(targetCanonicalClassName);
|
||||
Objects.requireNonNull(methodName);
|
||||
Objects.requireNonNull(returnCanonicalTypeName);
|
||||
Objects.requireNonNull(canonicalTypeNameParameters);
|
||||
|
||||
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
|
||||
|
||||
if (targetClass == null) {
|
||||
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
|
||||
}
|
||||
|
||||
List<Class<?>> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size());
|
||||
|
||||
for (String canonicalTypeNameParameter : canonicalTypeNameParameters) {
|
||||
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
|
||||
|
||||
if (typeParameter == null) {
|
||||
throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for imported method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
|
||||
}
|
||||
|
||||
typeParameters.add(typeParameter);
|
||||
}
|
||||
|
||||
Class<?> returnType = canonicalTypeNameToType(returnCanonicalTypeName);
|
||||
|
||||
if (returnType == null) {
|
||||
throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for imported method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]");
|
||||
}
|
||||
|
||||
addImportedPainlessMethod(targetClass, methodName, returnType, typeParameters);
|
||||
}
|
||||
|
||||
public void addImportedPainlessMethod(Class<?> targetClass, String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
|
||||
Objects.requireNonNull(targetClass);
|
||||
Objects.requireNonNull(methodName);
|
||||
Objects.requireNonNull(returnType);
|
||||
Objects.requireNonNull(typeParameters);
|
||||
|
||||
if (targetClass == def.class) {
|
||||
throw new IllegalArgumentException("cannot add imported method from reserved class [" + DEF_CLASS_NAME + "]");
|
||||
}
|
||||
|
||||
String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
|
||||
|
||||
if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) {
|
||||
throw new IllegalArgumentException(
|
||||
"invalid imported method name [" + methodName + "] for target class [" + targetCanonicalClassName + "].");
|
||||
}
|
||||
|
||||
PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);
|
||||
|
||||
if (painlessClassBuilder == null) {
|
||||
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
int typeParametersSize = typeParameters.size();
|
||||
List<Class<?>> javaTypeParameters = new ArrayList<>(typeParametersSize);
|
||||
|
||||
for (Class<?> typeParameter : typeParameters) {
|
||||
if (isValidType(typeParameter) == false) {
|
||||
throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " +
|
||||
"not found for imported method [[" + targetCanonicalClassName + "], [" + methodName + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
javaTypeParameters.add(typeToJavaType(typeParameter));
|
||||
}
|
||||
|
||||
if (isValidType(returnType) == false) {
|
||||
throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for imported method " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
Method javaMethod;
|
||||
|
||||
try {
|
||||
javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
|
||||
} catch (NoSuchMethodException nsme) {
|
||||
throw new IllegalArgumentException("imported method reflection object [[" + targetCanonicalClassName + "], " +
|
||||
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme);
|
||||
}
|
||||
|
||||
if (javaMethod.getReturnType() != typeToJavaType(returnType)) {
|
||||
throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " +
|
||||
"does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " +
|
||||
"for imported method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "]");
|
||||
}
|
||||
|
||||
if (Modifier.isStatic(javaMethod.getModifiers()) == false) {
|
||||
throw new IllegalArgumentException("imported method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "] must be static");
|
||||
}
|
||||
|
||||
String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize);
|
||||
|
||||
if (painlessMethodKeysToPainlessBindings.containsKey(painlessMethodKey)) {
|
||||
throw new IllegalArgumentException("imported method and binding cannot have the same name [" + methodName + "]");
|
||||
}
|
||||
|
||||
PainlessMethod importedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey);
|
||||
|
||||
if (importedPainlessMethod == null) {
|
||||
MethodHandle methodHandle;
|
||||
|
||||
try {
|
||||
methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
|
||||
} catch (IllegalAccessException iae) {
|
||||
throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " +
|
||||
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
|
||||
}
|
||||
|
||||
MethodType methodType = methodHandle.type();
|
||||
|
||||
importedPainlessMethod = painlessMethodCache.computeIfAbsent(
|
||||
new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters),
|
||||
key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));
|
||||
|
||||
painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, importedPainlessMethod);
|
||||
} else if (importedPainlessMethod.returnType == returnType &&
|
||||
importedPainlessMethod.typeParameters.equals(typeParameters) == false) {
|
||||
throw new IllegalArgumentException("cannot have imported methods " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
|
||||
"[" + typeToCanonicalTypeName(returnType) + "], " +
|
||||
typesToCanonicalTypeNames(typeParameters) + "] and " +
|
||||
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
|
||||
"[" + typeToCanonicalTypeName(importedPainlessMethod.returnType) + "], " +
|
||||
typesToCanonicalTypeNames(importedPainlessMethod.typeParameters) + "] " +
|
||||
"with the same arity and different return type or type parameters");
|
||||
}
|
||||
}
|
||||
|
||||
public void addPainlessBinding(ClassLoader classLoader, String targetJavaClassName,
|
||||
String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {
|
||||
|
||||
@ -937,6 +1094,11 @@ public final class PainlessLookupBuilder {
|
||||
}
|
||||
|
||||
String painlessMethodKey = buildPainlessMethodKey(methodName, constructorTypeParametersSize + methodTypeParametersSize);
|
||||
|
||||
if (painlessMethodKeysToImportedPainlessMethods.containsKey(painlessMethodKey)) {
|
||||
throw new IllegalArgumentException("binding and imported method cannot have the same name [" + methodName + "]");
|
||||
}
|
||||
|
||||
PainlessBinding painlessBinding = painlessMethodKeysToPainlessBindings.get(painlessMethodKey);
|
||||
|
||||
if (painlessBinding == null) {
|
||||
@ -976,7 +1138,8 @@ public final class PainlessLookupBuilder {
|
||||
classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build());
|
||||
}
|
||||
|
||||
return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses, painlessMethodKeysToPainlessBindings);
|
||||
return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses,
|
||||
painlessMethodKeysToImportedPainlessMethods, painlessMethodKeysToPainlessBindings);
|
||||
}
|
||||
|
||||
private void copyPainlessClassMembers() {
|
||||
|
@ -25,6 +25,7 @@ import org.elasticsearch.painless.Locals.LocalMethod;
|
||||
import org.elasticsearch.painless.Location;
|
||||
import org.elasticsearch.painless.MethodWriter;
|
||||
import org.elasticsearch.painless.lookup.PainlessBinding;
|
||||
import org.elasticsearch.painless.lookup.PainlessMethod;
|
||||
import org.objectweb.asm.Label;
|
||||
import org.objectweb.asm.Type;
|
||||
import org.objectweb.asm.commons.Method;
|
||||
@ -45,6 +46,7 @@ public final class ECallLocal extends AExpression {
|
||||
private final List<AExpression> arguments;
|
||||
|
||||
private LocalMethod method = null;
|
||||
private PainlessMethod imported = null;
|
||||
private PainlessBinding binding = null;
|
||||
|
||||
public ECallLocal(Location location, String name, List<AExpression> arguments) {
|
||||
@ -65,16 +67,33 @@ public final class ECallLocal extends AExpression {
|
||||
void analyze(Locals locals) {
|
||||
method = locals.getMethod(name, arguments.size());
|
||||
|
||||
|
||||
if (method == null) {
|
||||
binding = locals.getPainlessLookup().lookupPainlessBinding(name, arguments.size());
|
||||
imported = locals.getPainlessLookup().lookupImportedPainlessMethod(name, arguments.size());
|
||||
|
||||
if (binding == null) {
|
||||
throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments."));
|
||||
if (imported == null) {
|
||||
binding = locals.getPainlessLookup().lookupPainlessBinding(name, arguments.size());
|
||||
|
||||
if (binding == null) {
|
||||
throw createError(
|
||||
new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments."));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<Class<?>> typeParameters = new ArrayList<>(method == null ? binding.typeParameters : method.typeParameters);
|
||||
List<Class<?>> typeParameters;
|
||||
|
||||
if (method != null) {
|
||||
typeParameters = new ArrayList<>(method.typeParameters);
|
||||
actual = method.returnType;
|
||||
} else if (imported != null) {
|
||||
typeParameters = new ArrayList<>(imported.typeParameters);
|
||||
actual = imported.returnType;
|
||||
} else if (binding != null) {
|
||||
typeParameters = new ArrayList<>(binding.typeParameters);
|
||||
actual = binding.returnType;
|
||||
} else {
|
||||
throw new IllegalStateException("Illegal tree structure.");
|
||||
}
|
||||
|
||||
for (int argument = 0; argument < arguments.size(); ++argument) {
|
||||
AExpression expression = arguments.get(argument);
|
||||
@ -86,14 +105,26 @@ public final class ECallLocal extends AExpression {
|
||||
}
|
||||
|
||||
statement = true;
|
||||
actual = method == null ? binding.returnType : method.returnType;
|
||||
}
|
||||
|
||||
@Override
|
||||
void write(MethodWriter writer, Globals globals) {
|
||||
writer.writeDebugInfo(location);
|
||||
|
||||
if (method == null) {
|
||||
if (method != null) {
|
||||
for (AExpression argument : arguments) {
|
||||
argument.write(writer, globals);
|
||||
}
|
||||
|
||||
writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString()));
|
||||
} else if (imported != null) {
|
||||
for (AExpression argument : arguments) {
|
||||
argument.write(writer, globals);
|
||||
}
|
||||
|
||||
writer.invokeStatic(Type.getType(imported.targetClass),
|
||||
new Method(imported.javaMethod.getName(), imported.methodType.toMethodDescriptorString()));
|
||||
} else if (binding != null) {
|
||||
String name = globals.addBinding(binding.javaConstructor.getDeclaringClass());
|
||||
Type type = Type.getType(binding.javaConstructor.getDeclaringClass());
|
||||
int javaConstructorParameterCount = binding.javaConstructor.getParameterCount();
|
||||
@ -124,11 +155,7 @@ public final class ECallLocal extends AExpression {
|
||||
|
||||
writer.invokeVirtual(type, Method.getMethod(binding.javaMethod));
|
||||
} else {
|
||||
for (AExpression argument : arguments) {
|
||||
argument.write(writer, globals);
|
||||
}
|
||||
|
||||
writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString()));
|
||||
throw new IllegalStateException("Illegal tree structure.");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -176,6 +176,7 @@ class org.elasticsearch.painless.FeatureTest no_import {
|
||||
}
|
||||
|
||||
# for testing
|
||||
static {
|
||||
static_import {
|
||||
float staticAddFloatsTest(float, float) from_class org.elasticsearch.painless.FeatureTest
|
||||
int testAddWithState(int, int, int, double) bound_to org.elasticsearch.painless.BindingTest
|
||||
}
|
@ -133,4 +133,8 @@ public class BasicAPITests extends ScriptTestCase {
|
||||
public void testNoSemicolon() {
|
||||
assertEquals(true, exec("def x = true; if (x) return x"));
|
||||
}
|
||||
|
||||
public void testStatic() {
|
||||
assertEquals(15.5f, exec("staticAddFloatsTest(6.5f, 9.0f)"));
|
||||
}
|
||||
}
|
||||
|
@ -19,39 +19,25 @@

package org.elasticsearch.painless;

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;

import java.io.IOException;
import java.util.Collections;

public class ScoreTests extends ScriptTestCase {

    /** Most of a dummy scorer impl that requires overriding just score(). */
    abstract class MockScorer extends Scorer {
        MockScorer() {
            super(null);
        }
    abstract class MockScorer extends Scorable {
        @Override
        public int docID() {
            return 0;
        }
        @Override
        public DocIdSetIterator iterator() {
            throw new UnsupportedOperationException();
        }
    }

    public void testScoreWorks() {
        assertEquals(2.5, exec("_score", Collections.emptyMap(), Collections.emptyMap(),
                new MockScorer() {
                    @Override
                    public float score() throws IOException {
                        return 2.5f;
                    }

                    @Override
                    public float getMaxScore(int upTo) throws IOException {
                    public float score() {
                        return 2.5f;
                    }
                },
@ -62,14 +48,9 @@ public class ScoreTests extends ScriptTestCase {
        assertEquals(3.5, exec("3.5", Collections.emptyMap(), Collections.emptyMap(),
                new MockScorer() {
                    @Override
                    public float score() throws IOException {
                    public float score() {
                        throw new AssertionError("score() should not be called");
                    }

                    @Override
                    public float getMaxScore(int upTo) throws IOException {
                        return Float.MAX_VALUE;
                    }
                },
                true));
    }
@ -79,17 +60,12 @@ public class ScoreTests extends ScriptTestCase {
                new MockScorer() {
                    private boolean used = false;
                    @Override
                    public float score() throws IOException {
                    public float score() {
                        if (used == false) {
                            return 4.5f;
                        }
                        throw new AssertionError("score() should not be called twice");
                    }

                    @Override
                    public float getMaxScore(int upTo) throws IOException {
                        return 4.5f;
                    }
                },
                true));
    }
@ -20,7 +20,7 @@

package org.elasticsearch.painless;

import junit.framework.AssertionFailedError;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.painless.antlr.Walker;
@ -47,6 +47,8 @@ import static org.hamcrest.Matchers.hasSize;
 * Typically just asserts the output of {@code exec()}
 */
public abstract class ScriptTestCase extends ESTestCase {
    private static final PainlessLookup PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);

    protected PainlessScriptEngine scriptEngine;

    @Before
@ -89,15 +91,15 @@ public abstract class ScriptTestCase extends ESTestCase {
    }

    /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */
    public Object exec(String script, Map<String, Object> vars, Map<String,String> compileParams, Scorer scorer, boolean picky) {
    public Object exec(String script, Map<String, Object> vars, Map<String,String> compileParams, Scorable scorer, boolean picky) {
        // test for ambiguity errors before running the actual script if picky is true
        if (picky) {
            PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
            ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class);
            ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, GenericElasticsearchScript.class);
            CompilerSettings pickySettings = new CompilerSettings();
            pickySettings.setPicky(true);
            pickySettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(scriptEngineSettings()));
            Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, painlessLookup, null);
            Walker.buildPainlessTree(
                scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, PAINLESS_LOOKUP, null);
        }
        // test actual script execution
        ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, compileParams);
@ -19,13 +19,11 @@

package org.elasticsearch.painless;

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptedMetricAggContexts;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@ -66,20 +64,12 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase {
        Map<String, Object> params = new HashMap<>();
        Map<String, Object> state = new HashMap<>();

        Scorer scorer = new Scorer(null) {
        Scorable scorer = new Scorable() {
            @Override
            public int docID() { return 0; }

            @Override
            public float score() { return 0.5f; }

            @Override
            public DocIdSetIterator iterator() { return null; }

            @Override
            public float getMaxScore(int upTo) throws IOException {
                return 0.5f;
            }
        };

        ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null);
@ -21,9 +21,9 @@ package org.elasticsearch.join.aggregations;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
@ -148,7 +148,17 @@ public class ParentToChildrenAggregator extends BucketsAggregator implements Sin

                final SortedSetDocValues globalOrdinals = valuesSource.globalOrdinalsValues(ctx);
                // Set the scorer, since we now replay only the child docIds
                sub.setScorer(new ConstantScoreScorer(null, 1f, childDocsIter));
                sub.setScorer(new Scorable() {
                    @Override
                    public float score() {
                        return 1f;
                    }

                    @Override
                    public int docID() {
                        return childDocsIter.docID();
                    }
                });

                final Bits liveDocs = ctx.reader().getLiveDocs();
                for (int docId = childDocsIter
@ -1 +0,0 @@
97a3758487272ba4d15720b0ca15b0f980310c89
@ -0,0 +1 @@
f009ee188453aabae77fad55aea08bc60323bb3e
@ -1 +0,0 @@
12ed739794cd317754684308ddc5bdbdcc46cdde
@ -0,0 +1 @@
af3d2ae975e3560c1ea69222d6c46072857952ba
@ -1 +0,0 @@
4da6e5c17a17f0a9a99b518ea9985ea06996b63b
@ -0,0 +1 @@
f17bc5e532d9dc2786a13bd577df64023d1baae1
@ -1 +0,0 @@
a36b2db18a2a22966ab0bf9fced775f22dd7029d
@ -0,0 +1 @@
7ad89d33c1cd960c91afa05b22024137fe108567
@ -1 +0,0 @@
5f1d360a47d2fd166e970d17c46b284830e64258
@ -0,0 +1 @@
3f11fb254256d74e911b953994b47e7a95915954
@ -1 +0,0 @@
b07883b5e988d1d991503aa49d9b59059518825d
@ -0,0 +1 @@
b2348d140ef0c3e674cb81173f61c5e5f430facb
@ -1 +0,0 @@
1b46b3ee62932de7ba7b670820a13eb973ec5777
@ -0,0 +1 @@
485a0c3be58a5942b4a28639f1019181ef4cd0e3
@ -235,7 +235,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
                () -> client().performRequest(request));
        assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode());
        assertThat(responseException.getMessage(),
                containsString("Missing required setting [cluster.remote.remote1.seeds] " +
                containsString("missing required setting [cluster.remote.remote1.seeds] " +
                        "for setting [cluster.remote.remote1.skip_unavailable]"));
    }

@ -251,7 +251,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
        ResponseException responseException = expectThrows(ResponseException.class,
                () -> client().performRequest(request));
        assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode());
        assertThat(responseException.getMessage(), containsString("Missing required setting [cluster.remote.remote1.seeds] " +
        assertThat(responseException.getMessage(), containsString("missing required setting [cluster.remote.remote1.seeds] " +
                "for setting [cluster.remote.remote1.skip_unavailable]"));
    }
@ -357,7 +357,7 @@ public class EvilLoggerTests extends ESTestCase {
        }
    }

    public void testNoNodeNameWarning() throws IOException, UserException {
    public void testNoNodeNameInPatternWarning() throws IOException, UserException {
        setupLogging("no_node_name");

        final String path =
@ -368,7 +368,7 @@ public class EvilLoggerTests extends ESTestCase {
        assertThat(events.size(), equalTo(2));
        final String location = "org.elasticsearch.common.logging.LogConfigurator";
        // the first message is a warning for unsupported configuration files
        assertLogLine(events.get(0), Level.WARN, location, "\\[null\\] Some logging configurations have %marker but don't "
        assertLogLine(events.get(0), Level.WARN, location, "\\[unknown\\] Some logging configurations have %marker but don't "
            + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users "
            + "who customize log4j2.properties but will stop this behavior in 7.0. You should manually replace "
            + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:");
@ -52,7 +52,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
            .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
        IOException ioException = expectThrows(IOException.class, () -> {
            new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
            new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
        });
        assertTrue(ioException.getMessage(), ioException.getMessage().startsWith(path.toString()));
    }
@ -72,7 +72,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
            .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
        IOException ioException = expectThrows(IOException.class, () -> {
            new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
            new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
        });
        assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory"));
    }
@ -97,7 +97,7 @@ public class NodeEnvironmentEvilTests extends ESTestCase {
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
            .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
        IOException ioException = expectThrows(IOException.class, () -> {
            new NodeEnvironment(build, TestEnvironment.newEnvironment(build));
            new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {});
        });
        assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory"));
    }
qa/unconfigured-node-name/build.gradle (new file, 30 lines)
@ -0,0 +1,30 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

integTestCluster {
  setting 'node.name', null
}

integTestRunner {
  systemProperty 'tests.logfile',
    "${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }.log"
}
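The integTestRunner block above hands the node's log path to the test JVM as the tests.logfile system property, and a policy file later in this diff grants read access to that same path. A rough sketch of how a test could resolve and open it (the actual resolution lives in NodeNameInLogsIntegTestCase, which is not shown here, so the class and method names below are hypothetical):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Hypothetical helper, not part of this change: reads the log file pointed at by
    // the 'tests.logfile' system property configured in the gradle block above.
    public class LogFileSketch {
        public static BufferedReader openConfiguredLog() throws IOException {
            Path logFile = Paths.get(System.getProperty("tests.logfile"));
            return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
        }
    }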
@ -0,0 +1,53 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.unconfigured_node_name;

import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;

import java.io.IOException;
import java.io.BufferedReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;

public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
    @Override
    protected BufferedReader openReader(Path logFile) throws IOException {
        assumeTrue("We log a line without the node name if we can't install the seccomp filters",
            BootstrapInfo.isSystemCallFilterInstalled());
        return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {
            try {
                return Files.newBufferedReader(logFile, StandardCharsets.UTF_8);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
    }

    public void testDummy() {
        /* Dummy test case so that when we run this test on a platform that
         * does not support our syscall filters and we skip the test above
         * we don't fail the entire test run because we skipped all the tests.
         */
    }
}
@ -0,0 +1,4 @@
grant {
  // Needed to read the log file
  permission java.io.FilePermission "${tests.logfile}", "read";
};
@ -1 +0,0 @@
fa8e0fbef3e3fcf49ace4a4153580070def770eb
Some files were not shown because too many files have changed in this diff.