Run spotless and exclude checkstyle on client module (#1392)

Signed-off-by: Owais Kazi <owaiskazi19@gmail.com>
This commit is contained in:
Owais Kazi 2021-10-21 13:13:38 -07:00 committed by GitHub
parent 119701f622
commit 33e70a9886
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
211 changed files with 8038 additions and 5001 deletions

View File

@ -17,23 +17,15 @@
<!-- Intentionally has long example curl commands to coincide with sibling Painless tests. -->
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]painless[/\\]ContextExampleTests.java" checks="LineLength" />
<!-- Exclude server to run checkstyle -->
<!-- Excludes checkstyle run on server module -->
<suppress files="server" checks="." />
<!-- Excludes checkstyle run on client module -->
<suppress files="client" checks="." />
<!--
Truly temporary suppression of snippets included in
documentation that are so wide that they scroll.
-->
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]ClusterClientDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]CRUDDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]IndicesClientDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]IngestClientDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]MiscellaneousDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]SearchDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]SnapshotClientDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]StoredScriptsDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]TasksClientDocumentationIT.java" id="SnippetLength" />
<suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]WatcherDocumentationIT.java" id="SnippetLength" />
<suppress files="modules[/\\]reindex[/\\]src[/\\]test[/\\]java[/\\]org[/\\]opensearch[/\\]client[/\\]documentation[/\\]ReindexDocumentationIT.java" id="SnippetLength" />
<!-- Gradle requires inputs to be serializable -->

View File

@ -77,8 +77,7 @@ public abstract class AbstractBenchmark<T extends Closeable> {
@SuppressForbidden(reason = "system out is ok for a command line tool")
private void runBulkIndexBenchmark(String[] args) throws Exception {
if (args.length != 7) {
System.err.println(
"usage: 'bulk' benchmarkTargetHostIp indexFilePath indexName typeName numberOfDocuments bulkSize");
System.err.println("usage: 'bulk' benchmarkTargetHostIp indexFilePath indexName typeName numberOfDocuments bulkSize");
System.exit(1);
}
String benchmarkTargetHost = args[1];
@ -95,9 +94,11 @@ public abstract class AbstractBenchmark<T extends Closeable> {
T client = client(benchmarkTargetHost);
BenchmarkRunner benchmark = new BenchmarkRunner(warmupIterations, iterations,
new BulkBenchmarkTask(
bulkRequestExecutor(client, indexName, typeName), indexFilePath, warmupIterations, iterations, bulkSize));
BenchmarkRunner benchmark = new BenchmarkRunner(
warmupIterations,
iterations,
new BulkBenchmarkTask(bulkRequestExecutor(client, indexName, typeName), indexFilePath, warmupIterations, iterations, bulkSize)
);
try {
runTrials(() -> {
@ -113,8 +114,7 @@ public abstract class AbstractBenchmark<T extends Closeable> {
@SuppressForbidden(reason = "system out is ok for a command line tool")
private void runSearchBenchmark(String[] args) throws Exception {
if (args.length != 5) {
System.err.println(
"usage: 'search' benchmarkTargetHostIp indexName searchRequestBody throughputRates");
System.err.println("usage: 'search' benchmarkTargetHostIp indexName searchRequestBody throughputRates");
System.exit(1);
}
String benchmarkTargetHost = args[1];
@ -127,12 +127,19 @@ public abstract class AbstractBenchmark<T extends Closeable> {
try {
runTrials(() -> {
for (int throughput : throughputRates) {
//GC between trials to reduce the likelihood of a GC occurring in the middle of a trial.
// GC between trials to reduce the likelihood of a GC occurring in the middle of a trial.
runGc();
BenchmarkRunner benchmark = new BenchmarkRunner(SEARCH_BENCHMARK_ITERATIONS, SEARCH_BENCHMARK_ITERATIONS,
BenchmarkRunner benchmark = new BenchmarkRunner(
SEARCH_BENCHMARK_ITERATIONS,
SEARCH_BENCHMARK_ITERATIONS,
new SearchBenchmarkTask(
searchRequestExecutor(client, indexName), searchBody, SEARCH_BENCHMARK_ITERATIONS,
SEARCH_BENCHMARK_ITERATIONS, throughput));
searchRequestExecutor(client, indexName),
searchBody,
SEARCH_BENCHMARK_ITERATIONS,
SEARCH_BENCHMARK_ITERATIONS,
throughput
)
);
System.out.printf("Target throughput = %d ops / s%n", throughput);
benchmark.run();
}

View File

@ -55,8 +55,12 @@ public final class BenchmarkRunner {
@SuppressForbidden(reason = "system out is ok for a command line tool")
public void run() {
SampleRecorder recorder = new SampleRecorder(iterations);
System.out.printf("Running %s with %d warmup iterations and %d iterations.%n",
task.getClass().getSimpleName(), warmupIterations, iterations);
System.out.printf(
"Running %s with %d warmup iterations and %d iterations.%n",
task.getClass().getSimpleName(),
warmupIterations,
iterations
);
try {
task.setUp(recorder);
@ -78,14 +82,26 @@ public final class BenchmarkRunner {
for (Metrics metrics : summaryMetrics) {
String throughput = String.format(Locale.ROOT, "Throughput [ops/s]: %f", metrics.throughput);
String serviceTimes = String.format(Locale.ROOT,
String serviceTimes = String.format(
Locale.ROOT,
"Service time [ms]: p50 = %f, p90 = %f, p95 = %f, p99 = %f, p99.9 = %f, p99.99 = %f",
metrics.serviceTimeP50, metrics.serviceTimeP90, metrics.serviceTimeP95,
metrics.serviceTimeP99, metrics.serviceTimeP999, metrics.serviceTimeP9999);
String latencies = String.format(Locale.ROOT,
metrics.serviceTimeP50,
metrics.serviceTimeP90,
metrics.serviceTimeP95,
metrics.serviceTimeP99,
metrics.serviceTimeP999,
metrics.serviceTimeP9999
);
String latencies = String.format(
Locale.ROOT,
"Latency [ms]: p50 = %f, p90 = %f, p95 = %f, p99 = %f, p99.9 = %f, p99.99 = %f",
metrics.latencyP50, metrics.latencyP90, metrics.latencyP95,
metrics.latencyP99, metrics.latencyP999, metrics.latencyP9999);
metrics.latencyP50,
metrics.latencyP90,
metrics.latencyP95,
metrics.latencyP99,
metrics.latencyP999,
metrics.latencyP9999
);
int lineLength = Math.max(serviceTimes.length(), latencies.length());

View File

@ -49,10 +49,24 @@ public final class Metrics {
public final double latencyP999;
public final double latencyP9999;
public Metrics(String operation, long successCount, long errorCount, double throughput,
double serviceTimeP50, double serviceTimeP90, double serviceTimeP95, double serviceTimeP99,
double serviceTimeP999, double serviceTimeP9999, double latencyP50, double latencyP90,
double latencyP95, double latencyP99, double latencyP999, double latencyP9999) {
public Metrics(
String operation,
long successCount,
long errorCount,
double throughput,
double serviceTimeP50,
double serviceTimeP90,
double serviceTimeP95,
double serviceTimeP99,
double serviceTimeP999,
double serviceTimeP9999,
double latencyP50,
double latencyP90,
double latencyP95,
double latencyP99,
double latencyP999,
double latencyP9999
) {
this.operation = operation;
this.successCount = successCount;
this.errorCount = errorCount;

View File

@ -75,24 +75,28 @@ public final class MetricsCalculator {
it++;
}
metrics.add(new Metrics(operationAndMetrics.getKey(),
samples.stream().filter((r) -> r.isSuccess()).count(),
samples.stream().filter((r) -> !r.isSuccess()).count(),
// throughput calculation is based on the total (Wall clock) time it took to generate all samples
calculateThroughput(samples.size(), latestEnd - firstStart),
// convert ns -> ms without losing precision
StatUtils.percentile(serviceTimes, 50.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 90.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 95.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 99.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 99.9d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 99.99d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 50.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 90.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 95.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 99.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 99.9d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 99.99d) / TimeUnit.MILLISECONDS.toNanos(1L)));
metrics.add(
new Metrics(
operationAndMetrics.getKey(),
samples.stream().filter((r) -> r.isSuccess()).count(),
samples.stream().filter((r) -> !r.isSuccess()).count(),
// throughput calculation is based on the total (Wall clock) time it took to generate all samples
calculateThroughput(samples.size(), latestEnd - firstStart),
// convert ns -> ms without losing precision
StatUtils.percentile(serviceTimes, 50.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 90.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 95.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 99.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 99.9d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(serviceTimes, 99.99d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 50.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 90.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 95.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 99.0d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 99.9d) / TimeUnit.MILLISECONDS.toNanos(1L),
StatUtils.percentile(latencies, 99.99d) / TimeUnit.MILLISECONDS.toNanos(1L)
)
);
}
return metrics;
}

View File

@ -62,8 +62,13 @@ public class BulkBenchmarkTask implements BenchmarkTask {
private LoadGenerator generator;
private ExecutorService executorService;
public BulkBenchmarkTask(BulkRequestExecutor requestExecutor, String indexFilePath, int warmupIterations, int measurementIterations,
int bulkSize) {
public BulkBenchmarkTask(
BulkRequestExecutor requestExecutor,
String indexFilePath,
int warmupIterations,
int measurementIterations,
int bulkSize
) {
this.requestExecutor = requestExecutor;
this.indexFilePath = indexFilePath;
this.warmupIterations = warmupIterations;
@ -86,11 +91,11 @@ public class BulkBenchmarkTask implements BenchmarkTask {
@Override
@SuppressForbidden(reason = "system out is ok for a command line tool")
public void run() throws Exception {
public void run() throws Exception {
generator.execute();
// when the generator is done, there are no more data -> shutdown client
executorService.shutdown();
//We need to wait until the queue is drained
// We need to wait until the queue is drained
final boolean finishedNormally = executorService.awaitTermination(20, TimeUnit.MINUTES);
if (finishedNormally == false) {
System.err.println("Background tasks are still running after timeout on enclosing pool. Forcing pool shutdown.");
@ -100,7 +105,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
@Override
public void tearDown() {
//no op
// no op
}
private static final class LoadGenerator {
@ -146,7 +151,6 @@ public class BulkBenchmarkTask implements BenchmarkTask {
}
}
private static final class BulkIndexer implements Runnable {
private static final Logger logger = LogManager.getLogger(BulkIndexer.class);
@ -156,8 +160,13 @@ public class BulkBenchmarkTask implements BenchmarkTask {
private final BulkRequestExecutor bulkRequestExecutor;
private final SampleRecorder sampleRecorder;
BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
SampleRecorder sampleRecorder, BulkRequestExecutor bulkRequestExecutor) {
BulkIndexer(
BlockingQueue<List<String>> bulkData,
int warmupIterations,
int measurementIterations,
SampleRecorder sampleRecorder,
BulkRequestExecutor bulkRequestExecutor
) {
this.bulkData = bulkData;
this.warmupIterations = warmupIterations;
this.measurementIterations = measurementIterations;
@ -176,7 +185,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
Thread.currentThread().interrupt();
return;
}
//measure only service time, latency is not that interesting for a throughput benchmark
// measure only service time, latency is not that interesting for a throughput benchmark
long start = System.nanoTime();
try {
success = bulkRequestExecutor.bulkIndex(currentBulk);

View File

@ -46,8 +46,13 @@ public class SearchBenchmarkTask implements BenchmarkTask {
private SampleRecorder sampleRecorder;
public SearchBenchmarkTask(SearchRequestExecutor searchRequestExecutor, String body, int warmupIterations,
int measurementIterations, int targetThroughput) {
public SearchBenchmarkTask(
SearchRequestExecutor searchRequestExecutor,
String body,
int warmupIterations,
int measurementIterations,
int targetThroughput
) {
this.searchRequestExecutor = searchRequestExecutor;
this.searchRequestBody = body;
this.warmupIterations = warmupIterations;

View File

@ -56,10 +56,10 @@ public final class RestClientBenchmark extends AbstractBenchmark<RestClient> {
@Override
protected RestClient client(String benchmarkTargetHost) {
return RestClient
.builder(new HttpHost(benchmarkTargetHost, 9200))
.setHttpClientConfigCallback(b -> b.setDefaultHeaders(
Collections.singleton(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "gzip"))))
return RestClient.builder(new HttpHost(benchmarkTargetHost, 9200))
.setHttpClientConfigCallback(
b -> b.setDefaultHeaders(Collections.singleton(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "gzip")))
)
.setRequestConfigCallback(b -> b.setContentCompressionEnabled(true))
.build();
}

View File

@ -64,11 +64,15 @@ public class NoopPlugin extends Plugin implements ActionPlugin {
}
@Override
public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster) {
return Arrays.asList(
new RestNoopBulkAction(),
new RestNoopSearchAction());
public List<RestHandler> getRestHandlers(
Settings settings,
RestController restController,
ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings,
SettingsFilter settingsFilter,
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster
) {
return Arrays.asList(new RestNoopBulkAction(), new RestNoopSearchAction());
}
}

View File

@ -62,13 +62,16 @@ public class RestNoopBulkAction extends BaseRestHandler {
@Override
public List<Route> routes() {
return unmodifiableList(asList(
new Route(POST, "/_noop_bulk"),
new Route(PUT, "/_noop_bulk"),
new Route(POST, "/{index}/_noop_bulk"),
new Route(PUT, "/{index}/_noop_bulk"),
new Route(POST, "/{index}/{type}/_noop_bulk"),
new Route(PUT, "/{index}/{type}/_noop_bulk")));
return unmodifiableList(
asList(
new Route(POST, "/_noop_bulk"),
new Route(PUT, "/_noop_bulk"),
new Route(POST, "/{index}/_noop_bulk"),
new Route(PUT, "/{index}/_noop_bulk"),
new Route(POST, "/{index}/{type}/_noop_bulk"),
new Route(PUT, "/{index}/{type}/_noop_bulk")
)
);
}
@Override
@ -91,8 +94,17 @@ public class RestNoopBulkAction extends BaseRestHandler {
}
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh"));
bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting,
null, defaultPipeline, defaultRequireAlias, true, request.getXContentType());
bulkRequest.add(
request.requiredContent(),
defaultIndex,
defaultType,
defaultRouting,
null,
defaultPipeline,
defaultRequireAlias,
true,
request.getXContentType()
);
// short circuit the call to the transport layer
return channel -> {
@ -102,12 +114,14 @@ public class RestNoopBulkAction extends BaseRestHandler {
}
private static class BulkRestBuilderListener extends RestBuilderListener<BulkRequest> {
private final BulkItemResponse ITEM_RESPONSE = new BulkItemResponse(1, DocWriteRequest.OpType.UPDATE,
new UpdateResponse(new ShardId("mock", "", 1), "mock_type", "1", 0L, 1L, 1L, DocWriteResponse.Result.CREATED));
private final BulkItemResponse ITEM_RESPONSE = new BulkItemResponse(
1,
DocWriteRequest.OpType.UPDATE,
new UpdateResponse(new ShardId("mock", "", 1), "mock_type", "1", 0L, 1L, 1L, DocWriteResponse.Result.CREATED)
);
private final RestRequest request;
BulkRestBuilderListener(RestChannel channel, RestRequest request) {
super(channel);
this.request = request;

View File

@ -46,8 +46,11 @@ import org.opensearch.tasks.Task;
import org.opensearch.transport.TransportService;
public class TransportNoopBulkAction extends HandledTransportAction<BulkRequest, BulkResponse> {
private static final BulkItemResponse ITEM_RESPONSE = new BulkItemResponse(1, DocWriteRequest.OpType.UPDATE,
new UpdateResponse(new ShardId("mock", "", 1), "mock_type", "1", 0L, 1L, 1L, DocWriteResponse.Result.CREATED));
private static final BulkItemResponse ITEM_RESPONSE = new BulkItemResponse(
1,
DocWriteRequest.OpType.UPDATE,
new UpdateResponse(new ShardId("mock", "", 1), "mock_type", "1", 0L, 1L, 1L, DocWriteResponse.Result.CREATED)
);
@Inject
public TransportNoopBulkAction(TransportService transportService, ActionFilters actionFilters) {

View File

@ -48,13 +48,16 @@ public class RestNoopSearchAction extends BaseRestHandler {
@Override
public List<Route> routes() {
return unmodifiableList(asList(
new Route(GET, "/_noop_search"),
new Route(POST, "/_noop_search"),
new Route(GET, "/{index}/_noop_search"),
new Route(POST, "/{index}/_noop_search"),
new Route(GET, "/{index}/{type}/_noop_search"),
new Route(POST, "/{index}/{type}/_noop_search")));
return unmodifiableList(
asList(
new Route(GET, "/_noop_search"),
new Route(POST, "/_noop_search"),
new Route(GET, "/{index}/_noop_search"),
new Route(POST, "/{index}/_noop_search"),
new Route(GET, "/{index}/{type}/_noop_search"),
new Route(POST, "/{index}/{type}/_noop_search")
)
);
}
@Override

View File

@ -59,12 +59,25 @@ public class TransportNoopSearchAction extends HandledTransportAction<SearchRequ
@Override
protected void doExecute(Task task, SearchRequest request, ActionListener<SearchResponse> listener) {
listener.onResponse(new SearchResponse(new InternalSearchResponse(
new SearchHits(
new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f),
InternalAggregations.EMPTY,
new Suggest(Collections.emptyList()),
new SearchProfileShardResults(Collections.emptyMap()), false, false, 1),
"", 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY));
listener.onResponse(
new SearchResponse(
new InternalSearchResponse(
new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f),
InternalAggregations.EMPTY,
new Suggest(Collections.emptyList()),
new SearchProfileShardResults(Collections.emptyMap()),
false,
false,
1
),
"",
1,
1,
0,
0,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
)
);
}
}

View File

@ -73,9 +73,14 @@ public final class ClusterClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public ClusterUpdateSettingsResponse putSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(clusterUpdateSettingsRequest, ClusterRequestConverters::clusterPutSettings,
options, ClusterUpdateSettingsResponse::fromXContent, emptySet());
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
clusterUpdateSettingsRequest,
ClusterRequestConverters::clusterPutSettings,
options,
ClusterUpdateSettingsResponse::fromXContent,
emptySet()
);
}
/**
@ -86,11 +91,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable putSettingsAsync(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, RequestOptions options,
ActionListener<ClusterUpdateSettingsResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(clusterUpdateSettingsRequest,
ClusterRequestConverters::clusterPutSettings,
options, ClusterUpdateSettingsResponse::fromXContent, listener, emptySet());
public Cancellable putSettingsAsync(
ClusterUpdateSettingsRequest clusterUpdateSettingsRequest,
RequestOptions options,
ActionListener<ClusterUpdateSettingsResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
clusterUpdateSettingsRequest,
ClusterRequestConverters::clusterPutSettings,
options,
ClusterUpdateSettingsResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -103,8 +116,13 @@ public final class ClusterClient {
*/
public ClusterGetSettingsResponse getSettings(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(clusterGetSettingsRequest, ClusterRequestConverters::clusterGetSettings,
options, ClusterGetSettingsResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
clusterGetSettingsRequest,
ClusterRequestConverters::clusterGetSettings,
options,
ClusterGetSettingsResponse::fromXContent,
emptySet()
);
}
/**
@ -115,11 +133,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getSettingsAsync(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options,
ActionListener<ClusterGetSettingsResponse> listener) {
public Cancellable getSettingsAsync(
ClusterGetSettingsRequest clusterGetSettingsRequest,
RequestOptions options,
ActionListener<ClusterGetSettingsResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
clusterGetSettingsRequest, ClusterRequestConverters::clusterGetSettings,
options, ClusterGetSettingsResponse::fromXContent, listener, emptySet());
clusterGetSettingsRequest,
ClusterRequestConverters::clusterGetSettings,
options,
ClusterGetSettingsResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -133,8 +159,13 @@ public final class ClusterClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public ClusterHealthResponse health(ClusterHealthRequest healthRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(healthRequest, ClusterRequestConverters::clusterHealth, options,
ClusterHealthResponse::fromXContent, singleton(RestStatus.REQUEST_TIMEOUT.getStatus()));
return restHighLevelClient.performRequestAndParseEntity(
healthRequest,
ClusterRequestConverters::clusterHealth,
options,
ClusterHealthResponse::fromXContent,
singleton(RestStatus.REQUEST_TIMEOUT.getStatus())
);
}
/**
@ -146,10 +177,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable healthAsync(ClusterHealthRequest healthRequest, RequestOptions options,
ActionListener<ClusterHealthResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(healthRequest, ClusterRequestConverters::clusterHealth, options,
ClusterHealthResponse::fromXContent, listener, singleton(RestStatus.REQUEST_TIMEOUT.getStatus()));
public Cancellable healthAsync(
ClusterHealthRequest healthRequest,
RequestOptions options,
ActionListener<ClusterHealthResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
healthRequest,
ClusterRequestConverters::clusterHealth,
options,
ClusterHealthResponse::fromXContent,
listener,
singleton(RestStatus.REQUEST_TIMEOUT.getStatus())
);
}
/**
@ -161,8 +201,13 @@ public final class ClusterClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public RemoteInfoResponse remoteInfo(RemoteInfoRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, ClusterRequestConverters::remoteInfo, options,
RemoteInfoResponse::fromXContent, singleton(RestStatus.REQUEST_TIMEOUT.getStatus()));
return restHighLevelClient.performRequestAndParseEntity(
request,
ClusterRequestConverters::remoteInfo,
options,
RemoteInfoResponse::fromXContent,
singleton(RestStatus.REQUEST_TIMEOUT.getStatus())
);
}
/**
@ -173,10 +218,15 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable remoteInfoAsync(RemoteInfoRequest request, RequestOptions options,
ActionListener<RemoteInfoResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request, ClusterRequestConverters::remoteInfo, options,
RemoteInfoResponse::fromXContent, listener, singleton(RestStatus.REQUEST_TIMEOUT.getStatus()));
public Cancellable remoteInfoAsync(RemoteInfoRequest request, RequestOptions options, ActionListener<RemoteInfoResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
ClusterRequestConverters::remoteInfo,
options,
RemoteInfoResponse::fromXContent,
listener,
singleton(RestStatus.REQUEST_TIMEOUT.getStatus())
);
}
/**
@ -187,8 +237,13 @@ public final class ClusterClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse deleteComponentTemplate(DeleteComponentTemplateRequest req, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(req, ClusterRequestConverters::deleteComponentTemplate,
options, AcknowledgedResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
req,
ClusterRequestConverters::deleteComponentTemplate,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -199,10 +254,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deleteComponentTemplateAsync(DeleteComponentTemplateRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request, ClusterRequestConverters::deleteComponentTemplate,
options, AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable deleteComponentTemplateAsync(
DeleteComponentTemplateRequest request,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
ClusterRequestConverters::deleteComponentTemplate,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -213,10 +277,15 @@ public final class ClusterClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse putComponentTemplate(PutComponentTemplateRequest putComponentTemplateRequest,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(putComponentTemplateRequest, ClusterRequestConverters::putComponentTemplate,
options, AcknowledgedResponse::fromXContent, emptySet());
public AcknowledgedResponse putComponentTemplate(PutComponentTemplateRequest putComponentTemplateRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
putComponentTemplateRequest,
ClusterRequestConverters::putComponentTemplate,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -227,10 +296,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable putComponentTemplateAsync(PutComponentTemplateRequest putComponentTemplateRequest,
RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(putComponentTemplateRequest,
ClusterRequestConverters::putComponentTemplate, options, AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable putComponentTemplateAsync(
PutComponentTemplateRequest putComponentTemplateRequest,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
putComponentTemplateRequest,
ClusterRequestConverters::putComponentTemplate,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -240,10 +318,17 @@ public final class ClusterClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetComponentTemplatesResponse getComponentTemplate(GetComponentTemplatesRequest getComponentTemplatesRequest,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(getComponentTemplatesRequest,
ClusterRequestConverters::getComponentTemplates, options, GetComponentTemplatesResponse::fromXContent, emptySet());
public GetComponentTemplatesResponse getComponentTemplate(
GetComponentTemplatesRequest getComponentTemplatesRequest,
RequestOptions options
) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
getComponentTemplatesRequest,
ClusterRequestConverters::getComponentTemplates,
options,
GetComponentTemplatesResponse::fromXContent,
emptySet()
);
}
/**
@ -253,10 +338,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getComponentTemplateAsync(GetComponentTemplatesRequest getComponentTemplatesRequest, RequestOptions options,
ActionListener<GetComponentTemplatesResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(getComponentTemplatesRequest,
ClusterRequestConverters::getComponentTemplates, options, GetComponentTemplatesResponse::fromXContent, listener, emptySet());
public Cancellable getComponentTemplateAsync(
GetComponentTemplatesRequest getComponentTemplatesRequest,
RequestOptions options,
ActionListener<GetComponentTemplatesResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
getComponentTemplatesRequest,
ClusterRequestConverters::getComponentTemplates,
options,
GetComponentTemplatesResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -267,10 +361,15 @@ public final class ClusterClient {
* @return true if any index templates in the request exist, false otherwise
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public boolean existsComponentTemplate(ComponentTemplatesExistRequest componentTemplatesRequest,
RequestOptions options) throws IOException {
return restHighLevelClient.performRequest(componentTemplatesRequest,
ClusterRequestConverters::componentTemplatesExist, options, RestHighLevelClient::convertExistsResponse, emptySet());
public boolean existsComponentTemplate(ComponentTemplatesExistRequest componentTemplatesRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequest(
componentTemplatesRequest,
ClusterRequestConverters::componentTemplatesExist,
options,
RestHighLevelClient::convertExistsResponse,
emptySet()
);
}
/**
@ -280,11 +379,19 @@ public final class ClusterClient {
* @param listener the listener to be notified upon request completion. The listener will be called with the value {@code true}
* @return cancellable that may be used to cancel the request
*/
public Cancellable existsComponentTemplateAsync(ComponentTemplatesExistRequest componentTemplatesRequest,
RequestOptions options,
ActionListener<Boolean> listener) {
public Cancellable existsComponentTemplateAsync(
ComponentTemplatesExistRequest componentTemplatesRequest,
RequestOptions options,
ActionListener<Boolean> listener
) {
return restHighLevelClient.performRequestAsync(componentTemplatesRequest,
ClusterRequestConverters::componentTemplatesExist, options, RestHighLevelClient::convertExistsResponse, listener, emptySet());
return restHighLevelClient.performRequestAsync(
componentTemplatesRequest,
ClusterRequestConverters::componentTemplatesExist,
options,
RestHighLevelClient::convertExistsResponse,
listener,
emptySet()
);
}
}

View File

@ -76,14 +76,12 @@ final class ClusterRequestConverters {
static Request clusterHealth(ClusterHealthRequest healthRequest) {
String[] indices = healthRequest.indices() == null ? Strings.EMPTY_ARRAY : healthRequest.indices();
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_cluster/health")
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_cluster/health")
.addCommaSeparatedPathParts(indices)
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params()
.withWaitForStatus(healthRequest.waitForStatus())
RequestConverters.Params params = new RequestConverters.Params().withWaitForStatus(healthRequest.waitForStatus())
.withWaitForNoRelocatingShards(healthRequest.waitForNoRelocatingShards())
.withWaitForNoInitializingShards(healthRequest.waitForNoInitializingShards())
.withWaitForActiveShards(healthRequest.waitForActiveShards(), ActiveShardCount.NONE)
@ -103,7 +101,8 @@ final class ClusterRequestConverters {
static Request putComponentTemplate(PutComponentTemplateRequest putComponentTemplateRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_component_template")
.addPathPart(putComponentTemplateRequest.name()).build();
.addPathPart(putComponentTemplateRequest.name())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putComponentTemplateRequest.masterNodeTimeout());
@ -118,9 +117,8 @@ final class ClusterRequestConverters {
return request;
}
static Request getComponentTemplates(GetComponentTemplatesRequest getComponentTemplatesRequest){
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_component_template")
static Request getComponentTemplates(GetComponentTemplatesRequest getComponentTemplatesRequest) {
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_component_template")
.addPathPart(getComponentTemplatesRequest.name())
.build();
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -132,8 +130,7 @@ final class ClusterRequestConverters {
}
static Request componentTemplatesExist(ComponentTemplatesExistRequest componentTemplatesRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_component_template")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_component_template")
.addPathPart(componentTemplatesRequest.name())
.build();
final Request request = new Request(HttpHead.METHOD_NAME, endpoint);

View File

@ -37,7 +37,6 @@ import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.opensearch.client.Request;
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
@ -88,7 +87,8 @@ final class IndicesRequestConverters {
static Request putDataStream(CreateDataStreamRequest createDataStreamRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_data_stream")
.addPathPart(createDataStreamRequest.getName()).build();
.addPathPart(createDataStreamRequest.getName())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
return request;
}
@ -101,8 +101,7 @@ final class IndicesRequestConverters {
}
static Request getDataStreams(GetDataStreamRequest dataStreamRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_stream")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_data_stream")
.addPathPart(dataStreamRequest.getName())
.build();
return new Request(HttpGet.METHOD_NAME, endpoint);
@ -110,8 +109,7 @@ final class IndicesRequestConverters {
static Request dataStreamsStats(DataStreamsStatsRequest dataStreamsStatsRequest) {
String[] expressions = dataStreamsStatsRequest.indices() == null ? Strings.EMPTY_ARRAY : dataStreamsStatsRequest.indices();
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_data_stream")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_data_stream")
.addCommaSeparatedPathParts(expressions)
.addPathPartAsIs("_stats")
.build();
@ -156,8 +154,7 @@ final class IndicesRequestConverters {
}
static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPart(createIndexRequest.index()).build();
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(createIndexRequest.index()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
@ -169,8 +166,7 @@ final class IndicesRequestConverters {
return request;
}
static Request createIndex(org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest)
throws IOException {
static Request createIndex(org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest) throws IOException {
String endpoint = RequestConverters.endpoint(createIndexRequest.indices());
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
@ -195,7 +191,6 @@ final class IndicesRequestConverters {
return request;
}
static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping"));
@ -219,8 +214,10 @@ final class IndicesRequestConverters {
throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API");
}
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(),
"_mapping", putMappingRequest.type()));
Request request = new Request(
HttpPut.METHOD_NAME,
RequestConverters.endpoint(putMappingRequest.indices(), "_mapping", putMappingRequest.type())
);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(putMappingRequest.timeout());
@ -264,8 +261,7 @@ final class IndicesRequestConverters {
String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices();
String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields();
String endpoint = new RequestConverters.EndpointBuilder()
.addCommaSeparatedPathParts(indices)
String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices)
.addPathPartAsIs("_mapping")
.addPathPartAsIs("field")
.addCommaSeparatedPathParts(fields)
@ -287,8 +283,10 @@ final class IndicesRequestConverters {
String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields();
String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices)
.addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types)
.addPathPartAsIs("field").addCommaSeparatedPathParts(fields)
.addPathPartAsIs("_mapping")
.addCommaSeparatedPathParts(types)
.addPathPartAsIs("field")
.addCommaSeparatedPathParts(fields)
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -347,7 +345,7 @@ final class IndicesRequestConverters {
}
static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) {
String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices();
String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY : clearIndicesCacheRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear"));
RequestConverters.Params parameters = new RequestConverters.Params();
@ -361,8 +359,8 @@ final class IndicesRequestConverters {
}
static Request existsAlias(GetAliasesRequest getAliasesRequest) {
if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
(getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0)
&& (getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
throw new IllegalArgumentException("existsAlias requires at least an alias or an index");
}
String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
@ -418,8 +416,9 @@ final class IndicesRequestConverters {
private static Request resize(ResizeRequest resizeRequest, ResizeType type) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex())
.addPathPartAsIs("_" + type.name().toLowerCase(Locale.ROOT))
.addPathPart(resizeRequest.getTargetIndex()).build();
.addPathPartAsIs("_" + type.name().toLowerCase(Locale.ROOT))
.addPathPart(resizeRequest.getTargetIndex())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
@ -435,7 +434,8 @@ final class IndicesRequestConverters {
private static Request resize(org.opensearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex())
.addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT))
.addPathPart(resizeRequest.getTargetIndexRequest().index()).build();
.addPathPart(resizeRequest.getTargetIndexRequest().index())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
@ -448,8 +448,10 @@ final class IndicesRequestConverters {
}
static Request rollover(RolloverRequest rolloverRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName()).build();
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getAlias())
.addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName())
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
@ -467,7 +469,9 @@ final class IndicesRequestConverters {
@Deprecated
static Request rollover(org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getRolloverTarget())
.addPathPartAsIs("_rollover").addPathPart(rolloverRequest.getNewIndexName()).build();
.addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName())
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
@ -597,9 +601,10 @@ final class IndicesRequestConverters {
*/
@Deprecated
static Request putTemplate(org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest)
throws IOException {
throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addPathPart(putIndexTemplateRequest.name()).build();
.addPathPart(putIndexTemplateRequest.name())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
@ -617,7 +622,8 @@ final class IndicesRequestConverters {
static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addPathPart(putIndexTemplateRequest.name()).build();
.addPathPart(putIndexTemplateRequest.name())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
@ -634,7 +640,8 @@ final class IndicesRequestConverters {
static Request putIndexTemplate(PutComposableIndexTemplateRequest putIndexTemplateRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template")
.addPathPart(putIndexTemplateRequest.name()).build();
.addPathPart(putIndexTemplateRequest.name())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
@ -651,7 +658,8 @@ final class IndicesRequestConverters {
static Request simulateIndexTemplate(SimulateIndexTemplateRequest simulateIndexTemplateRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template", "_simulate_index")
.addPathPart(simulateIndexTemplateRequest.indexName()).build();
.addPathPart(simulateIndexTemplateRequest.indexName())
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(simulateIndexTemplateRequest.masterNodeTimeout());
@ -663,8 +671,9 @@ final class IndicesRequestConverters {
if (Strings.hasText(putComposableIndexTemplateRequest.cause())) {
params.putParam("cause", putComposableIndexTemplateRequest.cause());
}
request.setEntity(RequestConverters.createEntity(putComposableIndexTemplateRequest,
RequestConverters.REQUEST_BODY_CONTENT_TYPE));
request.setEntity(
RequestConverters.createEntity(putComposableIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)
);
}
request.addParameters(params.asMap());
return request;
@ -707,8 +716,7 @@ final class IndicesRequestConverters {
}
private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest, boolean includeTypeName) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_template")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addCommaSeparatedPathParts(getIndexTemplatesRequest.names())
.build();
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -723,8 +731,7 @@ final class IndicesRequestConverters {
}
static Request getIndexTemplates(GetComposableIndexTemplateRequest getIndexTemplatesRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_index_template")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template")
.addPathPart(getIndexTemplatesRequest.name())
.build();
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -736,8 +743,7 @@ final class IndicesRequestConverters {
}
static Request templatesExist(IndexTemplatesExistRequest indexTemplatesExistRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_template")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addCommaSeparatedPathParts(indexTemplatesExistRequest.names())
.build();
final Request request = new Request(HttpHead.METHOD_NAME, endpoint);
@ -749,8 +755,7 @@ final class IndicesRequestConverters {
}
static Request templatesExist(ComposableIndexTemplateExistRequest indexTemplatesExistRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_index_template")
final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template")
.addPathPart(indexTemplatesExistRequest.name())
.build();
final Request request = new Request(HttpHead.METHOD_NAME, endpoint);
@ -794,10 +799,10 @@ final class IndicesRequestConverters {
}
static Request deleteAlias(DeleteAliasRequest deleteAliasRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPart(deleteAliasRequest.getIndex())
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(deleteAliasRequest.getIndex())
.addPathPartAsIs("_alias")
.addPathPart(deleteAliasRequest.getAlias()).build();
.addPathPart(deleteAliasRequest.getAlias())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(deleteAliasRequest.timeout());

View File

@ -32,8 +32,6 @@
package org.opensearch.client;
import org.opensearch.client.Cancellable;
import org.opensearch.client.RequestOptions;
import org.opensearch.action.ActionListener;
import org.opensearch.action.ingest.DeletePipelineRequest;
import org.opensearch.action.ingest.GetPipelineRequest;
@ -69,8 +67,13 @@ public final class IngestClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::putPipeline, options,
AcknowledgedResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
request,
IngestRequestConverters::putPipeline,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -82,8 +85,14 @@ public final class IngestClient {
* @return cancellable that may be used to cancel the request
*/
public Cancellable putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::putPipeline, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
IngestRequestConverters::putPipeline,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -95,8 +104,13 @@ public final class IngestClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options,
GetPipelineResponse::fromXContent, Collections.singleton(404));
return restHighLevelClient.performRequestAndParseEntity(
request,
IngestRequestConverters::getPipeline,
options,
GetPipelineResponse::fromXContent,
Collections.singleton(404)
);
}
/**
@ -108,8 +122,14 @@ public final class IngestClient {
* @return cancellable that may be used to cancel the request
*/
public Cancellable getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options,
GetPipelineResponse::fromXContent, listener, Collections.singleton(404));
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
IngestRequestConverters::getPipeline,
options,
GetPipelineResponse::fromXContent,
listener,
Collections.singleton(404)
);
}
/**
@ -121,8 +141,13 @@ public final class IngestClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
AcknowledgedResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
request,
IngestRequestConverters::deletePipeline,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -133,11 +158,19 @@ public final class IngestClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deletePipelineAsync(DeletePipelineRequest request, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity( request,
IngestRequestConverters::deletePipeline, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable deletePipelineAsync(
DeletePipelineRequest request,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
IngestRequestConverters::deletePipeline,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -150,8 +183,13 @@ public final class IngestClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
SimulatePipelineResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
request,
IngestRequestConverters::simulatePipeline,
options,
SimulatePipelineResponse::fromXContent,
emptySet()
);
}
/**
@ -163,10 +201,18 @@ public final class IngestClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable simulateAsync(SimulatePipelineRequest request,
RequestOptions options,
ActionListener<SimulatePipelineResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
SimulatePipelineResponse::fromXContent, listener, emptySet());
public Cancellable simulateAsync(
SimulatePipelineRequest request,
RequestOptions options,
ActionListener<SimulatePipelineResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
IngestRequestConverters::simulatePipeline,
options,
SimulatePipelineResponse::fromXContent,
listener,
emptySet()
);
}
}

View File

@ -40,7 +40,6 @@ import org.opensearch.action.ingest.DeletePipelineRequest;
import org.opensearch.action.ingest.GetPipelineRequest;
import org.opensearch.action.ingest.PutPipelineRequest;
import org.opensearch.action.ingest.SimulatePipelineRequest;
import org.opensearch.client.Request;
import java.io.IOException;
@ -49,8 +48,7 @@ final class IngestRequestConverters {
private IngestRequestConverters() {}
static Request getPipeline(GetPipelineRequest getPipelineRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_ingest/pipeline")
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline")
.addCommaSeparatedPathParts(getPipelineRequest.getIds())
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -62,8 +60,7 @@ final class IngestRequestConverters {
}
static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_ingest/pipeline")
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline")
.addPathPart(putPipelineRequest.getId())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
@ -77,8 +74,7 @@ final class IngestRequestConverters {
}
static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_ingest/pipeline")
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline")
.addPathPart(deletePipelineRequest.getId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

View File

@ -58,23 +58,28 @@ public final class NodesResponseHeader {
public static final ParseField FAILURES = new ParseField("failures");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<NodesResponseHeader, Void> PARSER =
new ConstructingObjectParser<>("nodes_response_header", true,
(a) -> {
int i = 0;
int total = (Integer) a[i++];
int successful = (Integer) a[i++];
int failed = (Integer) a[i++];
List<OpenSearchException> failures = (List<OpenSearchException>) a[i++];
return new NodesResponseHeader(total, successful, failed, failures);
});
public static final ConstructingObjectParser<NodesResponseHeader, Void> PARSER = new ConstructingObjectParser<>(
"nodes_response_header",
true,
(a) -> {
int i = 0;
int total = (Integer) a[i++];
int successful = (Integer) a[i++];
int failed = (Integer) a[i++];
List<OpenSearchException> failures = (List<OpenSearchException>) a[i++];
return new NodesResponseHeader(total, successful, failed, failures);
}
);
static {
PARSER.declareInt(ConstructingObjectParser.constructorArg(), TOTAL);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), SUCCESSFUL);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), FAILED);
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> OpenSearchException.fromXContent(p), FAILURES);
PARSER.declareObjectArray(
ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> OpenSearchException.fromXContent(p),
FAILURES
);
}
private final int total;
@ -135,10 +140,7 @@ public final class NodesResponseHeader {
return false;
}
NodesResponseHeader that = (NodesResponseHeader) o;
return total == that.total &&
successful == that.successful &&
failed == that.failed &&
Objects.equals(failures, that.failures);
return total == that.total && successful == that.successful && failed == that.failed && Objects.equals(failures, that.failures);
}
@Override

View File

@ -243,14 +243,19 @@ final class RequestConverters {
BytesReference indexSource = indexRequest.source();
XContentType indexXContentType = indexRequest.getContentType();
try (XContentParser parser = XContentHelper.createParser(
try (
XContentParser parser = XContentHelper.createParser(
/*
* EMPTY and THROW are fine here because we just call
* copyCurrentStructure which doesn't touch the
* registry or deprecation.
*/
NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
indexSource, indexXContentType)) {
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
indexSource,
indexXContentType
)
) {
try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
builder.copyCurrentStructure(parser);
source = BytesReference.bytes(builder).toBytesRef();
@ -398,8 +403,14 @@ final class RequestConverters {
if (updateRequest.upsertRequest() != null) {
XContentType upsertContentType = updateRequest.upsertRequest().getContentType();
if ((xContentType != null) && (xContentType != upsertContentType)) {
throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType + "]" +
" and upsert [" + upsertContentType + "] documents");
throw new IllegalStateException(
"Update request cannot have different content types for doc ["
+ xContentType
+ "]"
+ " and upsert ["
+ upsertContentType
+ "] documents"
);
} else {
xContentType = upsertContentType;
}
@ -523,10 +534,10 @@ final class RequestConverters {
params.withRouting(countRequest.routing());
params.withPreference(countRequest.preference());
params.withIndicesOptions(countRequest.indicesOptions());
if (countRequest.terminateAfter() != 0){
if (countRequest.terminateAfter() != 0) {
params.withTerminateAfter(countRequest.terminateAfter());
}
if (countRequest.minScore() != null){
if (countRequest.minScore() != null) {
params.putParam("min_score", String.valueOf(countRequest.minScore()));
}
request.addParameters(params.asMap());
@ -551,7 +562,7 @@ final class RequestConverters {
}
static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) throws IOException {
String methodName = fieldCapabilitiesRequest.indexFilter() != null ? HttpPost.METHOD_NAME : HttpGet.METHOD_NAME;
String methodName = fieldCapabilitiesRequest.indexFilter() != null ? HttpPost.METHOD_NAME : HttpGet.METHOD_NAME;
Request request = new Request(methodName, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps"));
Params params = new Params();
@ -602,8 +613,7 @@ final class RequestConverters {
private static Request prepareReindexRequest(ReindexRequest reindexRequest, boolean waitForCompletion) throws IOException {
String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withWaitForCompletion(waitForCompletion)
Params params = new Params().withWaitForCompletion(waitForCompletion)
.withRefresh(reindexRequest.isRefresh())
.withTimeout(reindexRequest.getTimeout())
.withWaitForActiveShards(reindexRequest.getWaitForActiveShards())
@ -618,13 +628,11 @@ final class RequestConverters {
return request;
}
private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest,
boolean waitForCompletion) throws IOException {
String endpoint =
endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest, boolean waitForCompletion)
throws IOException {
String endpoint = endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRouting(deleteByQueryRequest.getRouting())
Params params = new Params().withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
.withTimeout(deleteByQueryRequest.getTimeout())
.withWaitForActiveShards(deleteByQueryRequest.getWaitForActiveShards())
@ -649,13 +657,10 @@ final class RequestConverters {
return request;
}
static Request prepareUpdateByQueryRequest(UpdateByQueryRequest updateByQueryRequest,
boolean waitForCompletion) throws IOException {
String endpoint =
endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
static Request prepareUpdateByQueryRequest(UpdateByQueryRequest updateByQueryRequest, boolean waitForCompletion) throws IOException {
String endpoint = endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRouting(updateByQueryRequest.getRouting())
Params params = new Params().withRouting(updateByQueryRequest.getRouting())
.withPipeline(updateByQueryRequest.getPipeline())
.withRefresh(updateByQueryRequest.isRefresh())
.withTimeout(updateByQueryRequest.getTimeout())
@ -694,11 +699,12 @@ final class RequestConverters {
}
private static Request rethrottle(RethrottleRequest rethrottleRequest, String firstPathPart) {
String endpoint = new EndpointBuilder().addPathPart(firstPathPart).addPathPart(rethrottleRequest.getTaskId().toString())
.addPathPart("_rethrottle").build();
String endpoint = new EndpointBuilder().addPathPart(firstPathPart)
.addPathPart(rethrottleRequest.getTaskId().toString())
.addPathPart("_rethrottle")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params()
.withRequestsPerSecond(rethrottleRequest.getRequestsPerSecond());
Params params = new Params().withRequestsPerSecond(rethrottleRequest.getRequestsPerSecond());
// we set "group_by" to "none" because this is the response format we can parse back
params.putParam("group_by", "none");
request.addParameters(params.asMap());
@ -807,13 +813,17 @@ final class RequestConverters {
}
static String endpoint(String[] indices, String[] types, String endpoint) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices).addCommaSeparatedPathParts(types)
.addPathPartAsIs(endpoint).build();
return new EndpointBuilder().addCommaSeparatedPathParts(indices)
.addCommaSeparatedPathParts(types)
.addPathPartAsIs(endpoint)
.build();
}
static String endpoint(String[] indices, String endpoint, String[] suffixes) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint)
.addCommaSeparatedPathParts(suffixes).build();
return new EndpointBuilder().addCommaSeparatedPathParts(indices)
.addPathPartAsIs(endpoint)
.addCommaSeparatedPathParts(suffixes)
.build();
}
static String endpoint(String[] indices, String endpoint, String type) {
@ -836,14 +846,13 @@ final class RequestConverters {
* a {@link Request} and adds the parameters to it directly.
*/
static class Params {
private final Map<String,String> parameters = new HashMap<>();
private final Map<String, String> parameters = new HashMap<>();
Params() {
}
Params() {}
Params putParam(String name, String value) {
if (Strings.hasLength(value)) {
parameters.put(name,value);
parameters.put(name, value);
}
return this;
}
@ -855,7 +864,7 @@ final class RequestConverters {
return this;
}
Map<String, String> asMap(){
Map<String, String> asMap() {
return parameters;
}
@ -981,7 +990,7 @@ final class RequestConverters {
return this;
}
Params withTerminateAfter(int terminateAfter){
Params withTerminateAfter(int terminateAfter) {
return putParam("terminate_after", String.valueOf(terminateAfter));
}
@ -1097,7 +1106,7 @@ final class RequestConverters {
}
Params withNodes(String[] nodes) {
return withNodes(Arrays.asList(nodes));
return withNodes(Arrays.asList(nodes));
}
Params withNodes(List<String> nodes) {
@ -1192,15 +1201,23 @@ final class RequestConverters {
static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable XContentType xContentType) {
XContentType requestContentType = indexRequest.getContentType();
if (requestContentType != XContentType.JSON && requestContentType != XContentType.SMILE) {
throw new IllegalArgumentException("Unsupported content-type found for request with content-type [" + requestContentType
+ "], only JSON and SMILE are supported");
throw new IllegalArgumentException(
"Unsupported content-type found for request with content-type ["
+ requestContentType
+ "], only JSON and SMILE are supported"
);
}
if (xContentType == null) {
return requestContentType;
}
if (requestContentType != xContentType) {
throw new IllegalArgumentException("Mismatching content-type found for request with content-type [" + requestContentType
+ "], previous requests have content-type [" + xContentType + "]");
throw new IllegalArgumentException(
"Mismatching content-type found for request with content-type ["
+ requestContentType
+ "], previous requests have content-type ["
+ xContentType
+ "]"
);
}
return xContentType;
}
@ -1231,7 +1248,7 @@ final class RequestConverters {
return this;
}
EndpointBuilder addPathPartAsIs(String ... parts) {
EndpointBuilder addPathPartAsIs(String... parts) {
for (String part : parts) {
if (Strings.hasLength(part)) {
joiner.add(part);
@ -1246,13 +1263,13 @@ final class RequestConverters {
private static String encodePart(String pathPart) {
try {
//encode each part (e.g. index, type and id) separately before merging them into the path
//we prepend "/" to the path part to make this path absolute, otherwise there can be issues with
//paths that start with `-` or contain `:`
//the authority must be an empty string and not null, else paths that being with slashes could have them
//misinterpreted as part of the authority.
// encode each part (e.g. index, type and id) separately before merging them into the path
// we prepend "/" to the path part to make this path absolute, otherwise there can be issues with
// paths that start with `-` or contain `:`
// the authority must be an empty string and not null, else paths that being with slashes could have them
// misinterpreted as part of the authority.
URI uri = new URI(null, "", "/" + pathPart, null, null);
//manually encode any slash that each part may contain
// manually encode any slash that each part may contain
return uri.getRawPath().substring(1).replaceAll("/", "%2F");
} catch (URISyntaxException e) {
throw new IllegalArgumentException("Path part [" + pathPart + "] couldn't be encoded", e);
@ -1260,4 +1277,3 @@ final class RequestConverters {
}
}
}

View File

@ -63,7 +63,7 @@ public class RethrottleRequest implements Validatable {
public RethrottleRequest(TaskId taskId, float requestsPerSecond) {
Objects.requireNonNull(taskId, "taskId cannot be null");
if (requestsPerSecond <= 0) {
throw new IllegalArgumentException("requestsPerSecond needs to be positive value but was [" + requestsPerSecond+"]");
throw new IllegalArgumentException("requestsPerSecond needs to be positive value but was [" + requestsPerSecond + "]");
}
this.taskId = taskId;
this.requestsPerSecond = requestsPerSecond;
@ -85,6 +85,6 @@ public class RethrottleRequest implements Validatable {
@Override
public String toString() {
return "RethrottleRequest: taskID = " + taskId +"; reqestsPerSecond = " + requestsPerSecond;
return "RethrottleRequest: taskID = " + taskId + "; reqestsPerSecond = " + requestsPerSecond;
}
}

View File

@ -32,8 +32,6 @@
package org.opensearch.client;
import org.opensearch.client.Cancellable;
import org.opensearch.client.RequestOptions;
import org.opensearch.action.ActionListener;
import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest;
import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse;
@ -79,10 +77,14 @@ public final class SnapshotClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options,
GetRepositoriesResponse::fromXContent, emptySet());
public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
getRepositoriesRequest,
SnapshotRequestConverters::getRepositories,
options,
GetRepositoriesResponse::fromXContent,
emptySet()
);
}
/**
@ -94,11 +96,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options,
ActionListener<GetRepositoriesResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest,
SnapshotRequestConverters::getRepositories, options,
GetRepositoriesResponse::fromXContent, listener, emptySet());
public Cancellable getRepositoryAsync(
GetRepositoriesRequest getRepositoriesRequest,
RequestOptions options,
ActionListener<GetRepositoriesResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
getRepositoriesRequest,
SnapshotRequestConverters::getRepositories,
options,
GetRepositoriesResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -110,8 +120,13 @@ public final class SnapshotClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse createRepository(PutRepositoryRequest putRepositoryRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options,
AcknowledgedResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
putRepositoryRequest,
SnapshotRequestConverters::createRepository,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -122,11 +137,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable createRepositoryAsync(PutRepositoryRequest putRepositoryRequest, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest,
SnapshotRequestConverters::createRepository, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable createRepositoryAsync(
PutRepositoryRequest putRepositoryRequest,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
putRepositoryRequest,
SnapshotRequestConverters::createRepository,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -139,8 +162,13 @@ public final class SnapshotClient {
*/
public AcknowledgedResponse deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository,
options, AcknowledgedResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
deleteRepositoryRequest,
SnapshotRequestConverters::deleteRepository,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -151,11 +179,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deleteRepositoryAsync(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest,
SnapshotRequestConverters::deleteRepository, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable deleteRepositoryAsync(
DeleteRepositoryRequest deleteRepositoryRequest,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
deleteRepositoryRequest,
SnapshotRequestConverters::deleteRepository,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -168,8 +204,13 @@ public final class SnapshotClient {
*/
public VerifyRepositoryResponse verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository,
options, VerifyRepositoryResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
verifyRepositoryRequest,
SnapshotRequestConverters::verifyRepository,
options,
VerifyRepositoryResponse::fromXContent,
emptySet()
);
}
/**
@ -180,11 +221,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options,
ActionListener<VerifyRepositoryResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest,
SnapshotRequestConverters::verifyRepository, options,
VerifyRepositoryResponse::fromXContent, listener, emptySet());
public Cancellable verifyRepositoryAsync(
VerifyRepositoryRequest verifyRepositoryRequest,
RequestOptions options,
ActionListener<VerifyRepositoryResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
verifyRepositoryRequest,
SnapshotRequestConverters::verifyRepository,
options,
VerifyRepositoryResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -197,8 +246,13 @@ public final class SnapshotClient {
*/
public CleanupRepositoryResponse cleanupRepository(CleanupRepositoryRequest cleanupRepositoryRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(cleanupRepositoryRequest, SnapshotRequestConverters::cleanupRepository,
options, CleanupRepositoryResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
cleanupRepositoryRequest,
SnapshotRequestConverters::cleanupRepository,
options,
CleanupRepositoryResponse::fromXContent,
emptySet()
);
}
/**
@ -209,19 +263,32 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable cleanupRepositoryAsync(CleanupRepositoryRequest cleanupRepositoryRequest, RequestOptions options,
ActionListener<CleanupRepositoryResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(cleanupRepositoryRequest, SnapshotRequestConverters::cleanupRepository,
options, CleanupRepositoryResponse::fromXContent, listener, emptySet());
public Cancellable cleanupRepositoryAsync(
CleanupRepositoryRequest cleanupRepositoryRequest,
RequestOptions options,
ActionListener<CleanupRepositoryResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
cleanupRepositoryRequest,
SnapshotRequestConverters::cleanupRepository,
options,
CleanupRepositoryResponse::fromXContent,
listener,
emptySet()
);
}
/**
* Creates a snapshot.
*/
public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options,
CreateSnapshotResponse::fromXContent, emptySet());
public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
createSnapshotRequest,
SnapshotRequestConverters::createSnapshot,
options,
CreateSnapshotResponse::fromXContent,
emptySet()
);
}
/**
@ -230,20 +297,32 @@ public final class SnapshotClient {
*
* @return cancellable that may be used to cancel the request
*/
public Cancellable createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
ActionListener<CreateSnapshotResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest,
SnapshotRequestConverters::createSnapshot, options,
CreateSnapshotResponse::fromXContent, listener, emptySet());
public Cancellable createAsync(
CreateSnapshotRequest createSnapshotRequest,
RequestOptions options,
ActionListener<CreateSnapshotResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
createSnapshotRequest,
SnapshotRequestConverters::createSnapshot,
options,
CreateSnapshotResponse::fromXContent,
listener,
emptySet()
);
}
/**
* Clones a snapshot.
*/
public AcknowledgedResponse clone(CloneSnapshotRequest cloneSnapshotRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(cloneSnapshotRequest, SnapshotRequestConverters::cloneSnapshot, options,
AcknowledgedResponse::fromXContent, emptySet());
public AcknowledgedResponse clone(CloneSnapshotRequest cloneSnapshotRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
cloneSnapshotRequest,
SnapshotRequestConverters::cloneSnapshot,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -252,11 +331,19 @@ public final class SnapshotClient {
*
* @return cancellable that may be used to cancel the request
*/
public Cancellable cloneAsync(CloneSnapshotRequest cloneSnapshotRequest, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(cloneSnapshotRequest,
SnapshotRequestConverters::cloneSnapshot, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable cloneAsync(
CloneSnapshotRequest cloneSnapshotRequest,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
cloneSnapshotRequest,
SnapshotRequestConverters::cloneSnapshot,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -268,8 +355,13 @@ public final class SnapshotClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options,
GetSnapshotsResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
getSnapshotsRequest,
SnapshotRequestConverters::getSnapshots,
options,
GetSnapshotsResponse::fromXContent,
emptySet()
);
}
/**
@ -280,11 +372,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options,
ActionListener<GetSnapshotsResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest,
SnapshotRequestConverters::getSnapshots, options,
GetSnapshotsResponse::fromXContent, listener, emptySet());
public Cancellable getAsync(
GetSnapshotsRequest getSnapshotsRequest,
RequestOptions options,
ActionListener<GetSnapshotsResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
getSnapshotsRequest,
SnapshotRequestConverters::getSnapshots,
options,
GetSnapshotsResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -295,10 +395,14 @@ public final class SnapshotClient {
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options)
throws IOException {
return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options,
SnapshotsStatusResponse::fromXContent, emptySet());
public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
snapshotsStatusRequest,
SnapshotRequestConverters::snapshotsStatus,
options,
SnapshotsStatusResponse::fromXContent,
emptySet()
);
}
/**
@ -309,11 +413,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options,
ActionListener<SnapshotsStatusResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest,
SnapshotRequestConverters::snapshotsStatus, options,
SnapshotsStatusResponse::fromXContent, listener, emptySet());
public Cancellable statusAsync(
SnapshotsStatusRequest snapshotsStatusRequest,
RequestOptions options,
ActionListener<SnapshotsStatusResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
snapshotsStatusRequest,
SnapshotRequestConverters::snapshotsStatus,
options,
SnapshotsStatusResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -325,8 +437,13 @@ public final class SnapshotClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public RestoreSnapshotResponse restore(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options,
RestoreSnapshotResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
restoreSnapshotRequest,
SnapshotRequestConverters::restoreSnapshot,
options,
RestoreSnapshotResponse::fromXContent,
emptySet()
);
}
/**
@ -337,11 +454,19 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable restoreAsync(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options,
ActionListener<RestoreSnapshotResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest,
SnapshotRequestConverters::restoreSnapshot, options,
RestoreSnapshotResponse::fromXContent, listener, emptySet());
public Cancellable restoreAsync(
RestoreSnapshotRequest restoreSnapshotRequest,
RequestOptions options,
ActionListener<RestoreSnapshotResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
restoreSnapshotRequest,
SnapshotRequestConverters::restoreSnapshot,
options,
RestoreSnapshotResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -353,9 +478,13 @@ public final class SnapshotClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest,
SnapshotRequestConverters::deleteSnapshot, options,
AcknowledgedResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
deleteSnapshotRequest,
SnapshotRequestConverters::deleteSnapshot,
options,
AcknowledgedResponse::fromXContent,
emptySet()
);
}
/**
@ -366,10 +495,18 @@ public final class SnapshotClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options,
ActionListener<AcknowledgedResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest,
SnapshotRequestConverters::deleteSnapshot, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
public Cancellable deleteAsync(
DeleteSnapshotRequest deleteSnapshotRequest,
RequestOptions options,
ActionListener<AcknowledgedResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
deleteSnapshotRequest,
SnapshotRequestConverters::deleteSnapshot,
options,
AcknowledgedResponse::fromXContent,
listener,
emptySet()
);
}
}

View File

@ -36,7 +36,6 @@ import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.opensearch.client.Request;
import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest;
import org.opensearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
@ -58,7 +57,8 @@ final class SnapshotRequestConverters {
static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
String[] repositories = getRepositoriesRequest.repositories() == null ? Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories();
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories)
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
.addCommaSeparatedPathParts(repositories)
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
@ -85,7 +85,8 @@ final class SnapshotRequestConverters {
}
static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name())
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot")
.addPathPart(deleteRepositoryRequest.name())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
@ -140,11 +141,11 @@ final class SnapshotRequestConverters {
static Request cloneSnapshot(CloneSnapshotRequest cloneSnapshotRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot")
.addPathPart(cloneSnapshotRequest.repository())
.addPathPart(cloneSnapshotRequest.source())
.addPathPart("_clone")
.addPathPart(cloneSnapshotRequest.target())
.build();
.addPathPart(cloneSnapshotRequest.repository())
.addPathPart(cloneSnapshotRequest.source())
.addPathPart("_clone")
.addPathPart(cloneSnapshotRequest.target())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(cloneSnapshotRequest.masterNodeTimeout());

View File

@ -103,7 +103,7 @@ public class SyncedFlushResponse implements ToXContentObject {
builder.startObject(SHARDS_FIELD);
totalCounts.toXContent(builder, params);
builder.endObject();
for (Map.Entry<String, IndexResult> entry: indexResults.entrySet()) {
for (Map.Entry<String, IndexResult> entry : indexResults.entrySet()) {
String indexName = entry.getKey();
IndexResult indexResult = entry.getValue();
builder.startObject(indexName);
@ -132,10 +132,7 @@ public class SyncedFlushResponse implements ToXContentObject {
if (totalCounts != null) {
return new SyncedFlushResponse(totalCounts, indexResults);
} else {
throw new ParsingException(
startLoc,
"Unable to reconstruct object. Total counts for shards couldn't be parsed."
);
throw new ParsingException(startLoc, "Unable to reconstruct object. Total counts for shards couldn't be parsed.");
}
}
@ -148,11 +145,10 @@ public class SyncedFlushResponse implements ToXContentObject {
public static final String SUCCESSFUL_FIELD = "successful";
public static final String FAILED_FIELD = "failed";
private static final ConstructingObjectParser<ShardCounts, Void> PARSER =
new ConstructingObjectParser<>(
"shardcounts",
a -> new ShardCounts((Integer) a[0], (Integer) a[1], (Integer) a[2])
);
private static final ConstructingObjectParser<ShardCounts, Void> PARSER = new ConstructingObjectParser<>(
"shardcounts",
a -> new ShardCounts((Integer) a[0], (Integer) a[1], (Integer) a[2])
);
static {
PARSER.declareInt(constructorArg(), new ParseField(TOTAL_FIELD));
PARSER.declareInt(constructorArg(), new ParseField(SUCCESSFUL_FIELD));
@ -163,7 +159,6 @@ public class SyncedFlushResponse implements ToXContentObject {
private int successful;
private int failed;
ShardCounts(int total, int successful, int failed) {
this.total = total;
this.successful = successful;
@ -184,10 +179,7 @@ public class SyncedFlushResponse implements ToXContentObject {
public boolean equals(ShardCounts other) {
if (other != null) {
return
other.total == this.total &&
other.successful == this.successful &&
other.failed == this.failed;
return other.total == this.total && other.successful == this.successful && other.failed == this.failed;
} else {
return false;
}
@ -207,11 +199,10 @@ public class SyncedFlushResponse implements ToXContentObject {
public static final String FAILURES_FIELD = "failures";
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<IndexResult, Void> PARSER =
new ConstructingObjectParser<>(
"indexresult",
a -> new IndexResult((Integer) a[0], (Integer) a[1], (Integer) a[2], (List<ShardFailure>)a[3])
);
private static final ConstructingObjectParser<IndexResult, Void> PARSER = new ConstructingObjectParser<>(
"indexresult",
a -> new IndexResult((Integer) a[0], (Integer) a[1], (Integer) a[2], (List<ShardFailure>) a[3])
);
static {
PARSER.declareInt(constructorArg(), new ParseField(TOTAL_FIELD));
PARSER.declareInt(constructorArg(), new ParseField(SUCCESSFUL_FIELD));
@ -297,16 +288,12 @@ public class SyncedFlushResponse implements ToXContentObject {
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<ShardFailure, Void> PARSER = new ConstructingObjectParser<>(
"shardfailure",
a -> new ShardFailure((Integer)a[0], (String)a[1], (Map<String, Object>)a[2])
a -> new ShardFailure((Integer) a[0], (String) a[1], (Map<String, Object>) a[2])
);
static {
PARSER.declareInt(constructorArg(), new ParseField(SHARD_ID_FIELD));
PARSER.declareString(constructorArg(), new ParseField(FAILURE_REASON_FIELD));
PARSER.declareObject(
optionalConstructorArg(),
(parser, c) -> parser.map(),
new ParseField(ROUTING_FIELD)
);
PARSER.declareObject(optionalConstructorArg(), (parser, c) -> parser.map(), new ParseField(ROUTING_FIELD));
}
ShardFailure(int shardId, String failureReason, Map<String, Object> routing) {

View File

@ -64,8 +64,13 @@ public final class TasksClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public ListTasksResponse list(ListTasksRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, TasksRequestConverters::listTasks, options,
ListTasksResponse::fromXContent, emptySet());
return restHighLevelClient.performRequestAndParseEntity(
request,
TasksRequestConverters::listTasks,
options,
ListTasksResponse::fromXContent,
emptySet()
);
}
/**
@ -77,8 +82,14 @@ public final class TasksClient {
* @return cancellable that may be used to cancel the request
*/
public Cancellable listAsync(ListTasksRequest request, RequestOptions options, ActionListener<ListTasksResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request, TasksRequestConverters::listTasks, options,
ListTasksResponse::fromXContent, listener, emptySet());
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
TasksRequestConverters::listTasks,
options,
ListTasksResponse::fromXContent,
listener,
emptySet()
);
}
/**
@ -90,8 +101,12 @@ public final class TasksClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public Optional<GetTaskResponse> get(GetTaskRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseOptionalEntity(request, TasksRequestConverters::getTask, options,
GetTaskResponse::fromXContent);
return restHighLevelClient.performRequestAndParseOptionalEntity(
request,
TasksRequestConverters::getTask,
options,
GetTaskResponse::fromXContent
);
}
/**
@ -102,11 +117,15 @@ public final class TasksClient {
* @param listener an actionlistener that takes an optional response (404s are returned as an empty Optional)
* @return cancellable that may be used to cancel the request
*/
public Cancellable getAsync(GetTaskRequest request, RequestOptions options,
ActionListener<Optional<GetTaskResponse>> listener) {
public Cancellable getAsync(GetTaskRequest request, RequestOptions options, ActionListener<Optional<GetTaskResponse>> listener) {
return restHighLevelClient.performRequestAsyncAndParseOptionalEntity(request, TasksRequestConverters::getTask, options,
GetTaskResponse::fromXContent, listener);
return restHighLevelClient.performRequestAsyncAndParseOptionalEntity(
request,
TasksRequestConverters::getTask,
options,
GetTaskResponse::fromXContent,
listener
);
}
/**
@ -118,7 +137,7 @@ public final class TasksClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*
*/
public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, RequestOptions options ) throws IOException {
public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
cancelTasksRequest,
TasksRequestConverters::cancelTasks,
@ -136,8 +155,11 @@ public final class TasksClient {
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable cancelAsync(CancelTasksRequest cancelTasksRequest, RequestOptions options,
ActionListener<CancelTasksResponse> listener) {
public Cancellable cancelAsync(
CancelTasksRequest cancelTasksRequest,
RequestOptions options,
ActionListener<CancelTasksResponse> listener
) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
cancelTasksRequest,
TasksRequestConverters::cancelTasks,

View File

@ -49,9 +49,7 @@ final class TasksRequestConverters {
req.getTimeout().ifPresent(params::withTimeout);
req.getTaskId().ifPresent(params::withTaskId);
req.getParentTaskId().ifPresent(params::withParentTaskId);
params
.withNodes(req.getNodes())
.withActions(req.getActions());
params.withNodes(req.getNodes()).withActions(req.getActions());
if (req.getWaitForCompletion() != null) {
params.withWaitForCompletion(req.getWaitForCompletion());
}
@ -63,7 +61,7 @@ final class TasksRequestConverters {
if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) {
throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
}
Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(listTaskRequest.getTimeout())
.withDetailed(listTaskRequest.getDetailed())
@ -78,12 +76,11 @@ final class TasksRequestConverters {
static Request getTask(GetTaskRequest getTaskRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_tasks")
.addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId()))
.build();
.addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId()))
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(getTaskRequest.getTimeout())
.withWaitForCompletion(getTaskRequest.getWaitForCompletion());
params.withTimeout(getTaskRequest.getTimeout()).withWaitForCompletion(getTaskRequest.getWaitForCompletion());
request.addParameters(params.asMap());
return request;
}

View File

@ -38,7 +38,8 @@ import java.util.Optional;
*/
public interface Validatable {
Validatable EMPTY = new Validatable() {};
Validatable EMPTY = new Validatable() {
};
/**
* Perform validation. This method does not have to be overridden in the event that no validation needs to be done,

View File

@ -84,10 +84,10 @@ public class ProxyModeInfo implements RemoteConnectionInfo.ModeInfo {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ProxyModeInfo otherProxy = (ProxyModeInfo) o;
return maxSocketConnections == otherProxy.maxSocketConnections &&
numSocketsConnected == otherProxy.numSocketsConnected &&
Objects.equals(address, otherProxy.address) &&
Objects.equals(serverName, otherProxy.serverName);
return maxSocketConnections == otherProxy.maxSocketConnections
&& numSocketsConnected == otherProxy.numSocketsConnected
&& Objects.equals(address, otherProxy.address)
&& Objects.equals(serverName, otherProxy.serverName);
}
@Override

View File

@ -55,23 +55,21 @@ public final class RemoteConnectionInfo {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<RemoteConnectionInfo, String> PARSER = new ConstructingObjectParser<>(
"RemoteConnectionInfoObjectParser",
false,
(args, clusterAlias) -> {
String mode = (String) args[1];
ModeInfo modeInfo;
if (mode.equals(ProxyModeInfo.NAME)) {
modeInfo = new ProxyModeInfo((String) args[4], (String) args[5], (int) args[6], (int) args[7]);
} else if (mode.equals(SniffModeInfo.NAME)) {
modeInfo = new SniffModeInfo((List<String>) args[8], (int) args[9], (int) args[10]);
} else {
throw new IllegalArgumentException("mode cannot be " + mode);
}
return new RemoteConnectionInfo(clusterAlias,
modeInfo,
(String) args[2],
(boolean) args[3]);
});
"RemoteConnectionInfoObjectParser",
false,
(args, clusterAlias) -> {
String mode = (String) args[1];
ModeInfo modeInfo;
if (mode.equals(ProxyModeInfo.NAME)) {
modeInfo = new ProxyModeInfo((String) args[4], (String) args[5], (int) args[6], (int) args[7]);
} else if (mode.equals(SniffModeInfo.NAME)) {
modeInfo = new SniffModeInfo((List<String>) args[8], (int) args[9], (int) args[10]);
} else {
throw new IllegalArgumentException("mode cannot be " + mode);
}
return new RemoteConnectionInfo(clusterAlias, modeInfo, (String) args[2], (boolean) args[3]);
}
);
static {
PARSER.declareBoolean(constructorArg(), new ParseField(CONNECTED));
@ -133,10 +131,10 @@ public final class RemoteConnectionInfo {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RemoteConnectionInfo that = (RemoteConnectionInfo) o;
return skipUnavailable == that.skipUnavailable &&
Objects.equals(modeInfo, that.modeInfo) &&
Objects.equals(initialConnectionTimeoutString, that.initialConnectionTimeoutString) &&
Objects.equals(clusterAlias, that.clusterAlias);
return skipUnavailable == that.skipUnavailable
&& Objects.equals(modeInfo, that.modeInfo)
&& Objects.equals(initialConnectionTimeoutString, that.initialConnectionTimeoutString)
&& Objects.equals(clusterAlias, that.clusterAlias);
}
@Override

View File

@ -77,9 +77,9 @@ public class SniffModeInfo implements RemoteConnectionInfo.ModeInfo {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SniffModeInfo sniff = (SniffModeInfo) o;
return maxConnectionsPerCluster == sniff.maxConnectionsPerCluster &&
numNodesConnected == sniff.numNodesConnected &&
Objects.equals(seedNodes, sniff.seedNodes);
return maxConnectionsPerCluster == sniff.maxConnectionsPerCluster
&& numNodesConnected == sniff.numNodesConnected
&& Objects.equals(seedNodes, sniff.seedNodes);
}
@Override

View File

@ -45,8 +45,11 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.constructo
public class AcknowledgedResponse {
protected static final String PARSE_FIELD_NAME = "acknowledged";
private static final ConstructingObjectParser<AcknowledgedResponse, Void> PARSER = AcknowledgedResponse
.generateParser("acknowledged_response", AcknowledgedResponse::new, AcknowledgedResponse.PARSE_FIELD_NAME);
private static final ConstructingObjectParser<AcknowledgedResponse, Void> PARSER = AcknowledgedResponse.generateParser(
"acknowledged_response",
AcknowledgedResponse::new,
AcknowledgedResponse.PARSE_FIELD_NAME
);
private final boolean acknowledged;

View File

@ -65,8 +65,9 @@ public class BroadcastResponse {
private static final ParseField SHARDS_FIELD = new ParseField("_shards");
static final ConstructingObjectParser<BroadcastResponse, Void> PARSER = new ConstructingObjectParser<>(
"broadcast_response",
a -> new BroadcastResponse((Shards) a[0]));
"broadcast_response",
a -> new BroadcastResponse((Shards) a[0])
);
static {
declareShardsField(PARSER);
@ -149,11 +150,12 @@ public class BroadcastResponse {
}
Shards(
final int total,
final int successful,
final int skipped,
final int failed,
final Collection<DefaultShardOperationFailedException> failures) {
final int total,
final int successful,
final int skipped,
final int failed,
final Collection<DefaultShardOperationFailedException> failures
) {
this.total = total;
this.successful = successful;
this.skipped = skipped;
@ -169,13 +171,15 @@ public class BroadcastResponse {
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<Shards, Void> SHARDS_PARSER = new ConstructingObjectParser<>(
"shards",
a -> new Shards(
(int) a[0], // total
(int) a[1], // successful
a[2] == null ? 0 : (int) a[2], // skipped
(int) a[3], // failed
a[4] == null ? Collections.emptyList() : (Collection<DefaultShardOperationFailedException>) a[4])); // failures
"shards",
a -> new Shards(
(int) a[0], // total
(int) a[1], // successful
a[2] == null ? 0 : (int) a[2], // skipped
(int) a[3], // failed
a[4] == null ? Collections.emptyList() : (Collection<DefaultShardOperationFailedException>) a[4]
)
); // failures
static {
SHARDS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), TOTAL_FIELD);
@ -183,8 +187,10 @@ public class BroadcastResponse {
SHARDS_PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), SKIPPED_FIELD);
SHARDS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), FAILED_FIELD);
SHARDS_PARSER.declareObjectArray(
ConstructingObjectParser.optionalConstructorArg(),
DefaultShardOperationFailedException.PARSER, FAILURES_FIELD);
ConstructingObjectParser.optionalConstructorArg(),
DefaultShardOperationFailedException.PARSER,
FAILURES_FIELD
);
}
}

View File

@ -88,7 +88,8 @@ public final class CountRequest extends ActionRequest implements IndicesRequest.
*/
public CountRequest(String[] indices, QueryBuilder query) {
indices(indices);
this.query = Objects.requireNonNull(query, "query must not be null");;
this.query = Objects.requireNonNull(query, "query must not be null");
;
}
@Override
@ -261,14 +262,14 @@ public final class CountRequest extends ActionRequest implements IndicesRequest.
return false;
}
CountRequest that = (CountRequest) o;
return Objects.equals(indicesOptions, that.indicesOptions) &&
Arrays.equals(indices, that.indices) &&
Arrays.equals(types, that.types) &&
Objects.equals(routing, that.routing) &&
Objects.equals(preference, that.preference) &&
Objects.equals(terminateAfter, that.terminateAfter) &&
Objects.equals(minScore, that.minScore) &&
Objects.equals(query, that.query);
return Objects.equals(indicesOptions, that.indicesOptions)
&& Arrays.equals(indices, that.indices)
&& Arrays.equals(types, that.types)
&& Objects.equals(routing, that.routing)
&& Objects.equals(preference, that.preference)
&& Objects.equals(terminateAfter, that.terminateAfter)
&& Objects.equals(minScore, that.minScore)
&& Objects.equals(query, that.query);
}
@Override

View File

@ -120,7 +120,7 @@ public final class CountResponse {
String currentName = parser.currentName();
Boolean terminatedEarly = null;
long count = 0;
ShardStats shardStats = new ShardStats(-1, -1,0, ShardSearchFailure.EMPTY_ARRAY);
ShardStats shardStats = new ShardStats(-1, -1, 0, ShardSearchFailure.EMPTY_ARRAY);
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -146,11 +146,13 @@ public final class CountResponse {
@Override
public String toString() {
String s = "{" +
"count=" + count +
(isTerminatedEarly() != null ? ", terminatedEarly=" + terminatedEarly : "") +
", " + shardStats +
'}';
String s = "{"
+ "count="
+ count
+ (isTerminatedEarly() != null ? ", terminatedEarly=" + terminatedEarly : "")
+ ", "
+ shardStats
+ '}';
return s;
}
@ -200,7 +202,7 @@ public final class CountResponse {
static ShardStats fromXContent(XContentParser parser) throws IOException {
int successfulShards = -1;
int totalShards = -1;
int skippedShards = 0; //BWC @see org.opensearch.action.search.SearchResponse
int skippedShards = 0; // BWC @see org.opensearch.action.search.SearchResponse
List<ShardSearchFailure> failures = new ArrayList<>();
XContentParser.Token token;
String currentName = parser.currentName();
@ -236,13 +238,17 @@ public final class CountResponse {
@Override
public String toString() {
return "_shards : {" +
"total=" + totalShards +
", successful=" + successfulShards +
", skipped=" + skippedShards +
", failed=" + (shardFailures != null && shardFailures.length > 0 ? shardFailures.length : 0 ) +
(shardFailures != null && shardFailures.length > 0 ? ", failures: " + Arrays.asList(shardFailures): "") +
'}';
return "_shards : {"
+ "total="
+ totalShards
+ ", successful="
+ successfulShards
+ ", skipped="
+ skippedShards
+ ", failed="
+ (shardFailures != null && shardFailures.length > 0 ? shardFailures.length : 0)
+ (shardFailures != null && shardFailures.length > 0 ? ", failures: " + Arrays.asList(shardFailures) : "")
+ '}';
}
}
}

View File

@ -55,8 +55,7 @@ public final class GetSourceRequest implements Validatable {
}
public static GetSourceRequest from(GetRequest getRequest) {
return new GetSourceRequest(getRequest.index(), getRequest.id())
.routing(getRequest.routing())
return new GetSourceRequest(getRequest.index(), getRequest.id()).routing(getRequest.routing())
.preference(getRequest.preference())
.refresh(getRequest.refresh())
.realtime(getRequest.realtime())

View File

@ -63,9 +63,20 @@ public abstract class IndexerJobStats {
protected final long indexFailures;
protected final long searchFailures;
public IndexerJobStats(long numPages, long numInputDocuments, long numOutputDocuments, long numInvocations,
long indexTime, long searchTime, long processingTime, long indexTotal, long searchTotal, long processingTotal,
long indexFailures, long searchFailures) {
public IndexerJobStats(
long numPages,
long numInputDocuments,
long numOutputDocuments,
long numInvocations,
long indexTime,
long searchTime,
long processingTime,
long indexTotal,
long searchTotal,
long processingTotal,
long indexFailures,
long searchFailures
) {
this.numPages = numPages;
this.numInputDocuments = numInputDocuments;
this.numOuputDocuments = numOutputDocuments;
@ -165,7 +176,6 @@ public abstract class IndexerJobStats {
return processingTotal;
}
@Override
public boolean equals(Object other) {
if (this == other) {
@ -178,39 +188,63 @@ public abstract class IndexerJobStats {
IndexerJobStats that = (IndexerJobStats) other;
return Objects.equals(this.numPages, that.numPages)
&& Objects.equals(this.numInputDocuments, that.numInputDocuments)
&& Objects.equals(this.numOuputDocuments, that.numOuputDocuments)
&& Objects.equals(this.numInvocations, that.numInvocations)
&& Objects.equals(this.indexTime, that.indexTime)
&& Objects.equals(this.searchTime, that.searchTime)
&& Objects.equals(this.processingTime, that.processingTime)
&& Objects.equals(this.indexFailures, that.indexFailures)
&& Objects.equals(this.searchFailures, that.searchFailures)
&& Objects.equals(this.searchTotal, that.searchTotal)
&& Objects.equals(this.processingTotal, that.processingTotal)
&& Objects.equals(this.indexTotal, that.indexTotal);
&& Objects.equals(this.numInputDocuments, that.numInputDocuments)
&& Objects.equals(this.numOuputDocuments, that.numOuputDocuments)
&& Objects.equals(this.numInvocations, that.numInvocations)
&& Objects.equals(this.indexTime, that.indexTime)
&& Objects.equals(this.searchTime, that.searchTime)
&& Objects.equals(this.processingTime, that.processingTime)
&& Objects.equals(this.indexFailures, that.indexFailures)
&& Objects.equals(this.searchFailures, that.searchFailures)
&& Objects.equals(this.searchTotal, that.searchTotal)
&& Objects.equals(this.processingTotal, that.processingTotal)
&& Objects.equals(this.indexTotal, that.indexTotal);
}
@Override
public int hashCode() {
return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations,
indexTime, searchTime, processingTime, indexFailures, searchFailures, searchTotal,
indexTotal, processingTotal);
return Objects.hash(
numPages,
numInputDocuments,
numOuputDocuments,
numInvocations,
indexTime,
searchTime,
processingTime,
indexFailures,
searchFailures,
searchTotal,
indexTotal,
processingTotal
);
}
@Override
public final String toString() {
return "{pages=" + numPages
+ ", input_docs=" + numInputDocuments
+ ", output_docs=" + numOuputDocuments
+ ", invocations=" + numInvocations
+ ", index_failures=" + indexFailures
+ ", search_failures=" + searchFailures
+ ", index_time_in_ms=" + indexTime
+ ", index_total=" + indexTotal
+ ", search_time_in_ms=" + searchTime
+ ", search_total=" + searchTotal
+ ", processing_time_in_ms=" + processingTime
+ ", processing_total=" + processingTotal + "}";
return "{pages="
+ numPages
+ ", input_docs="
+ numInputDocuments
+ ", output_docs="
+ numOuputDocuments
+ ", invocations="
+ numInvocations
+ ", index_failures="
+ indexFailures
+ ", search_failures="
+ searchFailures
+ ", index_time_in_ms="
+ indexTime
+ ", index_total="
+ indexTotal
+ ", search_time_in_ms="
+ searchTime
+ ", search_total="
+ searchTotal
+ ", processing_time_in_ms="
+ processingTime
+ ", processing_total="
+ processingTotal
+ "}";
}
}

View File

@ -32,7 +32,6 @@
package org.opensearch.client.core;
import java.util.Locale;
/**

View File

@ -34,5 +34,4 @@ package org.opensearch.client.core;
import org.opensearch.client.Validatable;
public class MainRequest implements Validatable {
}
public class MainRequest implements Validatable {}

View File

@ -40,12 +40,11 @@ import java.util.Objects;
public class MainResponse {
private static final ConstructingObjectParser<MainResponse, Void> PARSER =
new ConstructingObjectParser<>(MainResponse.class.getName(), true,
args -> {
return new MainResponse((String) args[0], (Version) args[1], (String) args[2], (String) args[3]);
}
);
private static final ConstructingObjectParser<MainResponse, Void> PARSER = new ConstructingObjectParser<>(
MainResponse.class.getName(),
true,
args -> { return new MainResponse((String) args[0], (Version) args[1], (String) args[2], (String) args[3]); }
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("name"));
@ -92,10 +91,10 @@ public class MainResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MainResponse that = (MainResponse) o;
return nodeName.equals(that.nodeName) &&
version.equals(that.version) &&
clusterName.equals(that.clusterName) &&
clusterUuid.equals(that.clusterUuid);
return nodeName.equals(that.nodeName)
&& version.equals(that.version)
&& clusterName.equals(that.clusterName)
&& clusterUuid.equals(that.clusterUuid);
}
@Override
@ -104,13 +103,22 @@ public class MainResponse {
}
public static class Version {
private static final ConstructingObjectParser<Version, Void> PARSER =
new ConstructingObjectParser<>(Version.class.getName(), true,
args -> {
return new Version((String) args[0], (String) args[1], (String) args[2], (String) args[3],
(Boolean) args[4], (String) args[5], (String) args[6], (String) args[7]);
}
);
private static final ConstructingObjectParser<Version, Void> PARSER = new ConstructingObjectParser<>(
Version.class.getName(),
true,
args -> {
return new Version(
(String) args[0],
(String) args[1],
(String) args[2],
(String) args[3],
(Boolean) args[4],
(String) args[5],
(String) args[6],
(String) args[7]
);
}
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("number"));
@ -132,8 +140,16 @@ public class MainResponse {
private final String minimumWireCompatibilityVersion;
private final String minimumIndexCompatibilityVersion;
public Version(String number, String buildType, String buildHash, String buildDate, boolean isSnapshot,
String luceneVersion, String minimumWireCompatibilityVersion, String minimumIndexCompatibilityVersion) {
public Version(
String number,
String buildType,
String buildHash,
String buildDate,
boolean isSnapshot,
String luceneVersion,
String minimumWireCompatibilityVersion,
String minimumIndexCompatibilityVersion
) {
this.number = number;
this.buildType = buildType;
this.buildHash = buildHash;
@ -181,20 +197,28 @@ public class MainResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Version version = (Version) o;
return isSnapshot == version.isSnapshot &&
number.equals(version.number) &&
Objects.equals(buildType, version.buildType) &&
buildHash.equals(version.buildHash) &&
buildDate.equals(version.buildDate) &&
luceneVersion.equals(version.luceneVersion) &&
minimumWireCompatibilityVersion.equals(version.minimumWireCompatibilityVersion) &&
minimumIndexCompatibilityVersion.equals(version.minimumIndexCompatibilityVersion);
return isSnapshot == version.isSnapshot
&& number.equals(version.number)
&& Objects.equals(buildType, version.buildType)
&& buildHash.equals(version.buildHash)
&& buildDate.equals(version.buildDate)
&& luceneVersion.equals(version.luceneVersion)
&& minimumWireCompatibilityVersion.equals(version.minimumWireCompatibilityVersion)
&& minimumIndexCompatibilityVersion.equals(version.minimumIndexCompatibilityVersion);
}
@Override
public int hashCode() {
return Objects.hash(number, buildType, buildHash, buildDate, isSnapshot, luceneVersion,
minimumWireCompatibilityVersion, minimumIndexCompatibilityVersion);
return Objects.hash(
number,
buildType,
buildHash,
buildDate,
isSnapshot,
luceneVersion,
minimumWireCompatibilityVersion,
minimumIndexCompatibilityVersion
);
}
}
}

View File

@ -32,7 +32,6 @@
package org.opensearch.client.core;
import org.opensearch.common.ParseField;
import org.opensearch.common.xcontent.ConstructingObjectParser;
import org.opensearch.common.xcontent.XContentParser;
@ -49,17 +48,19 @@ public class MultiTermVectorsResponse {
this.responses = responses;
}
private static final ConstructingObjectParser<MultiTermVectorsResponse, Void> PARSER =
new ConstructingObjectParser<>("multi_term_vectors", true,
private static final ConstructingObjectParser<MultiTermVectorsResponse, Void> PARSER = new ConstructingObjectParser<>(
"multi_term_vectors",
true,
args -> {
// as the response comes from server, we are sure that args[0] will be a list of TermVectorsResponse
@SuppressWarnings("unchecked") List<TermVectorsResponse> termVectorsResponsesList = (List<TermVectorsResponse>) args[0];
@SuppressWarnings("unchecked")
List<TermVectorsResponse> termVectorsResponsesList = (List<TermVectorsResponse>) args[0];
return new MultiTermVectorsResponse(termVectorsResponsesList);
}
);
static {
PARSER.declareObjectArray(constructorArg(), (p,c) -> TermVectorsResponse.fromXContent(p), new ParseField("docs"));
PARSER.declareObjectArray(constructorArg(), (p, c) -> TermVectorsResponse.fromXContent(p), new ParseField("docs"));
}
public static MultiTermVectorsResponse fromXContent(XContentParser parser) {
@ -73,7 +74,6 @@ public class MultiTermVectorsResponse {
return responses;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;

View File

@ -49,8 +49,10 @@ public class PageParams implements ToXContentObject {
public static final ParseField FROM = new ParseField("from");
public static final ParseField SIZE = new ParseField("size");
public static final ConstructingObjectParser<PageParams, Void> PARSER = new ConstructingObjectParser<>(PAGE.getPreferredName(),
a -> new PageParams((Integer) a[0], (Integer) a[1]));
public static final ConstructingObjectParser<PageParams, Void> PARSER = new ConstructingObjectParser<>(
PAGE.getPreferredName(),
a -> new PageParams((Integer) a[0], (Integer) a[1])
);
static {
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), FROM);
@ -105,8 +107,7 @@ public class PageParams implements ToXContentObject {
return false;
}
PageParams other = (PageParams) obj;
return Objects.equals(from, other.from) &&
Objects.equals(size, other.size);
return Objects.equals(from, other.from) && Objects.equals(size, other.size);
}
}

View File

@ -42,10 +42,14 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.constructo
public class ShardsAcknowledgedResponse extends AcknowledgedResponse {
protected static final String SHARDS_PARSE_FIELD_NAME = "shards_acknowledged";
private static ConstructingObjectParser<ShardsAcknowledgedResponse, Void> buildParser() {
ConstructingObjectParser<ShardsAcknowledgedResponse, Void> p = new ConstructingObjectParser<>("freeze", true,
args -> new ShardsAcknowledgedResponse((boolean) args[0], (boolean) args[1]));
ConstructingObjectParser<ShardsAcknowledgedResponse, Void> p = new ConstructingObjectParser<>(
"freeze",
true,
args -> new ShardsAcknowledgedResponse((boolean) args[0], (boolean) args[1])
);
p.declareBoolean(constructorArg(), new ParseField(AcknowledgedResponse.PARSE_FIELD_NAME));
p.declareBoolean(constructorArg(), new ParseField(SHARDS_PARSE_FIELD_NAME));
return p;

View File

@ -45,7 +45,8 @@ import java.util.Map;
public class TermVectorsRequest implements ToXContentObject, Validatable {
private final String index;
@Nullable private final String type;
@Nullable
private final String type;
private String id = null;
private XContentBuilder docBuilder = null;
@ -118,7 +119,6 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
this.docBuilder = docBuilder;
}
/**
* Constructs a new TermVectorRequest from a template
* using the provided document id
@ -262,7 +262,6 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
return realtime;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@ -289,8 +288,14 @@ public class TermVectorsRequest implements ToXContentObject, Validatable {
if (filterSettings != null) {
builder.startObject("filter");
String[] filterSettingNames =
{"max_num_terms", "min_term_freq", "max_term_freq", "min_doc_freq", "max_doc_freq", "min_word_length", "max_word_length"};
String[] filterSettingNames = {
"max_num_terms",
"min_term_freq",
"max_term_freq",
"min_doc_freq",
"max_doc_freq",
"min_word_length",
"max_word_length" };
for (String settingName : filterSettingNames) {
if (filterSettings.containsKey(settingName)) builder.field(settingName, filterSettings.get(settingName));
}

View File

@ -54,7 +54,14 @@ public class TermVectorsResponse {
private final List<TermVector> termVectorList;
public TermVectorsResponse(
String index, String type, String id, long version, boolean found, long tookInMillis, List<TermVector> termVectorList) {
String index,
String type,
String id,
long version,
boolean found,
long tookInMillis,
List<TermVector> termVectorList
) {
this.index = index;
this.type = type;
this.id = id;
@ -64,10 +71,13 @@ public class TermVectorsResponse {
this.termVectorList = termVectorList;
}
private static final ConstructingObjectParser<TermVectorsResponse, Void> PARSER = new ConstructingObjectParser<>("term_vectors", true,
private static final ConstructingObjectParser<TermVectorsResponse, Void> PARSER = new ConstructingObjectParser<>(
"term_vectors",
true,
args -> {
// as the response comes from server, we are sure that args[6] will be a list of TermVector
@SuppressWarnings("unchecked") List<TermVector> termVectorList = (List<TermVector>) args[6];
@SuppressWarnings("unchecked")
List<TermVector> termVectorList = (List<TermVector>) args[6];
if (termVectorList != null) {
Collections.sort(termVectorList, Comparator.comparing(TermVector::getFieldName));
}
@ -90,8 +100,11 @@ public class TermVectorsResponse {
PARSER.declareLong(constructorArg(), new ParseField("_version"));
PARSER.declareBoolean(constructorArg(), new ParseField("found"));
PARSER.declareLong(constructorArg(), new ParseField("took"));
PARSER.declareNamedObjects(optionalConstructorArg(),
(p, c, fieldName) -> TermVector.fromXContent(p, fieldName), new ParseField("term_vectors"));
PARSER.declareNamedObjects(
optionalConstructorArg(),
(p, c, fieldName) -> TermVector.fromXContent(p, fieldName),
new ParseField("term_vectors")
);
}
public static TermVectorsResponse fromXContent(XContentParser parser) {
@ -135,7 +148,7 @@ public class TermVectorsResponse {
* Returns the document version
*/
public long getDocVersion() {
return docVersion;
return docVersion;
}
/**
@ -148,7 +161,7 @@ public class TermVectorsResponse {
/**
* Returns the list of term vectors
*/
public List<TermVector> getTermVectorsList(){
public List<TermVector> getTermVectorsList() {
return termVectorList;
}
@ -171,13 +184,15 @@ public class TermVectorsResponse {
return Objects.hash(index, type, id, docVersion, found, tookInMillis, termVectorList);
}
public static final class TermVector {
private static final ConstructingObjectParser<TermVector, String> PARSER = new ConstructingObjectParser<>("term_vector", true,
(args, ctxFieldName) -> {
private static final ConstructingObjectParser<TermVector, String> PARSER = new ConstructingObjectParser<>(
"term_vector",
true,
(args, ctxFieldName) -> {
// as the response comes from server, we are sure that args[1] will be a list of Term
@SuppressWarnings("unchecked") List<Term> terms = (List<Term>) args[1];
@SuppressWarnings("unchecked")
List<Term> terms = (List<Term>) args[1];
if (terms != null) {
Collections.sort(terms, Comparator.comparing(Term::getTerm));
}
@ -186,8 +201,7 @@ public class TermVectorsResponse {
);
static {
PARSER.declareObject(optionalConstructorArg(),
(p,c) -> FieldStatistics.fromXContent(p), new ParseField("field_statistics"));
PARSER.declareObject(optionalConstructorArg(), (p, c) -> FieldStatistics.fromXContent(p), new ParseField("field_statistics"));
PARSER.declareNamedObjects(optionalConstructorArg(), (p, c, term) -> Term.fromXContent(p, term), new ParseField("terms"));
}
@ -228,7 +242,6 @@ public class TermVectorsResponse {
return fieldStatistics;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
@ -248,10 +261,9 @@ public class TermVectorsResponse {
public static final class FieldStatistics {
private static final ConstructingObjectParser<FieldStatistics, Void> PARSER = new ConstructingObjectParser<>(
"field_statistics", true,
args -> {
return new FieldStatistics((long) args[0], (int) args[1], (long) args[2]);
}
"field_statistics",
true,
args -> { return new FieldStatistics((long) args[0], (int) args[1], (long) args[2]); }
);
static {
@ -293,14 +305,13 @@ public class TermVectorsResponse {
public long getSumTotalTermFreq() {
return sumTotalTermFreq;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof FieldStatistics)) return false;
FieldStatistics other = (FieldStatistics) obj;
return docCount == other.docCount
&& sumDocFreq == other.sumDocFreq
&& sumTotalTermFreq == other.sumTotalTermFreq;
return docCount == other.docCount && sumDocFreq == other.sumDocFreq && sumTotalTermFreq == other.sumTotalTermFreq;
}
@Override
@ -309,12 +320,14 @@ public class TermVectorsResponse {
}
}
public static final class Term {
private static final ConstructingObjectParser<Term, String> PARSER = new ConstructingObjectParser<>("token", true,
(args, ctxTerm) -> {
private static final ConstructingObjectParser<Term, String> PARSER = new ConstructingObjectParser<>(
"token",
true,
(args, ctxTerm) -> {
// as the response comes from server, we are sure that args[4] will be a list of Token
@SuppressWarnings("unchecked") List<Token> tokens = (List<Token>) args[4];
@SuppressWarnings("unchecked")
List<Token> tokens = (List<Token>) args[4];
if (tokens != null) {
Collections.sort(
tokens,
@ -331,7 +344,7 @@ public class TermVectorsResponse {
PARSER.declareInt(optionalConstructorArg(), new ParseField("doc_freq"));
PARSER.declareLong(optionalConstructorArg(), new ParseField("ttf"));
PARSER.declareFloat(optionalConstructorArg(), new ParseField("score"));
PARSER.declareObjectArray(optionalConstructorArg(), (p,c) -> Token.fromXContent(p), new ParseField("tokens"));
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Token.fromXContent(p), new ParseField("tokens"));
}
private final String term;
@ -382,14 +395,14 @@ public class TermVectorsResponse {
/**
* Returns total term frequency - the number of times this term occurs across all documents
*/
public Long getTotalTermFreq( ){
public Long getTotalTermFreq() {
return totalTermFreq;
}
/**
* Returns tf-idf score, if the request used some form of terms filtering
*/
public Float getScore(){
public Float getScore() {
return score;
}
@ -419,13 +432,13 @@ public class TermVectorsResponse {
}
}
public static final class Token {
private static final ConstructingObjectParser<Token, Void> PARSER = new ConstructingObjectParser<>("token", true,
args -> {
return new Token((Integer) args[0], (Integer) args[1], (Integer) args[2], (String) args[3]);
});
private static final ConstructingObjectParser<Token, Void> PARSER = new ConstructingObjectParser<>(
"token",
true,
args -> { return new Token((Integer) args[0], (Integer) args[1], (Integer) args[2], (String) args[3]); }
);
static {
PARSER.declareInt(optionalConstructorArg(), new ParseField("start_offset"));
PARSER.declareInt(optionalConstructorArg(), new ParseField("end_offset"));
@ -442,8 +455,7 @@ public class TermVectorsResponse {
@Nullable
private final String payload;
public Token(Integer startOffset, Integer endOffset, Integer position, String payload) {
public Token(Integer startOffset, Integer endOffset, Integer position, String payload) {
this.startOffset = startOffset;
this.endOffset = endOffset;
this.position = position;
@ -488,7 +500,7 @@ public class TermVectorsResponse {
if (!(obj instanceof Token)) return false;
Token other = (Token) obj;
return Objects.equals(startOffset, other.startOffset)
&& Objects.equals(endOffset,other.endOffset)
&& Objects.equals(endOffset, other.endOffset)
&& Objects.equals(position, other.position)
&& Objects.equals(payload, other.payload);
}

View File

@ -188,8 +188,13 @@ public class AnalyzeRequest implements Validatable, ToXContentObject {
this.text = text;
}
private AnalyzeRequest(String index, NameOrDefinition tokenizer, List<NameOrDefinition> charFilters,
List<NameOrDefinition> tokenFilters, String... text) {
private AnalyzeRequest(
String index,
NameOrDefinition tokenizer,
List<NameOrDefinition> charFilters,
List<NameOrDefinition> tokenFilters,
String... text
) {
this.index = index;
this.analyzer = null;
this.normalizer = null;

View File

@ -64,13 +64,13 @@ public class AnalyzeResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeResponse.AnalyzeToken that = (AnalyzeResponse.AnalyzeToken) o;
return startOffset == that.startOffset &&
endOffset == that.endOffset &&
position == that.position &&
positionLength == that.positionLength &&
Objects.equals(term, that.term) &&
Objects.equals(attributes, that.attributes) &&
Objects.equals(type, that.type);
return startOffset == that.startOffset
&& endOffset == that.endOffset
&& position == that.position
&& positionLength == that.positionLength
&& Objects.equals(term, that.term)
&& Objects.equals(attributes, that.attributes)
&& Objects.equals(type, that.type);
}
@Override
@ -134,8 +134,11 @@ public class AnalyzeResponse {
this.attributes.put(key, value);
}
private static final ObjectParser<AnalyzeToken, Void> PARSER
= new ObjectParser<>("analyze_token", AnalyzeToken::setAttribute, AnalyzeToken::new);
private static final ObjectParser<AnalyzeToken, Void> PARSER = new ObjectParser<>(
"analyze_token",
AnalyzeToken::setAttribute,
AnalyzeToken::new
);
static {
PARSER.declareString(AnalyzeToken::setTerm, new ParseField("token"));
PARSER.declareString(AnalyzeToken::setType, new ParseField("type"));
@ -167,8 +170,11 @@ public class AnalyzeResponse {
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<AnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("analyze_response",
true, args -> new AnalyzeResponse((List<AnalyzeResponse.AnalyzeToken>) args[0], (DetailAnalyzeResponse) args[1]));
private static final ConstructingObjectParser<AnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>(
"analyze_response",
true,
args -> new AnalyzeResponse((List<AnalyzeResponse.AnalyzeToken>) args[0], (DetailAnalyzeResponse) args[1])
);
static {
PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeToken.PARSER, new ParseField(TOKENS));
@ -184,8 +190,7 @@ public class AnalyzeResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeResponse that = (AnalyzeResponse) o;
return Objects.equals(detail, that.detail) &&
Objects.equals(tokens, that.tokens);
return Objects.equals(detail, that.detail) && Objects.equals(tokens, that.tokens);
}
@Override

View File

@ -51,13 +51,16 @@ import static org.opensearch.common.xcontent.ObjectParser.ValueType;
public class CloseIndexResponse extends ShardsAcknowledgedResponse {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<CloseIndexResponse, Void> PARSER = new ConstructingObjectParser<>("close_index_response",
true, args -> {
boolean acknowledged = (boolean) args[0];
boolean shardsAcknowledged = args[1] != null ? (boolean) args[1] : acknowledged;
List<CloseIndexResponse.IndexResult> indices = args[2] != null ? (List<CloseIndexResponse.IndexResult>) args[2] : emptyList();
return new CloseIndexResponse(acknowledged, shardsAcknowledged, indices);
});
private static final ConstructingObjectParser<CloseIndexResponse, Void> PARSER = new ConstructingObjectParser<>(
"close_index_response",
true,
args -> {
boolean acknowledged = (boolean) args[0];
boolean shardsAcknowledged = args[1] != null ? (boolean) args[1] : acknowledged;
List<CloseIndexResponse.IndexResult> indices = args[2] != null ? (List<CloseIndexResponse.IndexResult>) args[2] : emptyList();
return new CloseIndexResponse(acknowledged, shardsAcknowledged, indices);
}
);
static {
declareAcknowledgedField(PARSER);
@ -83,7 +86,9 @@ public class CloseIndexResponse extends ShardsAcknowledgedResponse {
public static class IndexResult {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<IndexResult, String> PARSER = new ConstructingObjectParser<>("index_result", true,
private static final ConstructingObjectParser<IndexResult, String> PARSER = new ConstructingObjectParser<>(
"index_result",
true,
(args, index) -> {
Exception exception = (Exception) args[1];
if (exception != null) {
@ -97,7 +102,8 @@ public class CloseIndexResponse extends ShardsAcknowledgedResponse {
}
assert (boolean) args[0];
return new IndexResult(index);
});
}
);
static {
PARSER.declareBoolean(optionalConstructorArg(), new ParseField("closed"));
PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
@ -107,8 +113,11 @@ public class CloseIndexResponse extends ShardsAcknowledgedResponse {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p);
return e;
}, new ParseField("exception"));
PARSER.declareNamedObjects(optionalConstructorArg(),
(p, c, id) -> ShardResult.fromXContent(p, id), new ParseField("failedShards"));
PARSER.declareNamedObjects(
optionalConstructorArg(),
(p, c, id) -> ShardResult.fromXContent(p, id),
new ParseField("failedShards")
);
}
private final String index;
@ -167,11 +176,14 @@ public class CloseIndexResponse extends ShardsAcknowledgedResponse {
public static class ShardResult {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<ShardResult, String> PARSER = new ConstructingObjectParser<>("shard_result", true,
private static final ConstructingObjectParser<ShardResult, String> PARSER = new ConstructingObjectParser<>(
"shard_result",
true,
(arg, id) -> {
Failure[] failures = arg[0] != null ? ((List<Failure>) arg[0]).toArray(new Failure[0]) : new Failure[0];
return new ShardResult(Integer.parseInt(id), failures);
});
}
);
static {
PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Failure.PARSER.apply(p, null), new ParseField("failures"));
@ -203,8 +215,11 @@ public class CloseIndexResponse extends ShardsAcknowledgedResponse {
public static class Failure extends DefaultShardOperationFailedException {
static final ConstructingObjectParser<Failure, Void> PARSER = new ConstructingObjectParser<>("failure", true,
arg -> new Failure((String) arg[0], (int) arg[1], (Throwable) arg[2], (String) arg[3]));
static final ConstructingObjectParser<Failure, Void> PARSER = new ConstructingObjectParser<>(
"failure",
true,
arg -> new Failure((String) arg[0], (int) arg[1], (Throwable) arg[2], (String) arg[3])
);
static {
declareFields(PARSER);

View File

@ -183,7 +183,7 @@ public class CreateIndexRequest extends TimedRequest implements Validatable, ToX
* @param source The mapping source
*/
public CreateIndexRequest mapping(Map<String, ?> source) {
try {
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source);
return mapping(BytesReference.bytes(builder), builder.contentType());
@ -243,15 +243,21 @@ public class CreateIndexRequest extends TimedRequest implements Validatable, ToX
*/
public CreateIndexRequest aliases(BytesReference source, XContentType contentType) {
// EMPTY is safe here because we never call namedObject
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source, contentType)) {
//move to the first alias
try (
XContentParser parser = XContentHelper.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
source,
contentType
)
) {
// move to the first alias
parser.nextToken();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
alias(Alias.fromXContent(parser));
}
return this;
} catch(IOException e) {
} catch (IOException e) {
throw new OpenSearchParseException("Failed to parse aliases", e);
}
}

View File

@ -48,8 +48,11 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.constructo
public class CreateIndexResponse extends ShardsAcknowledgedResponse {
private static final ParseField INDEX = new ParseField("index");
private static final ConstructingObjectParser<CreateIndexResponse, Void> PARSER = new ConstructingObjectParser<>("create_index",
true, args -> new CreateIndexResponse((boolean) args[0], (boolean) args[1], (String) args[2]));
private static final ConstructingObjectParser<CreateIndexResponse, Void> PARSER = new ConstructingObjectParser<>(
"create_index",
true,
args -> new CreateIndexResponse((boolean) args[0], (boolean) args[1], (String) args[2])
);
static {
declareAcknowledgedAndShardsAcknowledgedFields(PARSER);

View File

@ -53,8 +53,14 @@ public final class DataStream {
@Nullable
String indexTemplate;
public DataStream(String name, String timeStampField, List<String> indices, long generation, ClusterHealthStatus dataStreamStatus,
@Nullable String indexTemplate) {
public DataStream(
String name,
String timeStampField,
List<String> indices,
long generation,
ClusterHealthStatus dataStreamStatus,
@Nullable String indexTemplate
) {
this.name = name;
this.timeStampField = timeStampField;
this.indices = indices;
@ -95,18 +101,16 @@ public final class DataStream {
public static final ParseField INDEX_TEMPLATE_FIELD = new ParseField("template");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<DataStream, Void> PARSER = new ConstructingObjectParser<>("data_stream",
args -> {
String dataStreamName = (String) args[0];
String timeStampField = (String) ((Map<?, ?>) args[1]).get("name");
List<String> indices =
((List<Map<String, String>>) args[2]).stream().map(m -> m.get("index_name")).collect(Collectors.toList());
Long generation = (Long) args[3];
String statusStr = (String) args[4];
ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr);
String indexTemplate = (String) args[5];
return new DataStream(dataStreamName, timeStampField, indices, generation, status, indexTemplate);
});
private static final ConstructingObjectParser<DataStream, Void> PARSER = new ConstructingObjectParser<>("data_stream", args -> {
String dataStreamName = (String) args[0];
String timeStampField = (String) ((Map<?, ?>) args[1]).get("name");
List<String> indices = ((List<Map<String, String>>) args[2]).stream().map(m -> m.get("index_name")).collect(Collectors.toList());
Long generation = (Long) args[3];
String statusStr = (String) args[4];
ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr);
String indexTemplate = (String) args[5];
return new DataStream(dataStreamName, timeStampField, indices, generation, status, indexTemplate);
});
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD);
@ -126,12 +130,12 @@ public final class DataStream {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DataStream that = (DataStream) o;
return generation == that.generation &&
name.equals(that.name) &&
timeStampField.equals(that.timeStampField) &&
indices.equals(that.indices) &&
dataStreamStatus == that.dataStreamStatus &&
Objects.equals(indexTemplate, that.indexTemplate);
return generation == that.generation
&& name.equals(that.name)
&& timeStampField.equals(that.timeStampField)
&& indices.equals(that.indices)
&& dataStreamStatus == that.dataStreamStatus
&& Objects.equals(indexTemplate, that.indexTemplate);
}
@Override

View File

@ -54,8 +54,13 @@ public class DataStreamsStatsResponse extends BroadcastResponse {
private final ByteSizeValue totalStoreSize;
private final Map<String, DataStreamStats> dataStreams;
protected DataStreamsStatsResponse(Shards shards, int dataStreamCount, int backingIndices, ByteSizeValue totalStoreSize,
Map<String, DataStreamStats> dataStreams) {
protected DataStreamsStatsResponse(
Shards shards,
int dataStreamCount,
int backingIndices,
ByteSizeValue totalStoreSize,
Map<String, DataStreamStats> dataStreams
) {
super(shards);
this.dataStreamCount = dataStreamCount;
this.backingIndices = backingIndices;
@ -73,38 +78,52 @@ public class DataStreamsStatsResponse extends BroadcastResponse {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<DataStreamsStatsResponse, Void> PARSER = new ConstructingObjectParser<>(
"data_streams_stats", true, arg -> {
Shards shards = (Shards) arg[0];
Integer dataStreamCount = ((Integer) arg[1]);
Integer backingIndices = ((Integer) arg[2]);
ByteSizeValue totalStoreSize = ((ByteSizeValue) arg[3]);
Map<String, DataStreamStats> dataStreams = new HashMap<>();
for (DataStreamStats dataStreamStats : ((List<DataStreamStats>) arg[4])) {
dataStreams.put(dataStreamStats.dataStream, dataStreamStats);
"data_streams_stats",
true,
arg -> {
Shards shards = (Shards) arg[0];
Integer dataStreamCount = ((Integer) arg[1]);
Integer backingIndices = ((Integer) arg[2]);
ByteSizeValue totalStoreSize = ((ByteSizeValue) arg[3]);
Map<String, DataStreamStats> dataStreams = new HashMap<>();
for (DataStreamStats dataStreamStats : ((List<DataStreamStats>) arg[4])) {
dataStreams.put(dataStreamStats.dataStream, dataStreamStats);
}
return new DataStreamsStatsResponse(shards, dataStreamCount, backingIndices, totalStoreSize, dataStreams);
}
return new DataStreamsStatsResponse(shards, dataStreamCount, backingIndices, totalStoreSize, dataStreams);
});
);
private static final ConstructingObjectParser<DataStreamStats, Void> ENTRY_PARSER = new ConstructingObjectParser<>(
"data_streams_stats.entry", true, arg -> {
String dataStream = ((String) arg[0]);
Integer backingIndices = ((Integer) arg[1]);
ByteSizeValue storeSize = ((ByteSizeValue) arg[2]);
Long maximumTimestamp = ((Long) arg[3]);
return new DataStreamStats(dataStream, backingIndices, storeSize, maximumTimestamp);
});
"data_streams_stats.entry",
true,
arg -> {
String dataStream = ((String) arg[0]);
Integer backingIndices = ((Integer) arg[1]);
ByteSizeValue storeSize = ((ByteSizeValue) arg[2]);
Long maximumTimestamp = ((Long) arg[3]);
return new DataStreamStats(dataStream, backingIndices, storeSize, maximumTimestamp);
}
);
static {
declareShardsField(PARSER);
PARSER.declareInt(constructorArg(), DATA_STREAM_COUNT);
PARSER.declareInt(constructorArg(), BACKING_INDICES);
PARSER.declareField(constructorArg(), (p, c) -> new ByteSizeValue(p.longValue()), TOTAL_STORE_SIZE_BYTES,
ObjectParser.ValueType.VALUE);
PARSER.declareField(
constructorArg(),
(p, c) -> new ByteSizeValue(p.longValue()),
TOTAL_STORE_SIZE_BYTES,
ObjectParser.ValueType.VALUE
);
PARSER.declareObjectArray(constructorArg(), ENTRY_PARSER, DATA_STREAMS);
ENTRY_PARSER.declareString(constructorArg(), DATA_STREAM);
ENTRY_PARSER.declareInt(constructorArg(), BACKING_INDICES);
ENTRY_PARSER.declareField(constructorArg(), (p, c) -> new ByteSizeValue(p.longValue()), STORE_SIZE_BYTES,
ObjectParser.ValueType.VALUE);
ENTRY_PARSER.declareField(
constructorArg(),
(p, c) -> new ByteSizeValue(p.longValue()),
STORE_SIZE_BYTES,
ObjectParser.ValueType.VALUE
);
ENTRY_PARSER.declareLong(constructorArg(), MAXIMUM_TIMESTAMP);
}
@ -137,10 +156,10 @@ public class DataStreamsStatsResponse extends BroadcastResponse {
return false;
}
DataStreamsStatsResponse that = (DataStreamsStatsResponse) obj;
return dataStreamCount == that.dataStreamCount &&
backingIndices == that.backingIndices &&
Objects.equals(totalStoreSize, that.totalStoreSize) &&
Objects.equals(dataStreams, that.dataStreams);
return dataStreamCount == that.dataStreamCount
&& backingIndices == that.backingIndices
&& Objects.equals(totalStoreSize, that.totalStoreSize)
&& Objects.equals(dataStreams, that.dataStreams);
}
@Override
@ -150,12 +169,16 @@ public class DataStreamsStatsResponse extends BroadcastResponse {
@Override
public String toString() {
return "DataStreamsStatsResponse{" +
"dataStreamCount=" + dataStreamCount +
", backingIndices=" + backingIndices +
", totalStoreSize=" + totalStoreSize +
", dataStreams=" + dataStreams +
'}';
return "DataStreamsStatsResponse{"
+ "dataStreamCount="
+ dataStreamCount
+ ", backingIndices="
+ backingIndices
+ ", totalStoreSize="
+ totalStoreSize
+ ", dataStreams="
+ dataStreams
+ '}';
}
public static class DataStreamStats {
@ -197,10 +220,10 @@ public class DataStreamsStatsResponse extends BroadcastResponse {
return false;
}
DataStreamStats that = (DataStreamStats) obj;
return backingIndices == that.backingIndices &&
maximumTimestamp == that.maximumTimestamp &&
Objects.equals(dataStream, that.dataStream) &&
Objects.equals(storeSize, that.storeSize);
return backingIndices == that.backingIndices
&& maximumTimestamp == that.maximumTimestamp
&& Objects.equals(dataStream, that.dataStream)
&& Objects.equals(storeSize, that.storeSize);
}
@Override
@ -210,12 +233,17 @@ public class DataStreamsStatsResponse extends BroadcastResponse {
@Override
public String toString() {
return "DataStreamStats{" +
"dataStream='" + dataStream + '\'' +
", backingIndices=" + backingIndices +
", storeSize=" + storeSize +
", maximumTimestamp=" + maximumTimestamp +
'}';
return "DataStreamStats{"
+ "dataStream='"
+ dataStream
+ '\''
+ ", backingIndices="
+ backingIndices
+ ", storeSize="
+ storeSize
+ ", maximumTimestamp="
+ maximumTimestamp
+ '}';
}
}
}

View File

@ -53,16 +53,18 @@ public class DetailAnalyzeResponse {
private final AnalyzeTokenList tokenizer;
private final AnalyzeTokenList[] tokenfilters;
private DetailAnalyzeResponse(boolean customAnalyzer,
AnalyzeTokenList analyzer,
List<CharFilteredText> charfilters,
AnalyzeTokenList tokenizer,
List<AnalyzeTokenList> tokenfilters) {
private DetailAnalyzeResponse(
boolean customAnalyzer,
AnalyzeTokenList analyzer,
List<CharFilteredText> charfilters,
AnalyzeTokenList tokenizer,
List<AnalyzeTokenList> tokenfilters
) {
this.customAnalyzer = customAnalyzer;
this.analyzer = analyzer;
this.charfilters = charfilters == null ? null : charfilters.toArray(new CharFilteredText[]{});
this.charfilters = charfilters == null ? null : charfilters.toArray(new CharFilteredText[] {});
this.tokenizer = tokenizer;
this.tokenfilters = tokenfilters == null ? null : tokenfilters.toArray(new AnalyzeTokenList[]{});
this.tokenfilters = tokenfilters == null ? null : tokenfilters.toArray(new AnalyzeTokenList[] {});
}
public AnalyzeTokenList analyzer() {
@ -86,11 +88,11 @@ public class DetailAnalyzeResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DetailAnalyzeResponse that = (DetailAnalyzeResponse) o;
return customAnalyzer == that.customAnalyzer &&
Objects.equals(analyzer, that.analyzer) &&
Arrays.equals(charfilters, that.charfilters) &&
Objects.equals(tokenizer, that.tokenizer) &&
Arrays.equals(tokenfilters, that.tokenfilters);
return customAnalyzer == that.customAnalyzer
&& Objects.equals(analyzer, that.analyzer)
&& Arrays.equals(charfilters, that.charfilters)
&& Objects.equals(tokenizer, that.tokenizer)
&& Arrays.equals(tokenfilters, that.tokenfilters);
}
@Override
@ -102,13 +104,17 @@ public class DetailAnalyzeResponse {
}
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>("detail",
true, args -> new DetailAnalyzeResponse(
(boolean) args[0],
(AnalyzeTokenList) args[1],
(List<CharFilteredText>)args[2],
(AnalyzeTokenList) args[3],
(List<AnalyzeTokenList>)args[4]));
static final ConstructingObjectParser<DetailAnalyzeResponse, Void> PARSER = new ConstructingObjectParser<>(
"detail",
true,
args -> new DetailAnalyzeResponse(
(boolean) args[0],
(AnalyzeTokenList) args[1],
(List<CharFilteredText>) args[2],
(AnalyzeTokenList) args[3],
(List<AnalyzeTokenList>) args[4]
)
);
static {
PARSER.declareBoolean(constructorArg(), new ParseField("custom_analyzer"));
@ -131,8 +137,7 @@ public class DetailAnalyzeResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnalyzeTokenList that = (AnalyzeTokenList) o;
return Objects.equals(name, that.name) &&
Arrays.equals(tokens, that.tokens);
return Objects.equals(name, that.name) && Arrays.equals(tokens, that.tokens);
}
@Override
@ -144,7 +149,7 @@ public class DetailAnalyzeResponse {
public AnalyzeTokenList(String name, List<AnalyzeResponse.AnalyzeToken> tokens) {
this.name = name;
this.tokens = tokens.toArray(new AnalyzeResponse.AnalyzeToken[]{});
this.tokens = tokens.toArray(new AnalyzeResponse.AnalyzeToken[] {});
}
public String getName() {
@ -156,14 +161,15 @@ public class DetailAnalyzeResponse {
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>("token_list",
true, args -> new AnalyzeTokenList((String) args[0],
(List<AnalyzeResponse.AnalyzeToken>)args[1]));
private static final ConstructingObjectParser<AnalyzeTokenList, Void> PARSER = new ConstructingObjectParser<>(
"token_list",
true,
args -> new AnalyzeTokenList((String) args[0], (List<AnalyzeResponse.AnalyzeToken>) args[1])
);
static {
PARSER.declareString(constructorArg(), new ParseField("name"));
PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p),
new ParseField("tokens"));
PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField("tokens"));
}
public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException {
@ -194,8 +200,11 @@ public class DetailAnalyzeResponse {
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>("char_filtered_text",
true, args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0])));
private static final ConstructingObjectParser<CharFilteredText, Void> PARSER = new ConstructingObjectParser<>(
"char_filtered_text",
true,
args -> new CharFilteredText((String) args[0], ((List<String>) args[1]).toArray(new String[0]))
);
static {
PARSER.declareString(constructorArg(), new ParseField("name"));
@ -211,8 +220,7 @@ public class DetailAnalyzeResponse {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CharFilteredText that = (CharFilteredText) o;
return Objects.equals(name, that.name) &&
Arrays.equals(texts, that.texts);
return Objects.equals(name, that.name) && Arrays.equals(texts, that.texts);
}
@Override

View File

@ -44,7 +44,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
public class GetComponentTemplatesResponse {
public static final ParseField NAME = new ParseField("name");
@ -52,14 +51,18 @@ public class GetComponentTemplatesResponse {
public static final ParseField COMPONENT_TEMPLATE = new ParseField("component_template");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<Map<String, ComponentTemplate>, Void> PARSER =
new ConstructingObjectParser<>("component_templates", false,
a -> ((List<NamedComponentTemplate>) a[0]).stream().collect(Collectors.toMap(n -> n.name, n -> n.componentTemplate,
(n1, n2) -> n1, LinkedHashMap::new)));
private static final ConstructingObjectParser<Map<String, ComponentTemplate>, Void> PARSER = new ConstructingObjectParser<>(
"component_templates",
false,
a -> ((List<NamedComponentTemplate>) a[0]).stream()
.collect(Collectors.toMap(n -> n.name, n -> n.componentTemplate, (n1, n2) -> n1, LinkedHashMap::new))
);
private static final ConstructingObjectParser<NamedComponentTemplate, Void> INNER_PARSER =
new ConstructingObjectParser<>("named_component_template", false,
a -> new NamedComponentTemplate((String) a[0], (ComponentTemplate) a[1]));
private static final ConstructingObjectParser<NamedComponentTemplate, Void> INNER_PARSER = new ConstructingObjectParser<>(
"named_component_template",
false,
a -> new NamedComponentTemplate((String) a[0], (ComponentTemplate) a[1])
);
static {
INNER_PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
@ -92,7 +95,6 @@ public class GetComponentTemplatesResponse {
return componentTemplates;
}
public static GetComponentTemplatesResponse fromXContent(XContentParser parser) throws IOException {
return new GetComponentTemplatesResponse(PARSER.apply(parser, null));
}
@ -117,5 +119,4 @@ public class GetComponentTemplatesResponse {
return Objects.equals(componentTemplates, other.componentTemplates);
}
}

View File

@ -44,7 +44,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
public class GetComposableIndexTemplatesResponse {
public static final ParseField NAME = new ParseField("name");
@ -52,14 +51,18 @@ public class GetComposableIndexTemplatesResponse {
public static final ParseField INDEX_TEMPLATE = new ParseField("index_template");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<Map<String, ComposableIndexTemplate>, Void> PARSER =
new ConstructingObjectParser<>("index_templates", false,
a -> ((List<NamedIndexTemplate>) a[0]).stream().collect(Collectors.toMap(n -> n.name, n -> n.indexTemplate,
(n1, n2) -> n1, LinkedHashMap::new)));
private static final ConstructingObjectParser<Map<String, ComposableIndexTemplate>, Void> PARSER = new ConstructingObjectParser<>(
"index_templates",
false,
a -> ((List<NamedIndexTemplate>) a[0]).stream()
.collect(Collectors.toMap(n -> n.name, n -> n.indexTemplate, (n1, n2) -> n1, LinkedHashMap::new))
);
private static final ConstructingObjectParser<NamedIndexTemplate, Void> INNER_PARSER =
new ConstructingObjectParser<>("named_index_template", false,
a -> new NamedIndexTemplate((String) a[0], (ComposableIndexTemplate) a[1]));
private static final ConstructingObjectParser<NamedIndexTemplate, Void> INNER_PARSER = new ConstructingObjectParser<>(
"named_index_template",
false,
a -> new NamedIndexTemplate((String) a[0], (ComposableIndexTemplate) a[1])
);
static {
INNER_PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
@ -92,7 +95,6 @@ public class GetComposableIndexTemplatesResponse {
return indexTemplates;
}
public static GetComposableIndexTemplatesResponse fromXContent(XContentParser parser) throws IOException {
return new GetComposableIndexTemplatesResponse(PARSER.apply(parser, null));
}
@ -117,5 +119,4 @@ public class GetComposableIndexTemplatesResponse {
return Objects.equals(indexTemplates, other.indexTemplates);
}
}

View File

@ -40,7 +40,6 @@ import java.util.HashSet;
import java.util.List;
import java.util.Objects;
public class GetDataStreamResponse {
private final List<DataStream> dataStreams;

View File

@ -56,8 +56,11 @@ public class GetFieldMappingsResponse {
private static final ParseField MAPPINGS = new ParseField("mappings");
private static final ObjectParser<Map<String, FieldMappingMetadata>, String> PARSER =
new ObjectParser<>(MAPPINGS.getPreferredName(), true, HashMap::new);
private static final ObjectParser<Map<String, FieldMappingMetadata>, String> PARSER = new ObjectParser<>(
MAPPINGS.getPreferredName(),
true,
HashMap::new
);
static {
PARSER.declareField((p, fieldMappings, index) -> {
@ -77,10 +80,9 @@ public class GetFieldMappingsResponse {
this.mappings = mappings;
}
/**
* Returns the fields mapping. The return map keys are indexes and fields (as specified in the request).
*/
/**
* Returns the fields mapping. The return map keys are indexes and fields (as specified in the request).
*/
public Map<String, Map<String, FieldMappingMetadata>> mappings() {
return mappings;
}
@ -99,7 +101,6 @@ public class GetFieldMappingsResponse {
return indexMapping.get(field);
}
public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
final Map<String, Map<String, FieldMappingMetadata>> mappings = new HashMap<>();
@ -118,20 +119,19 @@ public class GetFieldMappingsResponse {
private static final ParseField FULL_NAME = new ParseField("full_name");
private static final ParseField MAPPING = new ParseField("mapping");
private static final ConstructingObjectParser<FieldMappingMetadata, String> PARSER =
new ConstructingObjectParser<>("field_mapping_meta_data", true,
a -> new FieldMappingMetadata((String)a[0], (BytesReference)a[1])
);
private static final ConstructingObjectParser<FieldMappingMetadata, String> PARSER = new ConstructingObjectParser<>(
"field_mapping_meta_data",
true,
a -> new FieldMappingMetadata((String) a[0], (BytesReference) a[1])
);
static {
PARSER.declareField(optionalConstructorArg(),
(p, c) -> p.text(), FULL_NAME, ObjectParser.ValueType.STRING);
PARSER.declareField(optionalConstructorArg(),
(p, c) -> {
final XContentBuilder jsonBuilder = jsonBuilder().copyCurrentStructure(p);
final BytesReference bytes = BytesReference.bytes(jsonBuilder);
return bytes;
}, MAPPING, ObjectParser.ValueType.OBJECT);
PARSER.declareField(optionalConstructorArg(), (p, c) -> p.text(), FULL_NAME, ObjectParser.ValueType.STRING);
PARSER.declareField(optionalConstructorArg(), (p, c) -> {
final XContentBuilder jsonBuilder = jsonBuilder().copyCurrentStructure(p);
final BytesReference bytes = BytesReference.bytes(jsonBuilder);
return bytes;
}, MAPPING, ObjectParser.ValueType.OBJECT);
}
private String fullName;
@ -153,7 +153,7 @@ public class GetFieldMappingsResponse {
return XContentHelper.convertToMap(source, true, XContentType.JSON).v2();
}
//pkg-private for testing
// pkg-private for testing
BytesReference getSource() {
return source;
}
@ -162,7 +162,7 @@ public class GetFieldMappingsResponse {
return PARSER.parse(parser, null);
}
@Override
@Override
public String toString() {
return "FieldMappingMetadata{fullName='" + fullName + '\'' + ", source=" + source + '}';
}
@ -181,10 +181,9 @@ public class GetFieldMappingsResponse {
}
}
@Override
public String toString() {
return "GetFieldMappingsResponse{" + "mappings=" + mappings + '}';
return "GetFieldMappingsResponse{" + "mappings=" + mappings + '}';
}
@Override

View File

@ -141,5 +141,4 @@ public class GetIndexRequest extends TimedRequest {
return includeDefaults;
}
}

View File

@ -63,12 +63,14 @@ public class GetIndexResponse {
private Map<String, String> dataStreams;
private String[] indices;
GetIndexResponse(String[] indices,
Map<String, MappingMetadata> mappings,
Map<String, List<AliasMetadata>> aliases,
Map<String, Settings> settings,
Map<String, Settings> defaultSettings,
Map<String, String> dataStreams) {
GetIndexResponse(
String[] indices,
Map<String, MappingMetadata> mappings,
Map<String, List<AliasMetadata>> aliases,
Map<String, Settings> settings,
Map<String, Settings> defaultSettings,
Map<String, String> dataStreams
) {
this.indices = indices;
// to have deterministic order
Arrays.sort(indices);
@ -205,8 +207,14 @@ public class GetIndexResponse {
Settings indexSettings = Settings.EMPTY;
Settings indexDefaultSettings = Settings.EMPTY;
String dataStream;
IndexEntry(List<AliasMetadata> indexAliases, MappingMetadata indexMappings, Settings indexSettings, Settings indexDefaultSettings,
String dataStream) {
IndexEntry(
List<AliasMetadata> indexAliases,
MappingMetadata indexMappings,
Settings indexSettings,
Settings indexDefaultSettings,
String dataStream
) {
if (indexAliases != null) this.indexAliases = indexAliases;
if (indexMappings != null) this.indexMappings = indexMappings;
if (indexSettings != null) this.indexSettings = indexSettings;

View File

@ -39,8 +39,7 @@ import java.util.Comparator;
import java.util.List;
import java.util.Objects;
public class GetIndexTemplatesResponse {
public class GetIndexTemplatesResponse {
@Override
public String toString() {
@ -63,7 +62,6 @@ public class GetIndexTemplatesResponse {
return indexTemplates;
}
public static GetIndexTemplatesResponse fromXContent(XContentParser parser) throws IOException {
final List<IndexTemplateMetadata> templates = new ArrayList<>();
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
@ -84,12 +82,9 @@ public class GetIndexTemplatesResponse {
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
// To compare results we need to make sure the templates are listed in the same order
GetIndexTemplatesResponse other = (GetIndexTemplatesResponse) obj;
List<IndexTemplateMetadata> thisList = new ArrayList<>(this.indexTemplates);
@ -99,5 +94,4 @@ public class GetIndexTemplatesResponse {
return Objects.equals(thisList, otherList);
}
}

View File

@ -61,9 +61,7 @@ public class GetMappingsResponse {
parser.nextToken();
}
XContentParserUtils.ensureExpectedToken(parser.currentToken(),
XContentParser.Token.START_OBJECT,
parser);
XContentParserUtils.ensureExpectedToken(parser.currentToken(), XContentParser.Token.START_OBJECT, parser);
Map<String, Object> parts = parser.map();
@ -73,8 +71,9 @@ public class GetMappingsResponse {
assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass();
@SuppressWarnings("unchecked")
final Map<String, Object> fieldMappings = (Map<String, Object>) ((Map<String, ?>) entry.getValue())
.get(MAPPINGS.getPreferredName());
final Map<String, Object> fieldMappings = (Map<String, Object>) ((Map<String, ?>) entry.getValue()).get(
MAPPINGS.getPreferredName()
);
mappings.put(indexName, new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, fieldMappings));
}

View File

@ -51,25 +51,28 @@ import java.util.stream.Collectors;
import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class IndexTemplateMetadata {
public class IndexTemplateMetadata {
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<IndexTemplateMetadata, String> PARSER = new ConstructingObjectParser<>(
"IndexTemplateMetadata", true, (a, name) -> {
List<Map.Entry<String, AliasMetadata>> alias = (List<Map.Entry<String, AliasMetadata>>) a[5];
ImmutableOpenMap<String, AliasMetadata> aliasMap =
new ImmutableOpenMap.Builder<String, AliasMetadata>()
.putAll(alias.stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
.build();
return new IndexTemplateMetadata(
name,
(Integer) a[0],
(Integer) a[1],
(List<String>) a[2],
(Settings) a[3],
(MappingMetadata) a[4],
aliasMap);
});
"IndexTemplateMetadata",
true,
(a, name) -> {
List<Map.Entry<String, AliasMetadata>> alias = (List<Map.Entry<String, AliasMetadata>>) a[5];
ImmutableOpenMap<String, AliasMetadata> aliasMap = new ImmutableOpenMap.Builder<String, AliasMetadata>().putAll(
alias.stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
).build();
return new IndexTemplateMetadata(
name,
(Integer) a[0],
(Integer) a[1],
(List<String>) a[2],
(Settings) a[3],
(MappingMetadata) a[4],
aliasMap
);
}
);
static {
PARSER.declareInt(optionalConstructorArg(), new ParseField("order"));
@ -88,8 +91,11 @@ public class IndexTemplateMetadata {
}
return new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, mapping);
}, new ParseField("mappings"));
PARSER.declareNamedObjects(optionalConstructorArg(),
(p, c, name) -> new AbstractMap.SimpleEntry<>(name, AliasMetadata.Builder.fromXContent(p)), new ParseField("aliases"));
PARSER.declareNamedObjects(
optionalConstructorArg(),
(p, c, name) -> new AbstractMap.SimpleEntry<>(name, AliasMetadata.Builder.fromXContent(p)),
new ParseField("aliases")
);
}
private final String name;
@ -124,17 +130,22 @@ public class IndexTemplateMetadata {
private final ImmutableOpenMap<String, AliasMetadata> aliases;
public IndexTemplateMetadata(String name, int order, Integer version,
List<String> patterns, Settings settings,
MappingMetadata mappings,
ImmutableOpenMap<String, AliasMetadata> aliases) {
public IndexTemplateMetadata(
String name,
int order,
Integer version,
List<String> patterns,
Settings settings,
MappingMetadata mappings,
ImmutableOpenMap<String, AliasMetadata> aliases
) {
if (patterns == null || patterns.isEmpty()) {
throw new IllegalArgumentException("Index patterns must not be null or empty; got " + patterns);
}
this.name = name;
this.order = order;
this.version = version;
this.patterns= patterns;
this.patterns = patterns;
this.settings = settings;
this.mappings = mappings;
this.aliases = aliases;
@ -178,13 +189,13 @@ public class IndexTemplateMetadata {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
IndexTemplateMetadata that = (IndexTemplateMetadata) o;
return order == that.order &&
Objects.equals(name, that.name) &&
Objects.equals(version, that.version) &&
Objects.equals(patterns, that.patterns) &&
Objects.equals(settings, that.settings) &&
Objects.equals(mappings, that.mappings) &&
Objects.equals(aliases, that.aliases);
return order == that.order
&& Objects.equals(name, that.name)
&& Objects.equals(version, that.version)
&& Objects.equals(patterns, that.patterns)
&& Objects.equals(settings, that.settings)
&& Objects.equals(mappings, that.mappings)
&& Objects.equals(aliases, that.aliases);
}
@Override
@ -269,7 +280,6 @@ public class IndexTemplateMetadata {
return new IndexTemplateMetadata(name, order, version, indexPatterns, settings, mappings, aliases.build());
}
public static IndexTemplateMetadata fromXContent(XContentParser parser, String templateName) throws IOException {
return PARSER.parse(parser, templateName);
}

View File

@ -118,7 +118,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* Sets the name of the index template.
*/
public PutIndexTemplateRequest name(String name) {
if(name == null) {
if (name == null) {
throw new IllegalArgumentException("Name cannot be null");
}
this.name = name;
@ -237,8 +237,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* @param source The mapping source
*/
public PutIndexTemplateRequest mapping(XContentBuilder source) {
internalMapping(XContentHelper.convertToMap(BytesReference.bytes(source),
true, source.contentType()).v2());
internalMapping(XContentHelper.convertToMap(BytesReference.bytes(source), true, source.contentType()).v2());
return this;
}
@ -268,8 +267,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
builder.map(source);
Objects.requireNonNull(builder.contentType());
try {
mappings = new BytesArray(
XContentHelper.convertToJson(BytesReference.bytes(builder), false, false, builder.contentType()));
mappings = new BytesArray(XContentHelper.convertToJson(BytesReference.bytes(builder), false, false, builder.contentType()));
return this;
} catch (IOException e) {
throw new UncheckedIOException("failed to convert source to json", e);
@ -303,11 +301,11 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
for (Map.Entry<String, Object> entry : source.entrySet()) {
String name = entry.getKey();
if (name.equals("template")) {
if(entry.getValue() instanceof String) {
if (entry.getValue() instanceof String) {
this.template = (String) entry.getValue();
}
} else if (name.equals("index_patterns")) {
if(entry.getValue() instanceof String) {
if (entry.getValue() instanceof String) {
patterns(Collections.singletonList((String) entry.getValue()));
} else if (entry.getValue() instanceof List) {
List<String> elements = ((List<?>) entry.getValue()).stream().map(Object::toString).collect(Collectors.toList());
@ -321,7 +319,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
if ((entry.getValue() instanceof Integer) == false) {
throw new IllegalArgumentException("Malformed [version] value, should be an integer");
}
version((Integer)entry.getValue());
version((Integer) entry.getValue());
} else if (name.equals("settings")) {
if ((entry.getValue() instanceof Map) == false) {
throw new IllegalArgumentException("Malformed [settings] section, should include an inner object");
@ -367,7 +365,6 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
return source(XContentHelper.convertToMap(source, true, xContentType).v2());
}
public Set<Alias> aliases() {
return this.aliases;
}
@ -404,15 +401,20 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
*/
public PutIndexTemplateRequest aliases(BytesReference source) {
// EMPTY is safe here because we never call namedObject
try (XContentParser parser = XContentHelper
.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, source)) {
//move to the first alias
try (
XContentParser parser = XContentHelper.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
source
)
) {
// move to the first alias
parser.nextToken();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
alias(Alias.fromXContent(parser));
}
return this;
} catch(IOException e) {
} catch (IOException e) {
throw new OpenSearchParseException("Failed to parse aliases", e);
}
}
@ -457,8 +459,13 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
if (mappings != null) {
builder.field("mappings");
try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, mappings.utf8ToString())) {
try (
XContentParser parser = JsonXContent.jsonXContent.createParser(
NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
mappings.utf8ToString()
)
) {
builder.copyCurrentStructure(parser);
}
}

View File

@ -49,8 +49,11 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.constructo
public class ResizeResponse extends ShardsAcknowledgedResponse {
private static final ParseField INDEX = new ParseField("index");
private static final ConstructingObjectParser<ResizeResponse, Void> PARSER = new ConstructingObjectParser<>("resize_index",
true, args -> new ResizeResponse((boolean) args[0], (boolean) args[1], (String) args[2]));
private static final ConstructingObjectParser<ResizeResponse, Void> PARSER = new ConstructingObjectParser<>(
"resize_index",
true,
args -> new ResizeResponse((boolean) args[0], (boolean) args[1], (String) args[2])
);
static {
PARSER.declareBoolean(constructorArg(), new ParseField(AcknowledgedResponse.PARSE_FIELD_NAME));

View File

@ -51,20 +51,24 @@ public class SimulateIndexTemplateResponse {
private static final ParseField INDEX_PATTERNS = new ParseField("index_patterns");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<SimulateIndexTemplateResponse, Void> PARSER =
new ConstructingObjectParser<>("simulate_index_templates_response", false,
a -> new SimulateIndexTemplateResponse(
a[0] != null ? (Template) a[0] : null,
a[1] != null ?
((List<IndexTemplateAndPatterns>) a[1]).stream()
.collect(Collectors.toMap(IndexTemplateAndPatterns::name, IndexTemplateAndPatterns::indexPatterns)) : null
)
);
private static final ConstructingObjectParser<SimulateIndexTemplateResponse, Void> PARSER = new ConstructingObjectParser<>(
"simulate_index_templates_response",
false,
a -> new SimulateIndexTemplateResponse(
a[0] != null ? (Template) a[0] : null,
a[1] != null
? ((List<IndexTemplateAndPatterns>) a[1]).stream()
.collect(Collectors.toMap(IndexTemplateAndPatterns::name, IndexTemplateAndPatterns::indexPatterns))
: null
)
);
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<IndexTemplateAndPatterns, Void> INNER_PARSER =
new ConstructingObjectParser<>("index_template_and_patterns", false,
a -> new IndexTemplateAndPatterns((String) a[0], (List<String>) a[1]));
private static final ConstructingObjectParser<IndexTemplateAndPatterns, Void> INNER_PARSER = new ConstructingObjectParser<>(
"index_template_and_patterns",
false,
a -> new IndexTemplateAndPatterns((String) a[0], (List<String>) a[1])
);
private static class IndexTemplateAndPatterns {
String name;
@ -136,7 +140,11 @@ public class SimulateIndexTemplateResponse {
@Override
public String toString() {
return "SimulateIndexTemplateResponse{" + "resolved template=" + resolvedTemplate + ", overlapping templates="
+ String.join("|", overlappingTemplates.keySet()) + "}";
return "SimulateIndexTemplateResponse{"
+ "resolved template="
+ resolvedTemplate
+ ", overlapping templates="
+ String.join("|", overlappingTemplates.keySet())
+ "}";
}
}

View File

@ -55,7 +55,7 @@ public class RolloverRequest extends TimedRequest implements ToXContentObject {
private final String newIndexName;
private boolean dryRun;
private final Map<String, Condition<?>> conditions = new HashMap<>(2);
//the index name "_na_" is never read back, what matters are settings, mappings and aliases
// the index name "_na_" is never read back, what matters are settings, mappings and aliases
private final CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");
public RolloverRequest(String alias, String newIndexName) {
@ -80,7 +80,6 @@ public class RolloverRequest extends TimedRequest implements ToXContentObject {
return newIndexName;
}
/**
* Sets if the rollover should not be executed when conditions are met
*/
@ -88,6 +87,7 @@ public class RolloverRequest extends TimedRequest implements ToXContentObject {
this.dryRun = dryRun;
return this;
}
/**
* Returns if the rollover should not be executed when conditions are met
*/
@ -118,6 +118,7 @@ public class RolloverRequest extends TimedRequest implements ToXContentObject {
this.conditions.put(maxDocsCondition.name(), maxDocsCondition);
return this;
}
/**
* Adds a size-based condition to check if the index size is at least <code>size</code>.
*/
@ -129,6 +130,7 @@ public class RolloverRequest extends TimedRequest implements ToXContentObject {
this.conditions.put(maxSizeCondition.name(), maxSizeCondition);
return this;
}
/**
* Returns all set conditions
*/

View File

@ -54,9 +54,19 @@ public final class RolloverResponse extends ShardsAcknowledgedResponse {
private static final ParseField CONDITIONS = new ParseField("conditions");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<RolloverResponse, Void> PARSER = new ConstructingObjectParser<>("rollover",
true, args -> new RolloverResponse((String) args[0], (String) args[1], (Map<String,Boolean>) args[2],
(Boolean)args[3], (Boolean)args[4], (Boolean) args[5], (Boolean) args[6]));
private static final ConstructingObjectParser<RolloverResponse, Void> PARSER = new ConstructingObjectParser<>(
"rollover",
true,
args -> new RolloverResponse(
(String) args[0],
(String) args[1],
(Map<String, Boolean>) args[2],
(Boolean) args[3],
(Boolean) args[4],
(Boolean) args[5],
(Boolean) args[6]
)
);
static {
PARSER.declareString(constructorArg(), OLD_INDEX);
@ -73,8 +83,15 @@ public final class RolloverResponse extends ShardsAcknowledgedResponse {
private final boolean dryRun;
private final boolean rolledOver;
public RolloverResponse(String oldIndex, String newIndex, Map<String, Boolean> conditionResults,
boolean dryRun, boolean rolledOver, boolean acknowledged, boolean shardsAcknowledged) {
public RolloverResponse(
String oldIndex,
String newIndex,
Map<String, Boolean> conditionResults,
boolean dryRun,
boolean rolledOver,
boolean acknowledged,
boolean shardsAcknowledged
) {
super(acknowledged, shardsAcknowledged);
this.oldIndex = oldIndex;
this.newIndex = newIndex;
@ -126,11 +143,11 @@ public final class RolloverResponse extends ShardsAcknowledgedResponse {
public boolean equals(Object o) {
if (super.equals(o)) {
RolloverResponse that = (RolloverResponse) o;
return dryRun == that.dryRun &&
rolledOver == that.rolledOver &&
Objects.equals(oldIndex, that.oldIndex) &&
Objects.equals(newIndex, that.newIndex) &&
Objects.equals(conditionStatus, that.conditionStatus);
return dryRun == that.dryRun
&& rolledOver == that.rolledOver
&& Objects.equals(oldIndex, that.oldIndex)
&& Objects.equals(newIndex, that.newIndex)
&& Objects.equals(conditionStatus, that.conditionStatus);
}
return false;
}

View File

@ -43,9 +43,11 @@ import java.io.IOException;
public class ExecuteSnapshotLifecyclePolicyResponse implements ToXContentObject {
private static final ParseField SNAPSHOT_NAME = new ParseField("snapshot_name");
private static final ConstructingObjectParser<ExecuteSnapshotLifecyclePolicyResponse, Void> PARSER =
new ConstructingObjectParser<>("excecute_snapshot_policy", true,
a -> new ExecuteSnapshotLifecyclePolicyResponse((String) a[0]));
private static final ConstructingObjectParser<ExecuteSnapshotLifecyclePolicyResponse, Void> PARSER = new ConstructingObjectParser<>(
"excecute_snapshot_policy",
true,
a -> new ExecuteSnapshotLifecyclePolicyResponse((String) a[0])
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), SNAPSHOT_NAME);

View File

@ -34,5 +34,4 @@ package org.opensearch.client.slm;
import org.opensearch.client.TimedRequest;
public class ExecuteSnapshotLifecycleRetentionRequest extends TimedRequest {
}
public class ExecuteSnapshotLifecycleRetentionRequest extends TimedRequest {}

View File

@ -50,9 +50,11 @@ public class SnapshotInvocationRecord implements ToXContentObject {
private long timestamp;
private String details;
public static final ConstructingObjectParser<SnapshotInvocationRecord, String> PARSER =
new ConstructingObjectParser<>("snapshot_policy_invocation_record", true,
a -> new SnapshotInvocationRecord((String) a[0], (long) a[1], (String) a[2]));
public static final ConstructingObjectParser<SnapshotInvocationRecord, String> PARSER = new ConstructingObjectParser<>(
"snapshot_policy_invocation_record",
true,
a -> new SnapshotInvocationRecord((String) a[0], (long) a[1], (String) a[2])
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), SNAPSHOT_NAME);
@ -101,9 +103,9 @@ public class SnapshotInvocationRecord implements ToXContentObject {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SnapshotInvocationRecord that = (SnapshotInvocationRecord) o;
return getTimestamp() == that.getTimestamp() &&
Objects.equals(getSnapshotName(), that.getSnapshotName()) &&
Objects.equals(getDetails(), that.getDetails());
return getTimestamp() == that.getTimestamp()
&& Objects.equals(getSnapshotName(), that.getSnapshotName())
&& Objects.equals(getDetails(), that.getDetails());
}
@Override

View File

@ -34,5 +34,4 @@ package org.opensearch.client.slm;
import org.opensearch.client.TimedRequest;
public class SnapshotLifecycleManagementStatusRequest extends TimedRequest {
}
public class SnapshotLifecycleManagementStatusRequest extends TimedRequest {}

View File

@ -60,16 +60,18 @@ public class SnapshotLifecyclePolicy implements ToXContentObject {
private static final ParseField RETENTION = new ParseField("retention");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<SnapshotLifecyclePolicy, String> PARSER =
new ConstructingObjectParser<>("snapshot_lifecycle", true,
(a, id) -> {
String name = (String) a[0];
String schedule = (String) a[1];
String repo = (String) a[2];
Map<String, Object> config = (Map<String, Object>) a[3];
SnapshotRetentionConfiguration retention = (SnapshotRetentionConfiguration) a[4];
return new SnapshotLifecyclePolicy(id, name, schedule, repo, config, retention);
});
private static final ConstructingObjectParser<SnapshotLifecyclePolicy, String> PARSER = new ConstructingObjectParser<>(
"snapshot_lifecycle",
true,
(a, id) -> {
String name = (String) a[0];
String schedule = (String) a[1];
String repo = (String) a[2];
Map<String, Object> config = (Map<String, Object>) a[3];
SnapshotRetentionConfiguration retention = (SnapshotRetentionConfiguration) a[4];
return new SnapshotLifecyclePolicy(id, name, schedule, repo, config, retention);
}
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
@ -79,9 +81,14 @@ public class SnapshotLifecyclePolicy implements ToXContentObject {
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SnapshotRetentionConfiguration::parse, RETENTION);
}
public SnapshotLifecyclePolicy(final String id, final String name, final String schedule,
final String repository, @Nullable final Map<String, Object> configuration,
@Nullable final SnapshotRetentionConfiguration retentionPolicy) {
public SnapshotLifecyclePolicy(
final String id,
final String name,
final String schedule,
final String repository,
@Nullable final Map<String, Object> configuration,
@Nullable final SnapshotRetentionConfiguration retentionPolicy
) {
this.id = Objects.requireNonNull(id, "policy id is required");
this.name = Objects.requireNonNull(name, "policy snapshot name is required");
this.schedule = Objects.requireNonNull(schedule, "policy schedule is required");
@ -151,12 +158,12 @@ public class SnapshotLifecyclePolicy implements ToXContentObject {
return false;
}
SnapshotLifecyclePolicy other = (SnapshotLifecyclePolicy) obj;
return Objects.equals(id, other.id) &&
Objects.equals(name, other.name) &&
Objects.equals(schedule, other.schedule) &&
Objects.equals(repository, other.repository) &&
Objects.equals(configuration, other.configuration) &&
Objects.equals(retentionPolicy, other.retentionPolicy);
return Objects.equals(id, other.id)
&& Objects.equals(name, other.name)
&& Objects.equals(schedule, other.schedule)
&& Objects.equals(repository, other.repository)
&& Objects.equals(configuration, other.configuration)
&& Objects.equals(retentionPolicy, other.retentionPolicy);
}
@Override

View File

@ -70,20 +70,20 @@ public class SnapshotLifecyclePolicyMetadata implements ToXContentObject {
private final SnapshotLifecycleStats.SnapshotPolicyStats policyStats;
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<SnapshotLifecyclePolicyMetadata, String> PARSER =
new ConstructingObjectParser<>("snapshot_policy_metadata",
a -> {
SnapshotLifecyclePolicy policy = (SnapshotLifecyclePolicy) a[0];
long version = (long) a[1];
long modifiedDate = (long) a[2];
SnapshotInvocationRecord lastSuccess = (SnapshotInvocationRecord) a[3];
SnapshotInvocationRecord lastFailure = (SnapshotInvocationRecord) a[4];
long nextExecution = (long) a[5];
SnapshotInProgress sip = (SnapshotInProgress) a[6];
SnapshotLifecycleStats.SnapshotPolicyStats stats = (SnapshotLifecycleStats.SnapshotPolicyStats) a[7];
return new SnapshotLifecyclePolicyMetadata(policy, version, modifiedDate, lastSuccess,
lastFailure, nextExecution, sip, stats);
});
public static final ConstructingObjectParser<SnapshotLifecyclePolicyMetadata, String> PARSER = new ConstructingObjectParser<>(
"snapshot_policy_metadata",
a -> {
SnapshotLifecyclePolicy policy = (SnapshotLifecyclePolicy) a[0];
long version = (long) a[1];
long modifiedDate = (long) a[2];
SnapshotInvocationRecord lastSuccess = (SnapshotInvocationRecord) a[3];
SnapshotInvocationRecord lastFailure = (SnapshotInvocationRecord) a[4];
long nextExecution = (long) a[5];
SnapshotInProgress sip = (SnapshotInProgress) a[6];
SnapshotLifecycleStats.SnapshotPolicyStats stats = (SnapshotLifecycleStats.SnapshotPolicyStats) a[7];
return new SnapshotLifecyclePolicyMetadata(policy, version, modifiedDate, lastSuccess, lastFailure, nextExecution, sip, stats);
}
);
static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(), SnapshotLifecyclePolicy::parse, POLICY);
@ -93,8 +93,11 @@ public class SnapshotLifecyclePolicyMetadata implements ToXContentObject {
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SnapshotInvocationRecord::parse, LAST_FAILURE);
PARSER.declareLong(ConstructingObjectParser.constructorArg(), NEXT_EXECUTION_MILLIS);
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SnapshotInProgress::parse, SNAPSHOT_IN_PROGRESS);
PARSER.declareObject(ConstructingObjectParser.constructorArg(),
(p, c) -> SnapshotLifecycleStats.SnapshotPolicyStats.parse(p, "policy"), POLICY_STATS);
PARSER.declareObject(
ConstructingObjectParser.constructorArg(),
(p, c) -> SnapshotLifecycleStats.SnapshotPolicyStats.parse(p, "policy"),
POLICY_STATS
);
}
@ -102,11 +105,16 @@ public class SnapshotLifecyclePolicyMetadata implements ToXContentObject {
return PARSER.apply(parser, id);
}
public SnapshotLifecyclePolicyMetadata(SnapshotLifecyclePolicy policy, long version, long modifiedDate,
SnapshotInvocationRecord lastSuccess, SnapshotInvocationRecord lastFailure,
long nextExecution,
@Nullable SnapshotInProgress snapshotInProgress,
SnapshotLifecycleStats.SnapshotPolicyStats policyStats) {
public SnapshotLifecyclePolicyMetadata(
SnapshotLifecyclePolicy policy,
long version,
long modifiedDate,
SnapshotInvocationRecord lastSuccess,
SnapshotInvocationRecord lastFailure,
long nextExecution,
@Nullable SnapshotInProgress snapshotInProgress,
SnapshotLifecycleStats.SnapshotPolicyStats policyStats
) {
this.policy = policy;
this.version = version;
this.modifiedDate = modifiedDate;
@ -191,13 +199,13 @@ public class SnapshotLifecyclePolicyMetadata implements ToXContentObject {
return false;
}
SnapshotLifecyclePolicyMetadata other = (SnapshotLifecyclePolicyMetadata) obj;
return Objects.equals(policy, other.policy) &&
Objects.equals(version, other.version) &&
Objects.equals(modifiedDate, other.modifiedDate) &&
Objects.equals(lastSuccess, other.lastSuccess) &&
Objects.equals(lastFailure, other.lastFailure) &&
Objects.equals(nextExecution, other.nextExecution) &&
Objects.equals(policyStats, other.policyStats);
return Objects.equals(policy, other.policy)
&& Objects.equals(version, other.version)
&& Objects.equals(modifiedDate, other.modifiedDate)
&& Objects.equals(lastSuccess, other.lastSuccess)
&& Objects.equals(lastFailure, other.lastFailure)
&& Objects.equals(nextExecution, other.nextExecution)
&& Objects.equals(policyStats, other.policyStats);
}
public static class SnapshotInProgress implements ToXContentObject {
@ -207,14 +215,17 @@ public class SnapshotLifecyclePolicyMetadata implements ToXContentObject {
private static final ParseField START_TIME = new ParseField("start_time_millis");
private static final ParseField FAILURE = new ParseField("failure");
private static final ConstructingObjectParser<SnapshotInProgress, Void> PARSER =
new ConstructingObjectParser<>("snapshot_in_progress", true, a -> {
private static final ConstructingObjectParser<SnapshotInProgress, Void> PARSER = new ConstructingObjectParser<>(
"snapshot_in_progress",
true,
a -> {
SnapshotId id = new SnapshotId((String) a[0], (String) a[1]);
String state = (String) a[2];
long start = (long) a[3];
String failure = (String) a[4];
return new SnapshotInProgress(id, state, start, failure);
});
}
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
@ -269,10 +280,10 @@ public class SnapshotLifecyclePolicyMetadata implements ToXContentObject {
return false;
}
SnapshotInProgress other = (SnapshotInProgress) obj;
return Objects.equals(snapshotId, other.snapshotId) &&
Objects.equals(state, other.state) &&
startTime == other.startTime &&
Objects.equals(failure, other.failure);
return Objects.equals(snapshotId, other.snapshotId)
&& Objects.equals(state, other.state)
&& startTime == other.startTime
&& Objects.equals(failure, other.failure);
}
@Override

View File

@ -68,19 +68,20 @@ public class SnapshotLifecycleStats implements ToXContentObject {
public static final ParseField TOTAL_DELETIONS = new ParseField("total_snapshots_deleted");
public static final ParseField TOTAL_DELETION_FAILURES = new ParseField("total_snapshot_deletion_failures");
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<SnapshotLifecycleStats, Void> PARSER =
new ConstructingObjectParser<>("snapshot_policy_stats", true,
a -> {
long runs = (long) a[0];
long failed = (long) a[1];
long timedOut = (long) a[2];
long timeMs = (long) a[3];
Map<String, SnapshotPolicyStats> policyStatsMap = ((List<SnapshotPolicyStats>) a[4]).stream()
.collect(Collectors.toMap(m -> m.policyId, Function.identity()));
return new SnapshotLifecycleStats(runs, failed, timedOut, timeMs, policyStatsMap);
});
private static final ConstructingObjectParser<SnapshotLifecycleStats, Void> PARSER = new ConstructingObjectParser<>(
"snapshot_policy_stats",
true,
a -> {
long runs = (long) a[0];
long failed = (long) a[1];
long timedOut = (long) a[2];
long timeMs = (long) a[3];
Map<String, SnapshotPolicyStats> policyStatsMap = ((List<SnapshotPolicyStats>) a[4]).stream()
.collect(Collectors.toMap(m -> m.policyId, Function.identity()));
return new SnapshotLifecycleStats(runs, failed, timedOut, timeMs, policyStatsMap);
}
);
static {
PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_RUNS);
@ -91,8 +92,13 @@ public class SnapshotLifecycleStats implements ToXContentObject {
}
// Package visible for testing
private SnapshotLifecycleStats(long retentionRuns, long retentionFailed, long retentionTimedOut, long retentionTimeMs,
Map<String, SnapshotPolicyStats> policyStats) {
private SnapshotLifecycleStats(
long retentionRuns,
long retentionFailed,
long retentionTimedOut,
long retentionTimeMs,
Map<String, SnapshotPolicyStats> policyStats
) {
this.retentionRunCount = retentionRuns;
this.retentionFailedCount = retentionFailed;
this.retentionTimedOut = retentionTimedOut;
@ -172,11 +178,11 @@ public class SnapshotLifecycleStats implements ToXContentObject {
return false;
}
SnapshotLifecycleStats other = (SnapshotLifecycleStats) obj;
return retentionRunCount == other.retentionRunCount &&
retentionFailedCount == other.retentionFailedCount &&
retentionTimedOut == other.retentionTimedOut &&
retentionTimeMs == other.retentionTimeMs &&
Objects.equals(policyStats, other.policyStats);
return retentionRunCount == other.retentionRunCount
&& retentionFailedCount == other.retentionFailedCount
&& retentionTimedOut == other.retentionTimedOut
&& retentionTimeMs == other.retentionTimeMs
&& Objects.equals(policyStats, other.policyStats);
}
@Override
@ -197,16 +203,18 @@ public class SnapshotLifecycleStats implements ToXContentObject {
static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted");
static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures");
private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER =
new ConstructingObjectParser<>("snapshot_policy_stats", true,
a -> {
String id = (String) a[0];
long taken = (long) a[1];
long failed = (long) a[2];
long deleted = (long) a[3];
long deleteFailed = (long) a[4];
return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed);
});
private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER = new ConstructingObjectParser<>(
"snapshot_policy_stats",
true,
a -> {
String id = (String) a[0];
long taken = (long) a[1];
long failed = (long) a[2];
long deleted = (long) a[3];
long deleteFailed = (long) a[4];
return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed);
}
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID);
@ -262,11 +270,11 @@ public class SnapshotLifecycleStats implements ToXContentObject {
return false;
}
SnapshotPolicyStats other = (SnapshotPolicyStats) obj;
return Objects.equals(policyId, other.policyId) &&
snapshotsTaken == other.snapshotsTaken &&
snapshotsFailed == other.snapshotsFailed &&
snapshotsDeleted == other.snapshotsDeleted &&
snapshotDeleteFailures == other.snapshotDeleteFailures;
return Objects.equals(policyId, other.policyId)
&& snapshotsTaken == other.snapshotsTaken
&& snapshotsFailed == other.snapshotsFailed
&& snapshotsDeleted == other.snapshotsDeleted
&& snapshotDeleteFailures == other.snapshotDeleteFailures;
}
@Override

View File

@ -52,13 +52,16 @@ public class SnapshotRetentionConfiguration implements ToXContentObject {
private static final ParseField MINIMUM_SNAPSHOT_COUNT = new ParseField("min_count");
private static final ParseField MAXIMUM_SNAPSHOT_COUNT = new ParseField("max_count");
private static final ConstructingObjectParser<SnapshotRetentionConfiguration, Void> PARSER =
new ConstructingObjectParser<>("snapshot_retention", true, a -> {
private static final ConstructingObjectParser<SnapshotRetentionConfiguration, Void> PARSER = new ConstructingObjectParser<>(
"snapshot_retention",
true,
a -> {
TimeValue expireAfter = a[0] == null ? null : TimeValue.parseTimeValue((String) a[0], EXPIRE_AFTER.getPreferredName());
Integer minCount = (Integer) a[1];
Integer maxCount = (Integer) a[2];
return new SnapshotRetentionConfiguration(expireAfter, minCount, maxCount);
});
}
);
static {
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EXPIRE_AFTER);
@ -70,9 +73,11 @@ public class SnapshotRetentionConfiguration implements ToXContentObject {
private final Integer minimumSnapshotCount;
private final Integer maximumSnapshotCount;
public SnapshotRetentionConfiguration(@Nullable TimeValue expireAfter,
@Nullable Integer minimumSnapshotCount,
@Nullable Integer maximumSnapshotCount) {
public SnapshotRetentionConfiguration(
@Nullable TimeValue expireAfter,
@Nullable Integer minimumSnapshotCount,
@Nullable Integer maximumSnapshotCount
) {
this.expireAfter = expireAfter;
this.minimumSnapshotCount = minimumSnapshotCount;
this.maximumSnapshotCount = maximumSnapshotCount;
@ -83,8 +88,12 @@ public class SnapshotRetentionConfiguration implements ToXContentObject {
throw new IllegalArgumentException("maximum snapshot count must be at least 1, but was: " + this.maximumSnapshotCount);
}
if ((maximumSnapshotCount != null && minimumSnapshotCount != null) && this.minimumSnapshotCount > this.maximumSnapshotCount) {
throw new IllegalArgumentException("minimum snapshot count " + this.minimumSnapshotCount +
" cannot be larger than maximum snapshot count " + this.maximumSnapshotCount);
throw new IllegalArgumentException(
"minimum snapshot count "
+ this.minimumSnapshotCount
+ " cannot be larger than maximum snapshot count "
+ this.maximumSnapshotCount
);
}
}
@ -134,9 +143,9 @@ public class SnapshotRetentionConfiguration implements ToXContentObject {
return false;
}
SnapshotRetentionConfiguration other = (SnapshotRetentionConfiguration) obj;
return Objects.equals(this.expireAfter, other.expireAfter) &&
Objects.equals(minimumSnapshotCount, other.minimumSnapshotCount) &&
Objects.equals(maximumSnapshotCount, other.maximumSnapshotCount);
return Objects.equals(this.expireAfter, other.expireAfter)
&& Objects.equals(minimumSnapshotCount, other.minimumSnapshotCount)
&& Objects.equals(maximumSnapshotCount, other.maximumSnapshotCount);
}
@Override

View File

@ -34,5 +34,4 @@ package org.opensearch.client.slm;
import org.opensearch.client.TimedRequest;
public class StartSLMRequest extends TimedRequest {
}
public class StartSLMRequest extends TimedRequest {}

View File

@ -34,5 +34,4 @@ package org.opensearch.client.slm;
import org.opensearch.client.TimedRequest;
public class StopSLMRequest extends TimedRequest {
}
public class StopSLMRequest extends TimedRequest {}

View File

@ -48,7 +48,7 @@ public class CancelTasksRequest implements Validatable {
private Optional<TaskId> taskId = Optional.empty();
private Boolean waitForCompletion;
CancelTasksRequest(){}
CancelTasksRequest() {}
void setNodes(List<String> nodes) {
this.nodes.addAll(nodes);
@ -103,12 +103,12 @@ public class CancelTasksRequest implements Validatable {
if (this == o) return true;
if (!(o instanceof CancelTasksRequest)) return false;
CancelTasksRequest that = (CancelTasksRequest) o;
return Objects.equals(getNodes(), that.getNodes()) &&
Objects.equals(getActions(), that.getActions()) &&
Objects.equals(getTimeout(), that.getTimeout()) &&
Objects.equals(getParentTaskId(), that.getParentTaskId()) &&
Objects.equals(getTaskId(), that.getTaskId()) &&
Objects.equals(waitForCompletion, that.waitForCompletion);
return Objects.equals(getNodes(), that.getNodes())
&& Objects.equals(getActions(), that.getActions())
&& Objects.equals(getTimeout(), that.getTimeout())
&& Objects.equals(getParentTaskId(), that.getParentTaskId())
&& Objects.equals(getTaskId(), that.getTaskId())
&& Objects.equals(waitForCompletion, that.waitForCompletion);
}
@Override
@ -118,14 +118,20 @@ public class CancelTasksRequest implements Validatable {
@Override
public String toString() {
return "CancelTasksRequest{" +
"nodes=" + nodes +
", actions=" + actions +
", timeout=" + timeout +
", parentTaskId=" + parentTaskId +
", taskId=" + taskId +
", waitForCompletion=" + waitForCompletion +
'}';
return "CancelTasksRequest{"
+ "nodes="
+ nodes
+ ", actions="
+ actions
+ ", timeout="
+ timeout
+ ", parentTaskId="
+ parentTaskId
+ ", taskId="
+ taskId
+ ", waitForCompletion="
+ waitForCompletion
+ '}';
}
public static class Builder {
@ -136,28 +142,28 @@ public class CancelTasksRequest implements Validatable {
private List<String> nodesFilter = new ArrayList<>();
private Boolean waitForCompletion;
public Builder withTimeout(TimeValue timeout){
public Builder withTimeout(TimeValue timeout) {
this.timeout = Optional.of(timeout);
return this;
}
public Builder withTaskId(TaskId taskId){
public Builder withTaskId(TaskId taskId) {
this.taskId = Optional.of(taskId);
return this;
}
public Builder withParentTaskId(TaskId taskId){
public Builder withParentTaskId(TaskId taskId) {
this.parentTaskId = Optional.of(taskId);
return this;
}
public Builder withActionsFiltered(List<String> actions){
public Builder withActionsFiltered(List<String> actions) {
this.actionsFilter.clear();
this.actionsFilter.addAll(actions);
return this;
}
public Builder withNodesFiltered(List<String> nodes){
public Builder withNodesFiltered(List<String> nodes) {
this.nodesFilter.clear();
this.nodesFilter.addAll(nodes);
return this;

View File

@ -48,9 +48,7 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalCo
*/
public class CancelTasksResponse extends ListTasksResponse {
CancelTasksResponse(List<NodeData> nodesInfoData,
List<TaskOperationFailure> taskFailures,
List<OpenSearchException> nodeFailures) {
CancelTasksResponse(List<NodeData> nodesInfoData, List<TaskOperationFailure> taskFailures, List<OpenSearchException> nodeFailures) {
super(nodesInfoData, taskFailures, nodeFailures);
}
@ -61,7 +59,9 @@ public class CancelTasksResponse extends ListTasksResponse {
private static ConstructingObjectParser<CancelTasksResponse, Void> PARSER;
static {
ConstructingObjectParser<CancelTasksResponse, Void> parser = new ConstructingObjectParser<>("cancel_tasks_response", true,
ConstructingObjectParser<CancelTasksResponse, Void> parser = new ConstructingObjectParser<>(
"cancel_tasks_response",
true,
constructingObjects -> {
int i = 0;
@SuppressWarnings("unchecked")
@ -71,12 +71,15 @@ public class CancelTasksResponse extends ListTasksResponse {
@SuppressWarnings("unchecked")
List<NodeData> nodesInfoData = (List<NodeData>) constructingObjects[i];
return new CancelTasksResponse(nodesInfoData, tasksFailures, nodeFailures);
});
}
);
parser.declareObjectArray(optionalConstructorArg(), (p, c) ->
TaskOperationFailure.fromXContent(p), new ParseField("task_failures"));
parser.declareObjectArray(optionalConstructorArg(), (p, c) ->
OpenSearchException.fromXContent(p), new ParseField("node_failures"));
parser.declareObjectArray(
optionalConstructorArg(),
(p, c) -> TaskOperationFailure.fromXContent(p),
new ParseField("task_failures")
);
parser.declareObjectArray(optionalConstructorArg(), (p, c) -> OpenSearchException.fromXContent(p), new ParseField("node_failures"));
parser.declareNamedObjects(optionalConstructorArg(), NodeData.PARSER, new ParseField("nodes"));
PARSER = parser;
}
@ -93,12 +96,17 @@ public class CancelTasksResponse extends ListTasksResponse {
@Override
public String toString() {
return "CancelTasksResponse{" +
"taskFailures=" + taskFailures +
", nodeFailures=" + nodeFailures +
", nodesInfoData=" + nodesInfoData +
", tasks=" + tasks +
", taskGroups=" + taskGroups +
'}';
return "CancelTasksResponse{"
+ "taskFailures="
+ taskFailures
+ ", nodeFailures="
+ nodeFailures
+ ", nodesInfoData="
+ nodesInfoData
+ ", tasks="
+ tasks
+ ", taskGroups="
+ taskGroups
+ '}';
}
}

View File

@ -113,9 +113,9 @@ public class GetTaskRequest implements Validatable {
return false;
}
GetTaskRequest other = (GetTaskRequest) obj;
return Objects.equals(nodeId, other.nodeId) &&
taskId == other.taskId &&
waitForCompletion == other.waitForCompletion &&
Objects.equals(timeout, other.timeout);
return Objects.equals(nodeId, other.nodeId)
&& taskId == other.taskId
&& waitForCompletion == other.waitForCompletion
&& Objects.equals(timeout, other.timeout);
}
}

View File

@ -57,8 +57,11 @@ public class GetTaskResponse {
return taskInfo;
}
private static final ConstructingObjectParser<GetTaskResponse, Void> PARSER = new ConstructingObjectParser<>("get_task",
true, a -> new GetTaskResponse((boolean) a[0], (TaskInfo) a[1]));
private static final ConstructingObjectParser<GetTaskResponse, Void> PARSER = new ConstructingObjectParser<>(
"get_task",
true,
a -> new GetTaskResponse((boolean) a[0], (TaskInfo) a[1])
);
static {
PARSER.declareBoolean(constructorArg(), COMPLETED);
PARSER.declareObject(constructorArg(), (p, c) -> TaskInfo.fromXContent(p), TASK);

View File

@ -50,9 +50,7 @@ public class ListTasksResponse {
protected final List<TaskInfo> tasks = new ArrayList<>();
protected final List<TaskGroup> taskGroups = new ArrayList<>();
ListTasksResponse(List<NodeData> nodesInfoData,
List<TaskOperationFailure> taskFailures,
List<OpenSearchException> nodeFailures) {
ListTasksResponse(List<NodeData> nodesInfoData, List<TaskOperationFailure> taskFailures, List<OpenSearchException> nodeFailures) {
if (taskFailures != null) {
this.taskFailures.addAll(taskFailures);
}
@ -62,12 +60,7 @@ public class ListTasksResponse {
if (nodesInfoData != null) {
this.nodesInfoData.addAll(nodesInfoData);
}
this.tasks.addAll(this
.nodesInfoData
.stream()
.flatMap(nodeData -> nodeData.getTasks().stream())
.collect(toList())
);
this.tasks.addAll(this.nodesInfoData.stream().flatMap(nodeData -> nodeData.getTasks().stream()).collect(toList()));
this.taskGroups.addAll(buildTaskGroups());
}
@ -104,9 +97,7 @@ public class ListTasksResponse {
}
public Map<String, List<TaskInfo>> getPerNodeTasks() {
return getTasks()
.stream()
.collect(groupingBy(TaskInfo::getNodeId));
return getTasks().stream().collect(groupingBy(TaskInfo::getNodeId));
}
public List<TaskOperationFailure> getTaskFailures() {
@ -126,12 +117,11 @@ public class ListTasksResponse {
if (this == o) return true;
if (!(o instanceof ListTasksResponse)) return false;
ListTasksResponse response = (ListTasksResponse) o;
return nodesInfoData.equals(response.nodesInfoData) &&
Objects.equals
(getTaskFailures(), response.getTaskFailures()) &&
Objects.equals(getNodeFailures(), response.getNodeFailures()) &&
Objects.equals(getTasks(), response.getTasks()) &&
Objects.equals(getTaskGroups(), response.getTaskGroups());
return nodesInfoData.equals(response.nodesInfoData)
&& Objects.equals(getTaskFailures(), response.getTaskFailures())
&& Objects.equals(getNodeFailures(), response.getNodeFailures())
&& Objects.equals(getTasks(), response.getTasks())
&& Objects.equals(getTaskGroups(), response.getTaskGroups());
}
@Override
@ -141,12 +131,17 @@ public class ListTasksResponse {
@Override
public String toString() {
return "CancelTasksResponse{" +
"nodesInfoData=" + nodesInfoData +
", taskFailures=" + taskFailures +
", nodeFailures=" + nodeFailures +
", tasks=" + tasks +
", taskGroups=" + taskGroups +
'}';
return "CancelTasksResponse{"
+ "nodesInfoData="
+ nodesInfoData
+ ", taskFailures="
+ taskFailures
+ ", nodeFailures="
+ nodeFailures
+ ", tasks="
+ tasks
+ ", taskGroups="
+ taskGroups
+ '}';
}
}

View File

@ -48,7 +48,7 @@ class NodeData {
private String host;
private String ip;
private final List<String> roles = new ArrayList<>();
private final Map<String,String> attributes = new HashMap<>();
private final Map<String, String> attributes = new HashMap<>();
private final List<TaskInfo> tasks = new ArrayList<>();
NodeData(String nodeId) {
@ -60,7 +60,7 @@ class NodeData {
}
public void setAttributes(Map<String, String> attributes) {
if(attributes!=null){
if (attributes != null) {
this.attributes.putAll(attributes);
}
}
@ -78,7 +78,7 @@ class NodeData {
}
void setRoles(List<String> roles) {
if(roles!=null){
if (roles != null) {
this.roles.addAll(roles);
}
}
@ -116,22 +116,34 @@ class NodeData {
}
void setTasks(List<TaskInfo> tasks) {
if(tasks!=null){
if (tasks != null) {
this.tasks.addAll(tasks);
}
}
@Override
public String toString() {
return "NodeData{" +
"nodeId='" + nodeId + '\'' +
", name='" + name + '\'' +
", transportAddress='" + transportAddress + '\'' +
", host='" + host + '\'' +
", ip='" + ip + '\'' +
", roles=" + roles +
", attributes=" + attributes +
'}';
return "NodeData{"
+ "nodeId='"
+ nodeId
+ '\''
+ ", name='"
+ name
+ '\''
+ ", transportAddress='"
+ transportAddress
+ '\''
+ ", host='"
+ host
+ '\''
+ ", ip='"
+ ip
+ '\''
+ ", roles="
+ roles
+ ", attributes="
+ attributes
+ '}';
}
@Override
@ -139,14 +151,14 @@ class NodeData {
if (this == o) return true;
if (!(o instanceof NodeData)) return false;
NodeData nodeData = (NodeData) o;
return Objects.equals(getNodeId(), nodeData.getNodeId()) &&
Objects.equals(getName(), nodeData.getName()) &&
Objects.equals(getTransportAddress(), nodeData.getTransportAddress()) &&
Objects.equals(getHost(), nodeData.getHost()) &&
Objects.equals(getIp(), nodeData.getIp()) &&
Objects.equals(getRoles(), nodeData.getRoles()) &&
Objects.equals(getAttributes(), nodeData.getAttributes()) &&
Objects.equals(getTasks(), nodeData.getTasks());
return Objects.equals(getNodeId(), nodeData.getNodeId())
&& Objects.equals(getName(), nodeData.getName())
&& Objects.equals(getTransportAddress(), nodeData.getTransportAddress())
&& Objects.equals(getHost(), nodeData.getHost())
&& Objects.equals(getIp(), nodeData.getIp())
&& Objects.equals(getRoles(), nodeData.getRoles())
&& Objects.equals(getAttributes(), nodeData.getAttributes())
&& Objects.equals(getTasks(), nodeData.getTasks());
}
@Override
@ -163,10 +175,7 @@ class NodeData {
parser.declareString(NodeData::setHost, new ParseField("host"));
parser.declareString(NodeData::setIp, new ParseField("ip"));
parser.declareStringArray(NodeData::setRoles, new ParseField("roles"));
parser.declareField(NodeData::setAttributes,
(p, c) -> p.mapStrings(),
new ParseField("attributes"),
ObjectParser.ValueType.OBJECT);
parser.declareField(NodeData::setAttributes, (p, c) -> p.mapStrings(), new ParseField("attributes"), ObjectParser.ValueType.OBJECT);
parser.declareNamedObjects(NodeData::setTasks, TaskInfo.PARSER, new ParseField("tasks"));
PARSER = (XContentParser p, Void v, String nodeId) -> parser.parse(p, new NodeData(nodeId), null);
}

View File

@ -30,6 +30,7 @@
*/
package org.opensearch.client.tasks;
import org.opensearch.common.ParseField;
import org.opensearch.common.xcontent.XContentParser;
import java.io.IOException;
@ -83,7 +84,7 @@ public class OpenSearchException {
return suppressed;
}
void addSuppressed(List<OpenSearchException> suppressed){
void addSuppressed(List<OpenSearchException> suppressed) {
this.suppressed.addAll(suppressed);
}
@ -191,7 +192,7 @@ public class OpenSearchException {
}
void addHeader(String key, List<String> value) {
headers.put(key,value);
headers.put(key, value);
}
@ -215,10 +216,10 @@ public class OpenSearchException {
if (this == o) return true;
if (!(o instanceof OpenSearchException)) return false;
OpenSearchException that = (OpenSearchException) o;
return Objects.equals(getMsg(), that.getMsg()) &&
Objects.equals(getCause(), that.getCause()) &&
Objects.equals(getHeaders(), that.getHeaders()) &&
Objects.equals(getSuppressed(), that.getSuppressed());
return Objects.equals(getMsg(), that.getMsg())
&& Objects.equals(getCause(), that.getCause())
&& Objects.equals(getHeaders(), that.getHeaders())
&& Objects.equals(getSuppressed(), that.getSuppressed());
}
@Override
@ -228,11 +229,16 @@ public class OpenSearchException {
@Override
public String toString() {
return "OpenSearchException{" +
"msg='" + msg + '\'' +
", cause=" + cause +
", headers=" + headers +
", suppressed=" + suppressed +
'}';
return "OpenSearchException{"
+ "msg='"
+ msg
+ '\''
+ ", cause="
+ cause
+ ", headers="
+ headers
+ ", suppressed="
+ suppressed
+ '}';
}
}

View File

@ -47,10 +47,7 @@ public class TaskGroup {
@Override
public String toString() {
return "TaskGroup{" +
"task=" + task +
", childTasks=" + childTasks +
'}';
return "TaskGroup{" + "task=" + task + ", childTasks=" + childTasks + '}';
}
private final List<TaskGroup> childTasks = new ArrayList<>();
@ -69,8 +66,7 @@ public class TaskGroup {
if (this == o) return true;
if (!(o instanceof TaskGroup)) return false;
TaskGroup taskGroup = (TaskGroup) o;
return Objects.equals(task, taskGroup.task) &&
Objects.equals(getChildTasks(), taskGroup.getChildTasks());
return Objects.equals(task, taskGroup.task) && Objects.equals(getChildTasks(), taskGroup.getChildTasks());
}
@Override
@ -96,10 +92,7 @@ public class TaskGroup {
}
public TaskGroup build() {
return new TaskGroup(
taskInfo,
childTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toList())
);
return new TaskGroup(taskInfo, childTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toList()));
}
}
@ -111,4 +104,3 @@ public class TaskGroup {
return childTasks;
}
}

View File

@ -87,14 +87,12 @@ public class TaskId {
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof TaskId)) return false;
TaskId taskId = (TaskId) o;
return getId() == taskId.getId() &&
Objects.equals(getNodeId(), taskId.getNodeId());
return getId() == taskId.getId() && Objects.equals(getNodeId(), taskId.getNodeId());
}
@Override

View File

@ -168,40 +168,60 @@ public class TaskInfo {
if (this == o) return true;
if (!(o instanceof TaskInfo)) return false;
TaskInfo taskInfo = (TaskInfo) o;
return getStartTime() == taskInfo.getStartTime() &&
getRunningTimeNanos() == taskInfo.getRunningTimeNanos() &&
isCancellable() == taskInfo.isCancellable() &&
Objects.equals(getTaskId(), taskInfo.getTaskId()) &&
Objects.equals(getType(), taskInfo.getType()) &&
Objects.equals(getAction(), taskInfo.getAction()) &&
Objects.equals(getDescription(), taskInfo.getDescription()) &&
Objects.equals(getParentTaskId(), taskInfo.getParentTaskId()) &&
Objects.equals(status, taskInfo.status) &&
Objects.equals(getHeaders(), taskInfo.getHeaders());
return getStartTime() == taskInfo.getStartTime()
&& getRunningTimeNanos() == taskInfo.getRunningTimeNanos()
&& isCancellable() == taskInfo.isCancellable()
&& Objects.equals(getTaskId(), taskInfo.getTaskId())
&& Objects.equals(getType(), taskInfo.getType())
&& Objects.equals(getAction(), taskInfo.getAction())
&& Objects.equals(getDescription(), taskInfo.getDescription())
&& Objects.equals(getParentTaskId(), taskInfo.getParentTaskId())
&& Objects.equals(status, taskInfo.status)
&& Objects.equals(getHeaders(), taskInfo.getHeaders());
}
@Override
public int hashCode() {
return Objects.hash(
getTaskId(), getType(), getAction(), getDescription(), getStartTime(),
getRunningTimeNanos(), isCancellable(), getParentTaskId(), status, getHeaders()
getTaskId(),
getType(),
getAction(),
getDescription(),
getStartTime(),
getRunningTimeNanos(),
isCancellable(),
getParentTaskId(),
status,
getHeaders()
);
}
@Override
public String toString() {
return "TaskInfo{" +
"taskId=" + taskId +
", type='" + type + '\'' +
", action='" + action + '\'' +
", description='" + description + '\'' +
", startTime=" + startTime +
", runningTimeNanos=" + runningTimeNanos +
", cancellable=" + cancellable +
", parentTaskId=" + parentTaskId +
", status=" + status +
", headers=" + headers +
'}';
return "TaskInfo{"
+ "taskId="
+ taskId
+ ", type='"
+ type
+ '\''
+ ", action='"
+ action
+ '\''
+ ", description='"
+ description
+ '\''
+ ", startTime="
+ startTime
+ ", runningTimeNanos="
+ runningTimeNanos
+ ", cancellable="
+ cancellable
+ ", parentTaskId="
+ parentTaskId
+ ", status="
+ status
+ ", headers="
+ headers
+ '}';
}
}

View File

@ -50,7 +50,7 @@ public class TaskOperationFailure {
private final OpenSearchException reason;
private final String status;
public TaskOperationFailure(String nodeId, long taskId,String status, OpenSearchException reason) {
public TaskOperationFailure(String nodeId, long taskId, String status, OpenSearchException reason) {
this.nodeId = nodeId;
this.taskId = taskId;
this.status = status;
@ -78,38 +78,49 @@ public class TaskOperationFailure {
if (this == o) return true;
if (!(o instanceof TaskOperationFailure)) return false;
TaskOperationFailure that = (TaskOperationFailure) o;
return getTaskId() == that.getTaskId() &&
Objects.equals(getNodeId(), that.getNodeId()) &&
Objects.equals(getReason(), that.getReason()) &&
Objects.equals(getStatus(), that.getStatus());
return getTaskId() == that.getTaskId()
&& Objects.equals(getNodeId(), that.getNodeId())
&& Objects.equals(getReason(), that.getReason())
&& Objects.equals(getStatus(), that.getStatus());
}
@Override
public int hashCode() {
return Objects.hash(getNodeId(), getTaskId(), getReason(), getStatus());
}
@Override
public String toString() {
return "TaskOperationFailure{" +
"nodeId='" + nodeId + '\'' +
", taskId=" + taskId +
", reason=" + reason +
", status='" + status + '\'' +
'}';
return "TaskOperationFailure{"
+ "nodeId='"
+ nodeId
+ '\''
+ ", taskId="
+ taskId
+ ", reason="
+ reason
+ ", status='"
+ status
+ '\''
+ '}';
}
public static TaskOperationFailure fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
private static final ConstructingObjectParser<TaskOperationFailure, Void> PARSER =
new ConstructingObjectParser<>("task_info", true, constructorObjects -> {
private static final ConstructingObjectParser<TaskOperationFailure, Void> PARSER = new ConstructingObjectParser<>(
"task_info",
true,
constructorObjects -> {
int i = 0;
String nodeId = (String) constructorObjects[i++];
long taskId = (long) constructorObjects[i++];
String status = (String) constructorObjects[i++];
OpenSearchException reason = (OpenSearchException) constructorObjects[i];
return new TaskOperationFailure(nodeId, taskId, status, reason);
});
}
);
static {
PARSER.declareString(constructorArg(), new ParseField("node_id"));

View File

@ -45,7 +45,9 @@ public class TaskSubmissionResponse {
public static final ConstructingObjectParser<TaskSubmissionResponse, Void> PARSER = new ConstructingObjectParser<>(
"task_submission_response",
true, a -> new TaskSubmissionResponse((String) a[0]));
true,
a -> new TaskSubmissionResponse((String) a[0])
);
static {
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TASK);

View File

@ -61,10 +61,7 @@ public abstract class AbstractRequestTestCase<C extends ToXContent, S> extends O
final BytesReference bytes = toShuffledXContent(clientTestInstance, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
final XContent xContent = XContentFactory.xContent(xContentType);
final XContentParser parser = xContent.createParser(
xContentRegistry(),
LoggingDeprecationHandler.INSTANCE,
bytes.streamInput());
final XContentParser parser = xContent.createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, bytes.streamInput());
final S serverInstance = doParseToServerInstance(parser);
assertInstances(serverInstance, clientTestInstance);
}

View File

@ -63,7 +63,8 @@ public abstract class AbstractResponseTestCase<S extends ToXContent, C> extends
final XContentParser parser = xContent.createParser(
NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE,
bytes.streamInput());
bytes.streamInput()
);
final C clientInstance = doParseToClientInstance(parser);
assertInstances(serverTestInstance, clientInstance);
}

View File

@ -86,14 +86,20 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
private static BulkProcessor.Builder initBulkProcessorBuilder(BulkProcessor.Listener listener) {
return BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT,
bulkListener), listener);
(request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
listener
);
}
private static BulkProcessor.Builder initBulkProcessorBuilderUsingTypes(BulkProcessor.Listener listener) {
return BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(request, expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
bulkListener), listener);
(request, bulkListener) -> highLevelClient().bulkAsync(
request,
expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE),
bulkListener
),
listener
);
}
public void testThatBulkProcessorCountIsCorrect() throws Exception {
@ -101,11 +107,15 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
int numDocs = randomIntBetween(10, 100);
try (BulkProcessor processor = initBulkProcessorBuilder(listener)
//let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.build()) {
try (
BulkProcessor processor = initBulkProcessorBuilder(listener)
// let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1))
.setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.build()
) {
MultiGetRequest multiGetRequest = indexDocs(processor, numDocs);
@ -125,15 +135,20 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
int numDocs = randomIntBetween(10, 100);
try (BulkProcessor processor = initBulkProcessorBuilder(listener)
//let's make sure that this bulk won't be automatically flushed
.setConcurrentRequests(randomIntBetween(0, 10)).setBulkActions(numDocs + randomIntBetween(1, 100))
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)).build()) {
try (
BulkProcessor processor = initBulkProcessorBuilder(listener)
// let's make sure that this bulk won't be automatically flushed
.setConcurrentRequests(randomIntBetween(0, 10))
.setBulkActions(numDocs + randomIntBetween(1, 100))
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.build()
) {
MultiGetRequest multiGetRequest = indexDocs(processor, numDocs);
assertThat(latch.await(randomInt(500), TimeUnit.MILLISECONDS), equalTo(false));
//we really need an explicit flush as none of the bulk thresholds was reached
// we really need an explicit flush as none of the bulk thresholds was reached
processor.flush();
latch.await();
@ -160,10 +175,14 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
MultiGetRequest multiGetRequest;
try (BulkProcessor processor = initBulkProcessorBuilder(listener)
.setConcurrentRequests(concurrentRequests).setBulkActions(bulkActions)
//set interval and size to high values
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)).build()) {
try (
BulkProcessor processor = initBulkProcessorBuilder(listener).setConcurrentRequests(concurrentRequests)
.setBulkActions(bulkActions)
// set interval and size to high values
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.build()
) {
multiGetRequest = indexDocs(processor, numDocs);
@ -187,9 +206,9 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
assertThat(bulkItemResponse.getFailureMessage(), bulkItemResponse.isFailed(), equalTo(false));
assertThat(bulkItemResponse.getIndex(), equalTo("test"));
assertThat(bulkItemResponse.getType(), equalTo("_doc"));
//with concurrent requests > 1 we can't rely on the order of the bulk requests
// with concurrent requests > 1 we can't rely on the order of the bulk requests
assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(numDocs)));
//we do want to check that we don't get duplicate ids back
// we do want to check that we don't get duplicate ids back
assertThat(ids.add(bulkItemResponse.getId()), equalTo(true));
}
@ -201,11 +220,12 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
int numDocs = randomIntBetween(10, 100);
BulkProcessor processor = initBulkProcessorBuilder(listener)
//let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(randomIntBetween(1, 10),
RandomPicks.randomFrom(random(), ByteSizeUnit.values())))
.build();
// let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1))
.setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(randomIntBetween(1, 10), RandomPicks.randomFrom(random(), ByteSizeUnit.values())))
.build();
MultiGetRequest multiGetRequest = indexDocs(processor, numDocs);
assertThat(processor.awaitClose(1, TimeUnit.MINUTES), is(true));
@ -229,14 +249,16 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception {
Request request = new Request("PUT", "/test-ro");
request.setJsonEntity("{\n" +
" \"settings\" : {\n" +
" \"index\" : {\n" +
" \"blocks.write\" : true\n" +
" }\n" +
" }\n" +
" \n" +
"}");
request.setJsonEntity(
"{\n"
+ " \"settings\" : {\n"
+ " \"index\" : {\n"
+ " \"blocks.write\" : true\n"
+ " }\n"
+ " }\n"
+ " \n"
+ "}"
);
Response response = client().performRequest(request);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
@ -255,22 +277,26 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
MultiGetRequest multiGetRequest = new MultiGetRequest();
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch, closeLatch);
try (BulkProcessor processor = initBulkProcessorBuilder(listener)
.setConcurrentRequests(concurrentRequests).setBulkActions(bulkActions)
//set interval and size to high values
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)).build()) {
try (
BulkProcessor processor = initBulkProcessorBuilder(listener).setConcurrentRequests(concurrentRequests)
.setBulkActions(bulkActions)
// set interval and size to high values
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.build()
) {
for (int i = 1; i <= numDocs; i++) {
// let's make sure we get at least 1 item in the MultiGetRequest regardless of the randomising roulette
if (randomBoolean() || multiGetRequest.getItems().size() == 0) {
testDocs++;
processor.add(new IndexRequest("test").id(Integer.toString(testDocs))
.source(XContentType.JSON, "field", "value"));
processor.add(new IndexRequest("test").id(Integer.toString(testDocs)).source(XContentType.JSON, "field", "value"));
multiGetRequest.add("test", Integer.toString(testDocs));
} else {
testReadOnlyDocs++;
processor.add(new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs))
.source(XContentType.JSON, "field", "value"));
processor.add(
new IndexRequest("test-ro").id(Integer.toString(testReadOnlyDocs)).source(XContentType.JSON, "field", "value")
);
}
}
}
@ -289,15 +315,15 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
assertThat(bulkItemResponse.getType(), equalTo("_doc"));
if (bulkItemResponse.getIndex().equals("test")) {
assertThat(bulkItemResponse.isFailed(), equalTo(false));
//with concurrent requests > 1 we can't rely on the order of the bulk requests
// with concurrent requests > 1 we can't rely on the order of the bulk requests
assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(testDocs)));
//we do want to check that we don't get duplicate ids back
// we do want to check that we don't get duplicate ids back
assertThat(ids.add(bulkItemResponse.getId()), equalTo(true));
} else {
assertThat(bulkItemResponse.isFailed(), equalTo(true));
//with concurrent requests > 1 we can't rely on the order of the bulk requests
// with concurrent requests > 1 we can't rely on the order of the bulk requests
assertThat(Integer.valueOf(bulkItemResponse.getId()), both(greaterThan(0)).and(lessThanOrEqualTo(testReadOnlyDocs)));
//we do want to check that we don't get duplicate ids back
// we do want to check that we don't get duplicate ids back
assertThat(readOnlyIds.add(bulkItemResponse.getId()), equalTo(true));
}
}
@ -333,7 +359,6 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
assertThat(hits, everyItem(hasProperty(fieldFromSource("user"), equalTo("some user"))));
assertThat(hits, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ"))));
Iterable<SearchHit> blogs = searchAll(new SearchRequest("blogs").routing("routing"));
assertThat(blogs, everyItem(hasProperty(fieldFromSource("title"), equalTo("some title"))));
assertThat(blogs, everyItem(hasProperty(fieldFromSource("fieldNameXYZ"), equalTo("valueXYZ"))));
@ -350,18 +375,22 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
{
final CountDownLatch latch = new CountDownLatch(1);
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
//Check that untyped document additions inherit the global type
// Check that untyped document additions inherit the global type
String globalType = customType;
String localType = null;
try (BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener)
//let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
try (
BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener)
// let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1))
.setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.setGlobalIndex("test")
.setGlobalType(globalType)
.setGlobalRouting("routing")
.setGlobalPipeline("pipeline_id")
.build()) {
.build()
) {
indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id");
latch.await();
@ -380,20 +409,24 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
}
{
//Check that typed document additions don't inherit the global type
// Check that typed document additions don't inherit the global type
String globalType = ignoredType;
String localType = customType;
final CountDownLatch latch = new CountDownLatch(1);
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
try (BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener)
//let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
try (
BulkProcessor processor = initBulkProcessorBuilderUsingTypes(listener)
// let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1))
.setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.setGlobalIndex("test")
.setGlobalType(globalType)
.setGlobalRouting("routing")
.setGlobalPipeline("pipeline_id")
.build()) {
.build()
) {
indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id");
latch.await();
@ -410,21 +443,25 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
}
}
{
//Check that untyped document additions and untyped global inherit the established custom type
// Check that untyped document additions and untyped global inherit the established custom type
// (the custom document type introduced to the mapping by the earlier code in this test)
String globalType = null;
String localType = null;
final CountDownLatch latch = new CountDownLatch(1);
BulkProcessorTestListener listener = new BulkProcessorTestListener(latch);
try (BulkProcessor processor = initBulkProcessorBuilder(listener)
//let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1)).setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24)).setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
try (
BulkProcessor processor = initBulkProcessorBuilder(listener)
// let's make sure that the bulk action limit trips, one single execution will index all the documents
.setConcurrentRequests(randomIntBetween(0, 1))
.setBulkActions(numDocs)
.setFlushInterval(TimeValue.timeValueHours(24))
.setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
.setGlobalIndex("test")
.setGlobalType(globalType)
.setGlobalRouting("routing")
.setGlobalPipeline("pipeline_id")
.build()) {
.build()
) {
indexDocs(processor, numDocs, null, localType, "test", globalType, "pipeline_id");
latch.await();
@ -444,19 +481,28 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
@SuppressWarnings("unchecked")
private Matcher<SearchHit>[] expectedIds(int numDocs) {
return IntStream.rangeClosed(1, numDocs)
.boxed()
.map(n -> hasId(n.toString()))
.<Matcher<SearchHit>>toArray(Matcher[]::new);
return IntStream.rangeClosed(1, numDocs).boxed().map(n -> hasId(n.toString())).<Matcher<SearchHit>>toArray(Matcher[]::new);
}
private MultiGetRequest indexDocs(BulkProcessor processor, int numDocs, String localIndex, String localType,
String globalIndex, String globalType, String globalPipeline) throws Exception {
private MultiGetRequest indexDocs(
BulkProcessor processor,
int numDocs,
String localIndex,
String localType,
String globalIndex,
String globalType,
String globalPipeline
) throws Exception {
MultiGetRequest multiGetRequest = new MultiGetRequest();
for (int i = 1; i <= numDocs; i++) {
if (randomBoolean()) {
processor.add(new IndexRequest(localIndex, localType, Integer.toString(i))
.source(XContentType.JSON, "field", randomRealisticUnicodeOfLengthBetween(1, 30)));
processor.add(
new IndexRequest(localIndex, localType, Integer.toString(i)).source(
XContentType.JSON,
"field",
randomRealisticUnicodeOfLengthBetween(1, 30)
)
);
} else {
BytesArray data = bytesBulkRequest(localIndex, localType, i);
processor.add(data, globalIndex, globalType, globalPipeline, XContentType.JSON);
@ -485,10 +531,7 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
action.field("_id", Integer.toString(id));
action.endObject().endObject();
XContentBuilder source = jsonBuilder()
.startObject()
.field("field", randomRealisticUnicodeOfLengthBetween(1, 30))
.endObject();
XContentBuilder source = jsonBuilder().startObject().field("field", randomRealisticUnicodeOfLengthBetween(1, 30)).endObject();
String request = Strings.toString(action) + "\n" + Strings.toString(source) + "\n";
return new BytesArray(request);
@ -509,8 +552,11 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
assertThat(bulkItemResponse.getIndex(), equalTo("test"));
assertThat(bulkItemResponse.getType(), equalTo(expectedType));
assertThat(bulkItemResponse.getId(), equalTo(Integer.toString(i++)));
assertThat("item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(),
bulkItemResponse.isFailed(), equalTo(false));
assertThat(
"item " + i + " failed with cause: " + bulkItemResponse.getFailureMessage(),
bulkItemResponse.isFailed(),
equalTo(false)
);
}
}
@ -560,5 +606,4 @@ public class BulkProcessorIT extends OpenSearchRestHighLevelClientTestCase {
}
}
}

View File

@ -31,7 +31,6 @@
package org.opensearch.client;
import org.opensearch.client.RequestOptions;
import org.opensearch.action.admin.indices.refresh.RefreshRequest;
import org.opensearch.action.bulk.BackoffPolicy;
import org.opensearch.action.bulk.BulkItemResponse;
@ -63,7 +62,9 @@ public class BulkProcessorRetryIT extends OpenSearchRestHighLevelClientTestCase
private static BulkProcessor.Builder initBulkProcessorBuilder(BulkProcessor.Listener listener) {
return BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener), listener);
(request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
listener
);
}
public void testBulkRejectionLoadWithoutBackoff() throws Exception {
@ -84,8 +85,7 @@ public class BulkProcessorRetryIT extends OpenSearchRestHighLevelClientTestCase
BulkProcessor bulkProcessor = initBulkProcessorBuilder(new BulkProcessor.Listener() {
@Override
public void beforeBulk(long executionId, BulkRequest request) {
}
public void beforeBulk(long executionId, BulkRequest request) {}
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
@ -100,10 +100,7 @@ public class BulkProcessorRetryIT extends OpenSearchRestHighLevelClientTestCase
responses.add(failure);
latch.countDown();
}
}).setBulkActions(1)
.setConcurrentRequests(randomIntBetween(0, 100))
.setBackoffPolicy(internalPolicy)
.build();
}).setBulkActions(1).setConcurrentRequests(randomIntBetween(0, 100)).setBackoffPolicy(internalPolicy).build();
MultiGetRequest multiGetRequest = indexDocs(bulkProcessor, numberOfAsyncOps);
latch.await(10, TimeUnit.SECONDS);
@ -171,8 +168,10 @@ public class BulkProcessorRetryIT extends OpenSearchRestHighLevelClientTestCase
private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) {
MultiGetRequest multiGetRequest = new MultiGetRequest();
for (int i = 1; i <= numDocs; i++) {
processor.add(new IndexRequest(INDEX_NAME).id(Integer.toString(i))
.source(XContentType.JSON, "field", randomRealisticUnicodeOfCodepointLengthBetween(1, 30)));
processor.add(
new IndexRequest(INDEX_NAME).id(Integer.toString(i))
.source(XContentType.JSON, "field", randomRealisticUnicodeOfCodepointLengthBetween(1, 30))
);
multiGetRequest.add(INDEX_NAME, Integer.toString(i));
}
return multiGetRequest;

View File

@ -61,10 +61,8 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
createFieldAddingPipleine("xyz", "fieldNameXYZ", "valueXYZ");
BulkRequest request = new BulkRequest();
request.add(new IndexRequest("test").id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("test").id("2")
.source(XContentType.JSON, "field", "bulk2"));
request.add(new IndexRequest("test").id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("test").id("2").source(XContentType.JSON, "field", "bulk2"));
request.pipeline("xyz");
bulk(request);
@ -80,12 +78,8 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
BulkRequest request = new BulkRequest();
request.pipeline("globalId");
request.add(new IndexRequest("test").id("1")
.source(XContentType.JSON, "field", "bulk1")
.setPipeline("perIndexId"));
request.add(new IndexRequest("test").id("2")
.source(XContentType.JSON, "field", "bulk2")
.setPipeline("perIndexId"));
request.add(new IndexRequest("test").id("1").source(XContentType.JSON, "field", "bulk1").setPipeline("perIndexId"));
request.add(new IndexRequest("test").id("2").source(XContentType.JSON, "field", "bulk2").setPipeline("perIndexId"));
bulk(request);
@ -113,19 +107,19 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
bulk(request);
Iterable<SearchHit> hits = searchAll("test");
assertThat(hits, containsInAnyOrder(
both(hasId("1"))
.and(hasProperty(fieldFromSource("someNewField"), equalTo("someValue"))),
both(hasId("2"))
.and(hasProperty(fieldFromSource("fieldXYZ"), equalTo("valueXYZ")))));
assertThat(
hits,
containsInAnyOrder(
both(hasId("1")).and(hasProperty(fieldFromSource("someNewField"), equalTo("someValue"))),
both(hasId("2")).and(hasProperty(fieldFromSource("fieldXYZ"), equalTo("valueXYZ")))
)
);
}
public void testGlobalIndex() throws IOException {
BulkRequest request = new BulkRequest("global_index", null);
request.add(new IndexRequest().id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest().id("2")
.source(XContentType.JSON, "field", "bulk2"));
request.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest().id("2").source(XContentType.JSON, "field", "bulk2"));
bulk(request);
@ -136,27 +130,22 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
@SuppressWarnings("unchecked")
public void testIndexGlobalAndPerRequest() throws IOException {
BulkRequest request = new BulkRequest("global_index", null);
request.add(new IndexRequest("local_index").id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest().id("2") // will take global index
.source(XContentType.JSON, "field", "bulk2"));
request.add(new IndexRequest("local_index").id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(
new IndexRequest().id("2") // will take global index
.source(XContentType.JSON, "field", "bulk2")
);
bulk(request);
Iterable<SearchHit> hits = searchAll("local_index", "global_index");
assertThat(hits, containsInAnyOrder(
both(hasId("1"))
.and(hasIndex("local_index")),
both(hasId("2"))
.and(hasIndex("global_index"))));
assertThat(hits, containsInAnyOrder(both(hasId("1")).and(hasIndex("local_index")), both(hasId("2")).and(hasIndex("global_index"))));
}
public void testGlobalType() throws IOException {
BulkRequest request = new BulkRequest(null, "global_type");
request.add(new IndexRequest("index").id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("2")
.source(XContentType.JSON, "field", "bulk2"));
request.add(new IndexRequest("index").id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("2").source(XContentType.JSON, "field", "bulk2"));
bulkWithTypes(request);
@ -166,28 +155,23 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
public void testTypeGlobalAndPerRequest() throws IOException {
BulkRequest request = new BulkRequest(null, "global_type");
request.add(new IndexRequest("index1", "local_type", "1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index2").id("2") // will take global type
.source(XContentType.JSON, "field", "bulk2"));
request.add(new IndexRequest("index1", "local_type", "1").source(XContentType.JSON, "field", "bulk1"));
request.add(
new IndexRequest("index2").id("2") // will take global type
.source(XContentType.JSON, "field", "bulk2")
);
bulkWithTypes(request);
Iterable<SearchHit> hits = searchAll("index1", "index2");
assertThat(hits, containsInAnyOrder(
both(hasId("1"))
.and(hasType("local_type")),
both(hasId("2"))
.and(hasType("global_type"))));
assertThat(hits, containsInAnyOrder(both(hasId("1")).and(hasType("local_type")), both(hasId("2")).and(hasType("global_type"))));
}
public void testGlobalRouting() throws IOException {
createIndexWithMultipleShards("index");
BulkRequest request = new BulkRequest((String) null);
request.add(new IndexRequest("index").id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("2")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("2").source(XContentType.JSON, "field", "bulk1"));
request.routing("1");
bulk(request);
@ -201,11 +185,8 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
public void testMixLocalAndGlobalRouting() throws IOException {
BulkRequest request = new BulkRequest((String) null);
request.routing("globalRouting");
request.add(new IndexRequest("index").id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id( "2")
.routing("localRouting")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest("index").id("2").routing("localRouting").source(XContentType.JSON, "field", "bulk1"));
bulk(request);
@ -215,10 +196,8 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
public void testGlobalIndexNoTypes() throws IOException {
BulkRequest request = new BulkRequest("global_index");
request.add(new IndexRequest().id("1")
.source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest().id("2")
.source(XContentType.JSON, "field", "bulk2"));
request.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "bulk1"));
request.add(new IndexRequest().id("2").source(XContentType.JSON, "field", "bulk2"));
bulk(request);
@ -227,8 +206,12 @@ public class BulkRequestWithGlobalParametersIT extends OpenSearchRestHighLevelCl
}
private BulkResponse bulkWithTypes(BulkRequest request) throws IOException {
BulkResponse bulkResponse = execute(request, highLevelClient()::bulk, highLevelClient()::bulkAsync,
expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
BulkResponse bulkResponse = execute(
request,
highLevelClient()::bulk,
highLevelClient()::bulkAsync,
expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE)
);
assertFalse(bulkResponse.hasFailures());
return bulkResponse;
}

View File

@ -33,8 +33,6 @@
package org.opensearch.client;
import org.apache.http.util.EntityUtils;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.OpenSearchException;
import org.opensearch.OpenSearchStatusException;
import org.opensearch.action.admin.cluster.health.ClusterHealthRequest;
@ -101,14 +99,19 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
setRequest.transientSettings(transientSettings);
setRequest.persistentSettings(map);
ClusterUpdateSettingsResponse setResponse = execute(setRequest, highLevelClient().cluster()::putSettings,
highLevelClient().cluster()::putSettingsAsync);
ClusterUpdateSettingsResponse setResponse = execute(
setRequest,
highLevelClient().cluster()::putSettings,
highLevelClient().cluster()::putSettingsAsync
);
assertAcked(setResponse);
assertThat(setResponse.getTransientSettings().get(transientSettingKey), notNullValue());
assertThat(setResponse.getTransientSettings().get(persistentSettingKey), nullValue());
assertThat(setResponse.getTransientSettings().get(transientSettingKey),
equalTo(transientSettingValue + ByteSizeUnit.BYTES.getSuffix()));
assertThat(
setResponse.getTransientSettings().get(transientSettingKey),
equalTo(transientSettingValue + ByteSizeUnit.BYTES.getSuffix())
);
assertThat(setResponse.getPersistentSettings().get(transientSettingKey), nullValue());
assertThat(setResponse.getPersistentSettings().get(persistentSettingKey), notNullValue());
assertThat(setResponse.getPersistentSettings().get(persistentSettingKey), equalTo(persistentSettingValue));
@ -123,8 +126,11 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
resetRequest.transientSettings(Settings.builder().putNull(transientSettingKey));
resetRequest.persistentSettings("{\"" + persistentSettingKey + "\": null }", XContentType.JSON);
ClusterUpdateSettingsResponse resetResponse = execute(resetRequest, highLevelClient().cluster()::putSettings,
highLevelClient().cluster()::putSettingsAsync);
ClusterUpdateSettingsResponse resetResponse = execute(
resetRequest,
highLevelClient().cluster()::putSettings,
highLevelClient().cluster()::putSettingsAsync
);
assertThat(resetResponse.getTransientSettings().get(transientSettingKey), equalTo(null));
assertThat(resetResponse.getPersistentSettings().get(persistentSettingKey), equalTo(null));
@ -144,11 +150,19 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = new ClusterUpdateSettingsRequest();
clusterUpdateSettingsRequest.transientSettings(Settings.builder().put(setting, value).build());
OpenSearchException exception = expectThrows(OpenSearchException.class, () -> execute(clusterUpdateSettingsRequest,
highLevelClient().cluster()::putSettings, highLevelClient().cluster()::putSettingsAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(
clusterUpdateSettingsRequest,
highLevelClient().cluster()::putSettings,
highLevelClient().cluster()::putSettingsAsync
)
);
assertThat(exception.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(exception.getMessage(), equalTo(
"OpenSearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]"));
assertThat(
exception.getMessage(),
equalTo("OpenSearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]")
);
}
public void testClusterGetSettings() throws IOException {
@ -158,14 +172,16 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
final String persistentSettingKey = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
final String persistentSettingValue = EnableAllocationDecider.Allocation.NONE.name();
Settings transientSettings =
Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
Settings transientSettings = Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
Settings persistentSettings = Settings.builder().put(persistentSettingKey, persistentSettingValue).build();
clusterUpdateSettings(persistentSettings, transientSettings);
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
ClusterGetSettingsResponse response = execute(
request, highLevelClient().cluster()::getSettings, highLevelClient().cluster()::getSettingsAsync);
request,
highLevelClient().cluster()::getSettings,
highLevelClient().cluster()::getSettingsAsync
);
assertEquals(persistentSettings, response.getPersistentSettings());
assertEquals(transientSettings, response.getTransientSettings());
assertEquals(0, response.getDefaultSettings().size());
@ -178,14 +194,16 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
final String persistentSettingKey = EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
final String persistentSettingValue = EnableAllocationDecider.Allocation.NONE.name();
Settings transientSettings =
Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
Settings transientSettings = Settings.builder().put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES).build();
Settings persistentSettings = Settings.builder().put(persistentSettingKey, persistentSettingValue).build();
clusterUpdateSettings(persistentSettings, transientSettings);
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest().includeDefaults(true);
ClusterGetSettingsResponse response = execute(
request, highLevelClient().cluster()::getSettings, highLevelClient().cluster()::getSettingsAsync);
request,
highLevelClient().cluster()::getSettings,
highLevelClient().cluster()::getSettingsAsync
);
assertEquals(persistentSettings, response.getPersistentSettings());
assertEquals(transientSettings, response.getTransientSettings());
assertThat(response.getDefaultSettings().size(), greaterThan(0));
@ -209,8 +227,7 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
request.timeout("5s");
ClusterHealthResponse response = execute(request, highLevelClient().cluster()::health, highLevelClient().cluster()::healthAsync);
logger.info("Shard stats\n{}", EntityUtils.toString(
client().performRequest(new Request("GET", "/_cat/shards")).getEntity()));
logger.info("Shard stats\n{}", EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/shards")).getEntity()));
assertThat(response.getIndices().size(), equalTo(0));
}
@ -230,8 +247,7 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
request.level(ClusterHealthRequest.Level.INDICES);
ClusterHealthResponse response = execute(request, highLevelClient().cluster()::health, highLevelClient().cluster()::healthAsync);
logger.info("Shard stats\n{}", EntityUtils.toString(
client().performRequest(new Request("GET", "/_cat/shards")).getEntity()));
logger.info("Shard stats\n{}", EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/shards")).getEntity()));
assertYellowShards(response);
assertThat(response.getIndices().size(), equalTo(2));
for (Map.Entry<String, ClusterIndexHealth> entry : response.getIndices().entrySet()) {
@ -253,7 +269,6 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
assertThat(response.getUnassignedShards(), equalTo(2));
}
public void testClusterHealthYellowSpecificIndex() throws IOException {
createIndex("index", Settings.EMPTY);
createIndex("index2", Settings.EMPTY);
@ -280,14 +295,14 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
private static void assertYellowIndex(String indexName, ClusterIndexHealth indexHealth, boolean emptyShards) {
assertThat(indexHealth, notNullValue());
assertThat(indexHealth.getIndex(),equalTo(indexName));
assertThat(indexHealth.getActivePrimaryShards(),equalTo(1));
assertThat(indexHealth.getActiveShards(),equalTo(1));
assertThat(indexHealth.getNumberOfReplicas(),equalTo(1));
assertThat(indexHealth.getInitializingShards(),equalTo(0));
assertThat(indexHealth.getUnassignedShards(),equalTo(1));
assertThat(indexHealth.getRelocatingShards(),equalTo(0));
assertThat(indexHealth.getStatus(),equalTo(ClusterHealthStatus.YELLOW));
assertThat(indexHealth.getIndex(), equalTo(indexName));
assertThat(indexHealth.getActivePrimaryShards(), equalTo(1));
assertThat(indexHealth.getActiveShards(), equalTo(1));
assertThat(indexHealth.getNumberOfReplicas(), equalTo(1));
assertThat(indexHealth.getInitializingShards(), equalTo(0));
assertThat(indexHealth.getUnassignedShards(), equalTo(1));
assertThat(indexHealth.getRelocatingShards(), equalTo(0));
assertThat(indexHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW));
if (emptyShards) {
assertThat(indexHealth.getShards().size(), equalTo(0));
} else {
@ -340,20 +355,21 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
settingsRequest.includeDefaults(true);
ClusterGetSettingsResponse settingsResponse = highLevelClient().cluster().getSettings(settingsRequest, RequestOptions.DEFAULT);
List<String> seeds = SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS
.getConcreteSettingForNamespace(clusterAlias)
.get(settingsResponse.getTransientSettings());
int connectionsPerCluster = SniffConnectionStrategy.REMOTE_CONNECTIONS_PER_CLUSTER
.get(settingsResponse.getTransientSettings());
TimeValue initialConnectionTimeout = RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING
.get(settingsResponse.getTransientSettings());
boolean skipUnavailable = RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE
.getConcreteSettingForNamespace(clusterAlias)
.get(settingsResponse.getTransientSettings());
List<String> seeds = SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS.getConcreteSettingForNamespace(clusterAlias)
.get(settingsResponse.getTransientSettings());
int connectionsPerCluster = SniffConnectionStrategy.REMOTE_CONNECTIONS_PER_CLUSTER.get(settingsResponse.getTransientSettings());
TimeValue initialConnectionTimeout = RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(
settingsResponse.getTransientSettings()
);
boolean skipUnavailable = RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSettingForNamespace(clusterAlias)
.get(settingsResponse.getTransientSettings());
RemoteInfoRequest request = new RemoteInfoRequest();
RemoteInfoResponse response = execute(request, highLevelClient().cluster()::remoteInfo,
highLevelClient().cluster()::remoteInfoAsync);
RemoteInfoResponse response = execute(
request,
highLevelClient().cluster()::remoteInfo,
highLevelClient().cluster()::remoteInfoAsync
);
assertThat(response, notNullValue());
assertThat(response.getInfos().size(), equalTo(1));
@ -376,40 +392,61 @@ public class ClusterClientIT extends OpenSearchRestHighLevelClientTestCase {
AliasMetadata alias = AliasMetadata.builder("alias").writeIndex(true).build();
Template template = new Template(settings, mappings, Collections.singletonMap("alias", alias));
ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>());
PutComponentTemplateRequest putComponentTemplateRequest =
new PutComponentTemplateRequest().name(templateName).create(true).componentTemplate(componentTemplate);
PutComponentTemplateRequest putComponentTemplateRequest = new PutComponentTemplateRequest().name(templateName)
.create(true)
.componentTemplate(componentTemplate);
AcknowledgedResponse response = execute(putComponentTemplateRequest,
highLevelClient().cluster()::putComponentTemplate, highLevelClient().cluster()::putComponentTemplateAsync);
AcknowledgedResponse response = execute(
putComponentTemplateRequest,
highLevelClient().cluster()::putComponentTemplate,
highLevelClient().cluster()::putComponentTemplateAsync
);
assertThat(response.isAcknowledged(), equalTo(true));
ComponentTemplatesExistRequest componentTemplatesExistRequest = new ComponentTemplatesExistRequest(templateName);
boolean exist = execute(componentTemplatesExistRequest,
highLevelClient().cluster()::existsComponentTemplate, highLevelClient().cluster()::existsComponentTemplateAsync);
boolean exist = execute(
componentTemplatesExistRequest,
highLevelClient().cluster()::existsComponentTemplate,
highLevelClient().cluster()::existsComponentTemplateAsync
);
assertTrue(exist);
GetComponentTemplatesRequest getComponentTemplatesRequest = new GetComponentTemplatesRequest(templateName);
GetComponentTemplatesResponse getResponse = execute(getComponentTemplatesRequest,
highLevelClient().cluster()::getComponentTemplate, highLevelClient().cluster()::getComponentTemplateAsync);
GetComponentTemplatesResponse getResponse = execute(
getComponentTemplatesRequest,
highLevelClient().cluster()::getComponentTemplate,
highLevelClient().cluster()::getComponentTemplateAsync
);
assertThat(getResponse.getComponentTemplates().size(), equalTo(1));
assertThat(getResponse.getComponentTemplates().containsKey(templateName), equalTo(true));
assertThat(getResponse.getComponentTemplates().get(templateName), equalTo(componentTemplate));
DeleteComponentTemplateRequest deleteComponentTemplateRequest = new DeleteComponentTemplateRequest(templateName);
response = execute(deleteComponentTemplateRequest, highLevelClient().cluster()::deleteComponentTemplate,
highLevelClient().cluster()::deleteComponentTemplateAsync);
response = execute(
deleteComponentTemplateRequest,
highLevelClient().cluster()::deleteComponentTemplate,
highLevelClient().cluster()::deleteComponentTemplateAsync
);
assertThat(response.isAcknowledged(), equalTo(true));
OpenSearchStatusException statusException = expectThrows(OpenSearchStatusException.class,
() -> execute(getComponentTemplatesRequest,
highLevelClient().cluster()::getComponentTemplate, highLevelClient().cluster()::getComponentTemplateAsync));
OpenSearchStatusException statusException = expectThrows(
OpenSearchStatusException.class,
() -> execute(
getComponentTemplatesRequest,
highLevelClient().cluster()::getComponentTemplate,
highLevelClient().cluster()::getComponentTemplateAsync
)
);
assertThat(statusException.status(), equalTo(RestStatus.NOT_FOUND));
exist = execute(componentTemplatesExistRequest,
highLevelClient().cluster()::existsComponentTemplate, highLevelClient().cluster()::existsComponentTemplateAsync);
exist = execute(
componentTemplatesExistRequest,
highLevelClient().cluster()::existsComponentTemplate,
highLevelClient().cluster()::existsComponentTemplateAsync
);
assertFalse(exist);
}

View File

@ -34,7 +34,6 @@ package org.opensearch.client;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.opensearch.client.Request;
import org.opensearch.action.admin.cluster.health.ClusterHealthRequest;
import org.opensearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;

View File

@ -32,7 +32,6 @@
package org.opensearch.client;
import org.opensearch.client.RequestOptions;
import org.opensearch.OpenSearchException;
import org.opensearch.OpenSearchStatusException;
import org.opensearch.action.DocWriteRequest;
@ -104,7 +103,9 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
// Testing deletion
String docId = "id";
IndexResponse indexResponse = highLevelClient().index(
new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")),
RequestOptions.DEFAULT
);
assertThat(indexResponse.getSeqNo(), greaterThanOrEqualTo(0L));
DeleteRequest deleteRequest = new DeleteRequest("index", docId);
if (randomBoolean()) {
@ -131,23 +132,35 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
// Testing version conflict
String docId = "version_conflict";
highLevelClient().index(
new IndexRequest("index").id( docId).source(Collections.singletonMap("foo", "bar")), RequestOptions.DEFAULT);
new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")),
RequestOptions.DEFAULT
);
DeleteRequest deleteRequest = new DeleteRequest("index", docId).setIfSeqNo(2).setIfPrimaryTerm(2);
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync)
);
assertEquals(RestStatus.CONFLICT, exception.status());
assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[" + docId + "]: " +
"version conflict, required seqNo [2], primary term [2]. current document has seqNo [3] and primary term [1]]",
exception.getMessage());
assertEquals(
"OpenSearch exception [type=version_conflict_engine_exception, reason=["
+ docId
+ "]: "
+ "version conflict, required seqNo [2], primary term [2]. current document has seqNo [3] and primary term [1]]",
exception.getMessage()
);
assertEquals("index", exception.getMetadata("opensearch.index").get(0));
}
{
// Testing version type
String docId = "version_type";
highLevelClient().index(
new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar"))
.versionType(VersionType.EXTERNAL).version(12), RequestOptions.DEFAULT);
DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(13);
new IndexRequest("index").id(docId)
.source(Collections.singletonMap("foo", "bar"))
.versionType(VersionType.EXTERNAL)
.version(12),
RequestOptions.DEFAULT
);
DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(13);
DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
assertEquals("index", deleteResponse.getIndex());
assertEquals("_doc", deleteResponse.getType());
@ -158,23 +171,33 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
// Testing version type with a wrong version
String docId = "wrong_version";
highLevelClient().index(
new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar"))
.versionType(VersionType.EXTERNAL).version(12), RequestOptions.DEFAULT);
new IndexRequest("index").id(docId)
.source(Collections.singletonMap("foo", "bar"))
.versionType(VersionType.EXTERNAL)
.version(12),
RequestOptions.DEFAULT
);
OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () -> {
DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(10);
DeleteRequest deleteRequest = new DeleteRequest("index", docId).versionType(VersionType.EXTERNAL).version(10);
execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
});
assertEquals(RestStatus.CONFLICT, exception.status());
assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[" +
docId + "]: version conflict, current version [12] is higher or equal to the one provided [10]]", exception.getMessage());
assertEquals(
"OpenSearch exception [type=version_conflict_engine_exception, reason=["
+ docId
+ "]: version conflict, current version [12] is higher or equal to the one provided [10]]",
exception.getMessage()
);
assertEquals("index", exception.getMetadata("opensearch.index").get(0));
}
{
// Testing routing
String docId = "routing";
highLevelClient().index(new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")).routing("foo"),
RequestOptions.DEFAULT);
DeleteRequest deleteRequest = new DeleteRequest("index", docId).routing("foo");
highLevelClient().index(
new IndexRequest("index").id(docId).source(Collections.singletonMap("foo", "bar")).routing("foo"),
RequestOptions.DEFAULT
);
DeleteRequest deleteRequest = new DeleteRequest("index", docId).routing("foo");
DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
assertEquals("index", deleteResponse.getIndex());
assertEquals("_doc", deleteResponse.getType());
@ -187,17 +210,20 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
String docId = "id";
IndexRequest indexRequest = new IndexRequest("index", "type", docId);
indexRequest.source(Collections.singletonMap("foo", "bar"));
execute(indexRequest,
execute(
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
);
DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
DeleteResponse deleteResponse = execute(deleteRequest,
DeleteResponse deleteResponse = execute(
deleteRequest,
highLevelClient()::delete,
highLevelClient()::deleteAsync,
expectWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE));
expectWarnings(RestDeleteAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals("index", deleteResponse.getIndex());
assertEquals("type", deleteResponse.getType());
@ -276,20 +302,16 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
final String noSourceIndex = "no_source";
{
// Prepare
Settings settings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
Settings settings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
String mapping = "\"_source\": {\"enabled\": false}";
createIndex(noSourceIndex, settings, mapping);
assertEquals(
RestStatus.OK,
highLevelClient().bulk(
new BulkRequest()
.add(new IndexRequest(noSourceIndex).id("1")
.source(Collections.singletonMap("foo", 1), XContentType.JSON))
.add(new IndexRequest(noSourceIndex).id("2")
.source(Collections.singletonMap("foo", 2), XContentType.JSON))
new BulkRequest().add(
new IndexRequest(noSourceIndex).id("1").source(Collections.singletonMap("foo", 1), XContentType.JSON)
)
.add(new IndexRequest(noSourceIndex).id("2").source(Collections.singletonMap("foo", 2), XContentType.JSON))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE),
RequestOptions.DEFAULT
).status()
@ -309,8 +331,10 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
public void testGet() throws IOException {
{
GetRequest getRequest = new GetRequest("index", "id");
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]", exception.getMessage());
assertEquals("index", exception.getMetadata("opensearch.index").get(0));
@ -322,11 +346,17 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
highLevelClient().index(index, RequestOptions.DEFAULT);
{
GetRequest getRequest = new GetRequest("index", "id").version(2);
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)
);
assertEquals(RestStatus.CONFLICT, exception.status());
assertEquals("OpenSearch exception [type=version_conflict_engine_exception, " + "reason=[id]: " +
"version conflict, current version [1] is different than the one provided [2]]", exception.getMessage());
assertEquals(
"OpenSearch exception [type=version_conflict_engine_exception, "
+ "reason=[id]: "
+ "version conflict, current version [1] is different than the one provided [2]]",
exception.getMessage()
);
assertEquals("index", exception.getMetadata("opensearch.index").get(0));
}
{
@ -369,9 +399,9 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
{
GetRequest getRequest = new GetRequest("index", "id");
if (randomBoolean()) {
getRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, Strings.EMPTY_ARRAY));
getRequest.fetchSourceContext(new FetchSourceContext(true, new String[] { "field1" }, Strings.EMPTY_ARRAY));
} else {
getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[]{"field2"}));
getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[] { "field2" }));
}
GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
assertEquals("index", getResponse.getIndex());
@ -391,17 +421,20 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
IndexRequest indexRequest = new IndexRequest("index", "type", "id");
indexRequest.source(document, XContentType.JSON);
indexRequest.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
execute(indexRequest,
execute(
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
);
GetRequest getRequest = new GetRequest("index", "type", "id");
GetResponse getResponse = execute(getRequest,
GetResponse getResponse = execute(
getRequest,
highLevelClient()::get,
highLevelClient()::getAsync,
expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE));
expectWarnings(RestGetAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals("index", getResponse.getIndex());
assertEquals("type", getResponse.getType());
@ -426,16 +459,20 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
assertEquals("id1", response.getResponses()[0].getFailure().getId());
assertNull(response.getResponses()[0].getFailure().getType());
assertEquals("index", response.getResponses()[0].getFailure().getIndex());
assertEquals("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]",
response.getResponses()[0].getFailure().getFailure().getMessage());
assertEquals(
"OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]",
response.getResponses()[0].getFailure().getFailure().getMessage()
);
assertTrue(response.getResponses()[1].isFailed());
assertNull(response.getResponses()[1].getResponse());
assertEquals("id2", response.getResponses()[1].getId());
assertNull(response.getResponses()[1].getType());
assertEquals("index", response.getResponses()[1].getIndex());
assertEquals("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]",
response.getResponses()[1].getFailure().getFailure().getMessage());
assertEquals(
"OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]",
response.getResponses()[1].getFailure().getFailure().getMessage()
);
}
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
@ -472,20 +509,20 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
public void testMultiGetWithTypes() throws IOException {
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
bulk.add(new IndexRequest("index", "type", "id1")
.source("{\"field\":\"value1\"}", XContentType.JSON));
bulk.add(new IndexRequest("index", "type", "id2")
.source("{\"field\":\"value2\"}", XContentType.JSON));
bulk.add(new IndexRequest("index", "type", "id1").source("{\"field\":\"value1\"}", XContentType.JSON));
bulk.add(new IndexRequest("index", "type", "id2").source("{\"field\":\"value2\"}", XContentType.JSON));
highLevelClient().bulk(bulk, expectWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE));
MultiGetRequest multiGetRequest = new MultiGetRequest();
multiGetRequest.add("index", "id1");
multiGetRequest.add("index", "type", "id2");
MultiGetResponse response = execute(multiGetRequest,
MultiGetResponse response = execute(
multiGetRequest,
highLevelClient()::mget,
highLevelClient()::mgetAsync,
expectWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE));
expectWarnings(RestMultiGetAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals(2, response.getResponses().length);
GetResponse firstResponse = response.getResponses()[0].getResponse();
@ -502,8 +539,10 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
public void testGetSource() throws IOException {
{
GetSourceRequest getRequest = new GetSourceRequest("index", "id");
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync)
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]", exception.getMessage());
assertEquals("index", exception.getMetadata("opensearch.index").get(0));
@ -523,11 +562,15 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
}
{
GetSourceRequest getRequest = new GetSourceRequest("index", "does_not_exist");
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync)
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals("OpenSearch exception [type=resource_not_found_exception, " +
"reason=Document not found [index]/[_doc]/[does_not_exist]]", exception.getMessage());
assertEquals(
"OpenSearch exception [type=resource_not_found_exception, " + "reason=Document not found [index]/[_doc]/[does_not_exist]]",
exception.getMessage()
);
}
{
GetSourceRequest getRequest = new GetSourceRequest("index", "id");
@ -540,7 +583,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
}
{
GetSourceRequest getRequest = new GetSourceRequest("index", "id");
getRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, Strings.EMPTY_ARRAY));
getRequest.fetchSourceContext(new FetchSourceContext(true, new String[] { "field1" }, Strings.EMPTY_ARRAY));
GetSourceResponse response = execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync);
Map<String, Object> expectedResponse = new HashMap<>();
expectedResponse.put("field1", "value1");
@ -548,7 +591,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
}
{
GetSourceRequest getRequest = new GetSourceRequest("index", "id");
getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[]{"field1"}));
getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[] { "field1" }));
GetSourceResponse response = execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync);
Map<String, Object> expectedResponse = new HashMap<>();
expectedResponse.put("field2", "value2");
@ -557,10 +600,15 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
{
GetSourceRequest getRequest = new GetSourceRequest("index", "id");
getRequest.fetchSourceContext(new FetchSourceContext(false));
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync));
assertEquals("OpenSearch exception [type=action_request_validation_exception, " +
"reason=Validation Failed: 1: fetching source can not be disabled;]", exception.getMessage());
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(getRequest, highLevelClient()::getSource, highLevelClient()::getSourceAsync)
);
assertEquals(
"OpenSearch exception [type=action_request_validation_exception, "
+ "reason=Validation Failed: 1: fetching source can not be disabled;]",
exception.getMessage()
);
}
}
@ -616,9 +664,11 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
execute(wrongRequest, highLevelClient()::index, highLevelClient()::indexAsync);
});
assertEquals(RestStatus.CONFLICT, exception.status());
assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[id]: " +
"version conflict, required seqNo [1], primary term [5]. current document has seqNo [2] and primary term [1]]",
exception.getMessage());
assertEquals(
"OpenSearch exception [type=version_conflict_engine_exception, reason=[id]: "
+ "version conflict, required seqNo [1], primary term [5]. current document has seqNo [2] and primary term [1]]",
exception.getMessage()
);
assertEquals("index", exception.getMetadata("opensearch.index").get(0));
}
{
@ -631,8 +681,10 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
});
assertEquals(RestStatus.BAD_REQUEST, exception.status());
assertEquals("OpenSearch exception [type=illegal_argument_exception, " +
"reason=pipeline with id [missing] does not exist]", exception.getMessage());
assertEquals(
"OpenSearch exception [type=illegal_argument_exception, " + "reason=pipeline with id [missing] does not exist]",
exception.getMessage()
);
}
{
IndexRequest indexRequest = new IndexRequest("index").id("external_version_type");
@ -658,13 +710,17 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
assertEquals("_doc", indexResponse.getType());
assertEquals("with_create_op_type", indexResponse.getId());
OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () -> {
execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
});
OpenSearchStatusException exception = expectThrows(
OpenSearchStatusException.class,
() -> { execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync); }
);
assertEquals(RestStatus.CONFLICT, exception.status());
assertEquals("OpenSearch exception [type=version_conflict_engine_exception, reason=[with_create_op_type]: " +
"version conflict, document already exists (current version [1])]", exception.getMessage());
assertEquals(
"OpenSearch exception [type=version_conflict_engine_exception, reason=[with_create_op_type]: "
+ "version conflict, document already exists (current version [1])]",
exception.getMessage()
);
}
}
@ -681,7 +737,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
assertEquals(RestStatus.CREATED, indexResponse.status());
assertEquals("index", indexResponse.getIndex());
assertEquals("some_type", indexResponse.getType());
assertEquals("some_id",indexResponse.getId());
assertEquals("some_id", indexResponse.getId());
}
public void testUpdate() throws IOException {
@ -689,19 +745,22 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
UpdateRequest updateRequest = new UpdateRequest("index", "does_not_exist");
updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values()));
OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () ->
execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync));
OpenSearchStatusException exception = expectThrows(
OpenSearchStatusException.class,
() -> execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync)
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals("OpenSearch exception [type=document_missing_exception, reason=[_doc][does_not_exist]: document missing]",
exception.getMessage());
assertEquals(
"OpenSearch exception [type=document_missing_exception, reason=[_doc][does_not_exist]: document missing]",
exception.getMessage()
);
}
{
IndexRequest indexRequest = new IndexRequest("index").id( "id");
IndexRequest indexRequest = new IndexRequest("index").id("id");
indexRequest.source(singletonMap("field", "value"));
IndexResponse indexResponse = highLevelClient().index(indexRequest, RequestOptions.DEFAULT);
assertEquals(RestStatus.CREATED, indexResponse.status());
long lastUpdateSeqNo;
long lastUpdatePrimaryTerm;
{
@ -726,9 +785,11 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
updateRequest.setIfSeqNo(lastUpdateSeqNo + (randomBoolean() ? 0 : 1));
updateRequest.setIfPrimaryTerm(lastUpdatePrimaryTerm + 1);
}
OpenSearchStatusException exception = expectThrows(OpenSearchStatusException.class, () ->
execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync));
assertEquals(exception.toString(),RestStatus.CONFLICT, exception.status());
OpenSearchStatusException exception = expectThrows(
OpenSearchStatusException.class,
() -> execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync)
);
assertEquals(exception.toString(), RestStatus.CONFLICT, exception.status());
assertThat(exception.getMessage(), containsString("OpenSearch exception [type=version_conflict_engine_exception"));
}
{
@ -860,15 +921,18 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
updateRequest.upsert(new IndexRequest().source(Collections.singletonMap("field", "upsert"), XContentType.YAML));
execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
});
assertEquals("Update request cannot have different content types for doc [JSON] and upsert [YAML] documents",
exception.getMessage());
assertEquals(
"Update request cannot have different content types for doc [JSON] and upsert [YAML] documents",
exception.getMessage()
);
}
}
public void testUpdateWithTypes() throws IOException {
IndexRequest indexRequest = new IndexRequest("index", "type", "id");
indexRequest.source(singletonMap("field", "value"));
IndexResponse indexResponse = execute(indexRequest,
IndexResponse indexResponse = execute(
indexRequest,
highLevelClient()::index,
highLevelClient()::indexAsync,
expectWarnings(RestIndexAction.TYPES_DEPRECATION_MESSAGE)
@ -876,10 +940,12 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
UpdateResponse updateResponse = execute(updateRequest,
UpdateResponse updateResponse = execute(
updateRequest,
highLevelClient()::update,
highLevelClient()::updateAsync,
expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE));
expectWarnings(RestUpdateAction.TYPES_DEPRECATION_MESSAGE)
);
assertEquals(RestStatus.OK, updateResponse.status());
assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion());
@ -900,16 +966,18 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values());
if (opType == DocWriteRequest.OpType.DELETE) {
if (erroneous == false) {
assertEquals(RestStatus.CREATED,
highLevelClient().index(
new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
assertEquals(
RestStatus.CREATED,
highLevelClient().index(new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status()
);
}
DeleteRequest deleteRequest = new DeleteRequest("index", id);
bulkRequest.add(deleteRequest);
} else {
BytesReference source = BytesReference.bytes(XContentBuilder.builder(xContentType.xContent())
.startObject().field("id", i).endObject());
BytesReference source = BytesReference.bytes(
XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject()
);
if (opType == DocWriteRequest.OpType.INDEX) {
IndexRequest indexRequest = new IndexRequest("index").id(id).source(source, xContentType);
if (erroneous) {
@ -926,12 +994,12 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
bulkRequest.add(createRequest);
} else if (opType == DocWriteRequest.OpType.UPDATE) {
UpdateRequest updateRequest = new UpdateRequest("index", id)
.doc(new IndexRequest().source(source, xContentType));
UpdateRequest updateRequest = new UpdateRequest("index", id).doc(new IndexRequest().source(source, xContentType));
if (erroneous == false) {
assertEquals(RestStatus.CREATED,
highLevelClient().index(
new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
assertEquals(
RestStatus.CREATED,
highLevelClient().index(new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status()
);
}
bulkRequest.add(updateRequest);
}
@ -974,13 +1042,12 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
}
};
try (BulkProcessor processor = BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(request,
RequestOptions.DEFAULT, bulkListener), listener)
.setConcurrentRequests(0)
.setBulkSize(new ByteSizeValue(5, ByteSizeUnit.GB))
.setBulkActions(nbItems + 1)
.build()) {
try (
BulkProcessor processor = BulkProcessor.builder(
(request, bulkListener) -> highLevelClient().bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
listener
).setConcurrentRequests(0).setBulkSize(new ByteSizeValue(5, ByteSizeUnit.GB)).setBulkActions(nbItems + 1).build()
) {
for (int i = 0; i < nbItems; i++) {
String id = String.valueOf(i);
boolean erroneous = randomBoolean();
@ -989,9 +1056,10 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values());
if (opType == DocWriteRequest.OpType.DELETE) {
if (erroneous == false) {
assertEquals(RestStatus.CREATED,
highLevelClient().index(
new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
assertEquals(
RestStatus.CREATED,
highLevelClient().index(new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status()
);
}
DeleteRequest deleteRequest = new DeleteRequest("index", id);
processor.add(deleteRequest);
@ -1013,12 +1081,13 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
processor.add(createRequest);
} else if (opType == DocWriteRequest.OpType.UPDATE) {
UpdateRequest updateRequest = new UpdateRequest("index", id)
.doc(new IndexRequest().source(xContentType, "id", i));
UpdateRequest updateRequest = new UpdateRequest("index", id).doc(new IndexRequest().source(xContentType, "id", i));
if (erroneous == false) {
assertEquals(RestStatus.CREATED,
highLevelClient().index(
new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT).status());
assertEquals(
RestStatus.CREATED,
highLevelClient().index(new IndexRequest("index").id(id).source("field", -1), RequestOptions.DEFAULT)
.status()
);
}
processor.add(updateRequest);
}
@ -1028,7 +1097,6 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
assertNull(requestRef.get());
}
BulkResponse bulkResponse = responseRef.get();
BulkRequest bulkRequest = requestRef.get();
@ -1065,8 +1133,8 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
public void testUrlEncode() throws IOException {
String indexPattern = "<logstash-{now/M}>";
String expectedIndex = "logstash-" +
DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy());
String expectedIndex = "logstash-"
+ DateTimeFormat.forPattern("YYYY.MM.dd").print(new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy());
{
IndexRequest indexRequest = new IndexRequest(indexPattern).id("id#1");
indexRequest.source("field", "value");
@ -1106,7 +1174,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
}
public void testParamsEncode() throws IOException {
//parameters are encoded by the low-level client but let's test that everything works the same when we use the high-level one
// parameters are encoded by the low-level client but let's test that everything works the same when we use the high-level one
String routing = "routing/中文value#1?";
{
IndexRequest indexRequest = new IndexRequest("index").id("id");
@ -1151,20 +1219,16 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
final String sourceIndex = "index1";
{
// prepare : index docs
Settings settings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
Settings settings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
String mappings = "\"properties\":{\"field\":{\"type\":\"text\"}}";
createIndex(sourceIndex, settings, mappings);
assertEquals(
RestStatus.OK,
highLevelClient().bulk(
new BulkRequest()
.add(new IndexRequest(sourceIndex).id("1")
.source(Collections.singletonMap("field", "value1"), XContentType.JSON))
.add(new IndexRequest(sourceIndex).id("2")
.source(Collections.singletonMap("field", "value2"), XContentType.JSON))
new BulkRequest().add(
new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("field", "value1"), XContentType.JSON)
)
.add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("field", "value2"), XContentType.JSON))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE),
RequestOptions.DEFAULT
).status()
@ -1178,11 +1242,19 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
TermVectorsResponse.TermVector.Token expectedToken = new TermVectorsResponse.TermVector.Token(0, 6, 0, null);
TermVectorsResponse.TermVector.Term expectedTerm = new TermVectorsResponse.TermVector.Term(
"value1", 1, null, null, null, Collections.singletonList(expectedToken));
TermVectorsResponse.TermVector.FieldStatistics expectedFieldStats =
new TermVectorsResponse.TermVector.FieldStatistics(2, 2, 2);
TermVectorsResponse.TermVector expectedTV =
new TermVectorsResponse.TermVector("field", expectedFieldStats, Collections.singletonList(expectedTerm));
"value1",
1,
null,
null,
null,
Collections.singletonList(expectedToken)
);
TermVectorsResponse.TermVector.FieldStatistics expectedFieldStats = new TermVectorsResponse.TermVector.FieldStatistics(2, 2, 2);
TermVectorsResponse.TermVector expectedTV = new TermVectorsResponse.TermVector(
"field",
expectedFieldStats,
Collections.singletonList(expectedTerm)
);
List<TermVectorsResponse.TermVector> expectedTVlist = Collections.singletonList(expectedTV);
assertThat(tvResponse.getIndex(), equalTo(sourceIndex));
@ -1200,11 +1272,19 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
TermVectorsResponse.TermVector.Token expectedToken = new TermVectorsResponse.TermVector.Token(0, 6, 0, null);
TermVectorsResponse.TermVector.Term expectedTerm = new TermVectorsResponse.TermVector.Term(
"valuex", 1, null, null, null, Collections.singletonList(expectedToken));
TermVectorsResponse.TermVector.FieldStatistics expectedFieldStats =
new TermVectorsResponse.TermVector.FieldStatistics(2, 2, 2);
TermVectorsResponse.TermVector expectedTV =
new TermVectorsResponse.TermVector("field", expectedFieldStats, Collections.singletonList(expectedTerm));
"valuex",
1,
null,
null,
null,
Collections.singletonList(expectedToken)
);
TermVectorsResponse.TermVector.FieldStatistics expectedFieldStats = new TermVectorsResponse.TermVector.FieldStatistics(2, 2, 2);
TermVectorsResponse.TermVector expectedTV = new TermVectorsResponse.TermVector(
"field",
expectedFieldStats,
Collections.singletonList(expectedTerm)
);
List<TermVectorsResponse.TermVector> expectedTVlist = Collections.singletonList(expectedTV);
assertThat(tvResponse.getIndex(), equalTo(sourceIndex));
@ -1217,8 +1297,10 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
public void testTermvectorsWithNonExistentIndex() {
TermVectorsRequest request = new TermVectorsRequest("non-existent", "non-existent");
OpenSearchException exception = expectThrows(OpenSearchException.class,
() -> execute(request, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync));
OpenSearchException exception = expectThrows(
OpenSearchException.class,
() -> execute(request, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync)
);
assertEquals(RestStatus.NOT_FOUND, exception.status());
}
@ -1227,10 +1309,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
final String sourceIndex = "index1";
{
// prepare : index docs
Settings settings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
Settings settings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build();
String mappings = "\"properties\":{\"field\":{\"type\":\"text\"}, \"field2\":{\"type\":\"text\"}}";
createIndex(sourceIndex, settings, mappings);
final Map<String, String> doc1 = new HashMap<>();
@ -1242,8 +1321,7 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
assertEquals(
RestStatus.OK,
highLevelClient().bulk(
new BulkRequest()
.add(new IndexRequest(sourceIndex).id("1").source(doc1, XContentType.JSON))
new BulkRequest().add(new IndexRequest(sourceIndex).id("1").source(doc1, XContentType.JSON))
.add(new IndexRequest(sourceIndex).id("2").source(doc2, XContentType.JSON))
.setRefreshPolicy(RefreshPolicy.IMMEDIATE),
RequestOptions.DEFAULT
@ -1252,16 +1330,19 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
}
{
// test _mtermvectors where MultiTermVectorsRequest is constructed with ids and a template
String[] expectedIds = {"1", "2"};
String[] expectedIds = { "1", "2" };
TermVectorsRequest tvRequestTemplate = new TermVectorsRequest(sourceIndex, "fake_id");
tvRequestTemplate.setFields("field");
MultiTermVectorsRequest mtvRequest = new MultiTermVectorsRequest(expectedIds, tvRequestTemplate);
MultiTermVectorsResponse mtvResponse =
execute(mtvRequest, highLevelClient()::mtermvectors, highLevelClient()::mtermvectorsAsync);
MultiTermVectorsResponse mtvResponse = execute(
mtvRequest,
highLevelClient()::mtermvectors,
highLevelClient()::mtermvectorsAsync
);
List<String> ids = new ArrayList<>();
for (TermVectorsResponse tvResponse: mtvResponse.getTermVectorsResponses()) {
for (TermVectorsResponse tvResponse : mtvResponse.getTermVectorsResponses()) {
assertThat(tvResponse.getIndex(), equalTo(sourceIndex));
assertTrue(tvResponse.getFound());
ids.add(tvResponse.getId());
@ -1281,9 +1362,12 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
TermVectorsRequest tvRequest2 = new TermVectorsRequest(sourceIndex, docBuilder);
mtvRequest.add(tvRequest2);
MultiTermVectorsResponse mtvResponse =
execute(mtvRequest, highLevelClient()::mtermvectors, highLevelClient()::mtermvectorsAsync);
for (TermVectorsResponse tvResponse: mtvResponse.getTermVectorsResponses()) {
MultiTermVectorsResponse mtvResponse = execute(
mtvRequest,
highLevelClient()::mtermvectors,
highLevelClient()::mtermvectorsAsync
);
for (TermVectorsResponse tvResponse : mtvResponse.getTermVectorsResponses()) {
assertThat(tvResponse.getIndex(), equalTo(sourceIndex));
assertTrue(tvResponse.getFound());
}
@ -1304,8 +1388,11 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
tvRequest3.setFields("field", "field2");
mtvRequest.add(tvRequest3);
MultiTermVectorsResponse mtvResponse =
execute(mtvRequest, highLevelClient()::mtermvectors, highLevelClient()::mtermvectorsAsync);
MultiTermVectorsResponse mtvResponse = execute(
mtvRequest,
highLevelClient()::mtermvectors,
highLevelClient()::mtermvectorsAsync
);
final List<String> expectedFields = new ArrayList<>();
expectedFields.add("field");
expectedFields.add("field2");
@ -1322,7 +1409,8 @@ public class CrudIT extends OpenSearchRestHighLevelClientTestCase {
assertEquals(expectedRespFields.get(i).size(), tvResponse.getTermVectorsList().size());
assertEquals(
expectedRespFields.get(i),
tvResponse.getTermVectorsList().stream().map(tv -> tv.getFieldName()).collect(Collectors.toList()));
tvResponse.getTermVectorsList().stream().map(tv -> tv.getFieldName()).collect(Collectors.toList())
);
}
}
}

View File

@ -43,11 +43,6 @@ import org.apache.http.message.BasicStatusLine;
import org.apache.http.nio.entity.NByteArrayEntity;
import org.apache.lucene.util.BytesRef;
import org.opensearch.Build;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.client.ResponseListener;
import org.opensearch.client.RestClient;
import org.opensearch.Version;
import org.opensearch.action.ActionListener;
import org.opensearch.action.main.MainRequest;
@ -92,15 +87,11 @@ public class CustomRestHighLevelClientTests extends OpenSearchTestCase {
final RestClient restClient = mock(RestClient.class);
restHighLevelClient = new CustomRestClient(restClient);
doAnswer(inv -> mockPerformRequest((Request) inv.getArguments()[0]))
.when(restClient)
.performRequest(any(Request.class));
doAnswer(inv -> mockPerformRequest((Request) inv.getArguments()[0])).when(restClient).performRequest(any(Request.class));
doAnswer(inv -> mockPerformRequestAsync(
((Request) inv.getArguments()[0]),
(ResponseListener) inv.getArguments()[1]))
.when(restClient)
.performRequestAsync(any(Request.class), any(ResponseListener.class));
doAnswer(inv -> mockPerformRequestAsync(((Request) inv.getArguments()[0]), (ResponseListener) inv.getArguments()[1])).when(
restClient
).performRequestAsync(any(Request.class), any(ResponseListener.class));
}
}
@ -140,21 +131,21 @@ public class CustomRestHighLevelClientTests extends OpenSearchTestCase {
*/
@SuppressForbidden(reason = "We're forced to uses Class#getDeclaredMethods() here because this test checks protected methods")
public void testMethodsVisibility() {
final String[] methodNames = new String[]{"convertExistsResponse",
"parseEntity",
"parseResponseException",
"performRequest",
"performRequestAndParseEntity",
"performRequestAndParseOptionalEntity",
"performRequestAsync",
"performRequestAsyncAndParseEntity",
"performRequestAsyncAndParseOptionalEntity"
};
final String[] methodNames = new String[] {
"convertExistsResponse",
"parseEntity",
"parseResponseException",
"performRequest",
"performRequestAndParseEntity",
"performRequestAndParseOptionalEntity",
"performRequestAsync",
"performRequestAsyncAndParseEntity",
"performRequestAsyncAndParseOptionalEntity" };
final Set<String> protectedMethods = Arrays.stream(RestHighLevelClient.class.getDeclaredMethods())
.filter(method -> Modifier.isProtected(method.getModifiers()))
.map(Method::getName)
.collect(Collectors.toCollection(TreeSet::new));
final Set<String> protectedMethods = Arrays.stream(RestHighLevelClient.class.getDeclaredMethods())
.filter(method -> Modifier.isProtected(method.getModifiers()))
.map(Method::getName)
.collect(Collectors.toCollection(TreeSet::new));
assertThat(protectedMethods, contains(methodNames));
}

View File

@ -97,8 +97,8 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
return p -> p.equals("") // do not add elements at the top-level as any element at this level is parsed as a new index
|| p.endsWith(".aliases") // do not add new alias
|| p.contains(".filter"); // do not insert random data into AliasMetadata#filter
|| p.endsWith(".aliases") // do not add new alias
|| p.contains(".filter"); // do not insert random data into AliasMetadata#filter
}
@Override
@ -116,44 +116,41 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
}
public void testFromXContentWithOpenSearchException() throws IOException {
String xContent =
"{" +
" \"error\": {" +
" \"root_cause\": [" +
" {" +
" \"type\": \"index_not_found_exception\"," +
" \"reason\": \"no such index [index]\"," +
" \"resource.type\": \"index_or_alias\"," +
" \"resource.id\": \"index\"," +
" \"index_uuid\": \"_na_\"," +
" \"index\": \"index\"" +
" }" +
" ]," +
" \"type\": \"index_not_found_exception\"," +
" \"reason\": \"no such index [index]\"," +
" \"resource.type\": \"index_or_alias\"," +
" \"resource.id\": \"index\"," +
" \"index_uuid\": \"_na_\"," +
" \"index\": \"index\"" +
" }," +
" \"status\": 404" +
"}";
String xContent = "{"
+ " \"error\": {"
+ " \"root_cause\": ["
+ " {"
+ " \"type\": \"index_not_found_exception\","
+ " \"reason\": \"no such index [index]\","
+ " \"resource.type\": \"index_or_alias\","
+ " \"resource.id\": \"index\","
+ " \"index_uuid\": \"_na_\","
+ " \"index\": \"index\""
+ " }"
+ " ],"
+ " \"type\": \"index_not_found_exception\","
+ " \"reason\": \"no such index [index]\","
+ " \"resource.type\": \"index_or_alias\","
+ " \"resource.id\": \"index\","
+ " \"index_uuid\": \"_na_\","
+ " \"index\": \"index\""
+ " },"
+ " \"status\": 404"
+ "}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
GetAliasesResponse getAliasesResponse = GetAliasesResponse.fromXContent(parser);
assertThat(getAliasesResponse.getError(), nullValue());
assertThat(getAliasesResponse.status(), equalTo(RestStatus.NOT_FOUND));
assertThat(getAliasesResponse.getException().getMessage(),
equalTo("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]"));
assertThat(
getAliasesResponse.getException().getMessage(),
equalTo("OpenSearch exception [type=index_not_found_exception, reason=no such index [index]]")
);
}
}
public void testFromXContentWithNoAliasFound() throws IOException {
String xContent =
"{" +
" \"error\": \"alias [aa] missing\"," +
" \"status\": 404" +
"}";
String xContent = "{" + " \"error\": \"alias [aa] missing\"," + " \"status\": 404" + "}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
GetAliasesResponse getAliasesResponse = GetAliasesResponse.fromXContent(parser);
assertThat(getAliasesResponse.status(), equalTo(RestStatus.NOT_FOUND));
@ -163,16 +160,15 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase<GetAliases
}
public void testFromXContentWithMissingAndFoundAlias() throws IOException {
String xContent =
"{" +
" \"error\": \"alias [something] missing\"," +
" \"status\": 404," +
" \"index\": {" +
" \"aliases\": {" +
" \"alias\": {}" +
" }" +
" }" +
"}";
String xContent = "{"
+ " \"error\": \"alias [something] missing\","
+ " \"status\": 404,"
+ " \"index\": {"
+ " \"aliases\": {"
+ " \"alias\": {}"
+ " }"
+ " }"
+ "}";
final String index = "index";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
GetAliasesResponse response = GetAliasesResponse.fromXContent(parser);

Some files were not shown because too many files have changed in this diff Show More