From eaa05fe76236fdba70e3c54c3b830f2269e9669a Mon Sep 17 00:00:00 2001 From: Emily S Date: Tue, 23 Oct 2018 19:09:09 +0200 Subject: [PATCH 01/67] [TEST] Minor updates to rest api spec tests (#34551) --- .../src/main/resources/rest-api-spec/test/bulk/10_basic.yml | 1 + .../resources/rest-api-spec/test/bulk/11_basic_with_types.yml | 2 ++ .../main/resources/rest-api-spec/test/msearch/10_basic.yml | 4 ++-- .../main/resources/rest-api-spec/test/tasks.list/10_basic.yml | 1 + 4 files changed, 6 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index dc7242f2875..536cb28d548 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -68,6 +68,7 @@ - skip: version: " - 6.99.99" + features: headers reason: include_type_name was introduced in 7.0.0 - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml index 233ff32b418..7e763cded31 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml @@ -58,6 +58,8 @@ --- "empty action": + - skip: + features: headers - do: catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml index fb884ddfca2..a14423cef11 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/10_basic.yml @@ -66,9 +66,9 @@ setup: "Least impact smoke test": # only passing these parameters to make sure they are consumed - do: - max_concurrent_shard_requests: 1 - max_concurrent_searches: 1 msearch: + max_concurrent_shard_requests: 1 + max_concurrent_searches: 1 body: - index: index_* - query: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml index 57bf5b629b7..4fdfc378bee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.list/10_basic.yml @@ -22,6 +22,7 @@ "tasks_list headers": - skip: version: " - 6.99.99" + features: headers reason: task headers has been added in 7.0.0 - do: From c447fc258a2fc8e7c193c928310fb4e49844e4a2 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 23 Oct 2018 12:30:23 -0500 Subject: [PATCH 02/67] ingest: documentation for the drop processor (#34570) --- docs/reference/ingest/ingest-node.asciidoc | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index bad758c8a3c..13dd3de73bd 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1260,6 +1260,21 @@ Reference key modifier example * error = REFUSED |====== +[[drop-processor]] +=== Drop Processor +Drops the document without raising any errors. This is useful to prevent the document from +getting indexed based on some condition. 
+ +[source,js] +-------------------------------------------------- +{ + "drop": { + "if" : "ctx.network_name == 'Guest'" + } +} +-------------------------------------------------- +// NOTCONSOLE + [[dot-expand-processor]] === Dot Expander Processor From ed817fb2659d4a4adbe10601886cfcbacd153c7d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 23 Oct 2018 19:37:45 +0200 Subject: [PATCH 03/67] [CCR] Move leader_index and leader_cluster parameters from resume follow to put follow api (#34638) As part of this change the leader index name and leader cluster name are stored in the CCR metadata in the follow index. The resume follow api will read that when a resume follow request is executed. --- .../xpack/ccr/CcrMultiClusterLicenseIT.java | 8 -- .../xpack/ccr/FollowIndexSecurityIT.java | 33 ++--- .../xpack/ccr/FollowIndexIT.java | 11 +- .../test/ccr/follow_and_unfollow.yml | 4 +- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 2 + .../ccr/action/AutoFollowCoordinator.java | 34 ++--- .../ccr/action/TransportPutFollowAction.java | 9 +- .../action/TransportResumeFollowAction.java | 58 ++++----- .../xpack/ccr/rest/RestPutFollowAction.java | 9 +- .../xpack/CcrSingleNodeTestCase.java | 13 +- .../elasticsearch/xpack/ccr/CcrLicenseIT.java | 5 +- .../xpack/ccr/IndexFollowingIT.java | 95 +++++--------- .../xpack/ccr/LocalIndexFollowingIT.java | 4 +- .../action/AutoFollowCoordinatorTests.java | 16 +-- .../action/PutFollowActionRequestTests.java | 23 +++- .../ResumeFollowActionRequestTests.java | 4 - .../TransportResumeFollowActionTests.java | 17 +-- .../core/ccr/action/PutFollowAction.java | 121 ++++++++++++++++-- .../core/ccr/action/ResumeFollowAction.java | 106 +++++---------- 19 files changed, 307 insertions(+), 265 deletions(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index bc3d846343a..988f6b97bd2 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -31,14 +31,6 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase { return true; } - public void testResumeFollow() { - if (runningAgainstLeaderCluster == false) { - final Request request = new Request("POST", "/follower/_ccr/resume_follow"); - request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"leader\"}"); - assertNonCompliantLicense(request); - } - } - public void testFollow() { if (runningAgainstLeaderCluster == false) { final Request request = new Request("PUT", "/follower/_ccr/follow"); diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 6d5ca4559fe..d5e7cbcce49 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -80,7 +80,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { refresh(allowedIndex); 
verifyDocuments(adminClient(), allowedIndex, numDocs); } else { - follow(allowedIndex, allowedIndex); + follow(client(), allowedIndex, allowedIndex); assertBusy(() -> verifyDocuments(client(), allowedIndex, numDocs)); assertThat(countCcrNodeTasks(), equalTo(1)); assertBusy(() -> verifyCcrMonitoring(allowedIndex, allowedIndex)); @@ -93,7 +93,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(countCcrNodeTasks(), equalTo(0)); }); - resumeFollow(allowedIndex, allowedIndex); + resumeFollow(allowedIndex); assertThat(countCcrNodeTasks(), equalTo(1)); assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/pause_follow"))); // Make sure that there are no other ccr relates operations running: @@ -106,11 +106,11 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_close"))); assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/unfollow"))); - Exception e = expectThrows(ResponseException.class, () -> resumeFollow(allowedIndex, allowedIndex)); + Exception e = expectThrows(ResponseException.class, () -> resumeFollow(allowedIndex)); assertThat(e.getMessage(), containsString("follow index [" + allowedIndex + "] does not have ccr metadata")); // User does not have manage_follow_index index privilege for 'unallowedIndex': - e = expectThrows(ResponseException.class, () -> follow(unallowedIndex, unallowedIndex)); + e = expectThrows(ResponseException.class, () -> follow(client(), unallowedIndex, unallowedIndex)); assertThat(e.getMessage(), containsString("action [indices:admin/xpack/ccr/put_follow] is unauthorized for user [test_ccr]")); // Verify that the follow index has not been created and no node tasks are running @@ -119,7 +119,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { // User does have manage_follow_index index privilege on 'allowed' index, // but not read / monitor roles on 'disallowed' index: - e = expectThrows(ResponseException.class, () -> follow(unallowedIndex, allowedIndex)); + e = expectThrows(ResponseException.class, () -> follow(client(), unallowedIndex, allowedIndex)); assertThat(e.getMessage(), containsString("insufficient privileges to follow index [unallowed-index], " + "privilege for action [indices:monitor/stats] is missing, " + "privilege for action [indices:data/read/xpack/ccr/shard_changes] is missing")); @@ -127,16 +127,20 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); - e = expectThrows(ResponseException.class, () -> resumeFollow(unallowedIndex, unallowedIndex)); + follow(adminClient(), unallowedIndex, unallowedIndex); + pauseFollow(adminClient(), unallowedIndex); + + e = expectThrows(ResponseException.class, () -> resumeFollow(unallowedIndex)); assertThat(e.getMessage(), containsString("insufficient privileges to follow index [unallowed-index], " + "privilege for action [indices:monitor/stats] is missing, " + "privilege for action [indices:data/read/xpack/ccr/shard_changes] is missing")); - assertThat(indexExists(adminClient(), unallowedIndex), is(false)); - assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); e = expectThrows(ResponseException.class, () -> client().performRequest(new Request("POST", "/" + unallowedIndex + "/_ccr/unfollow"))); assertThat(e.getMessage(), containsString("action [indices:admin/xpack/ccr/unfollow] is unauthorized 
for user [test_ccr]")); + assertOK(adminClient().performRequest(new Request("POST", "/" + unallowedIndex + "/_close"))); + assertOK(adminClient().performRequest(new Request("POST", "/" + unallowedIndex + "/_ccr/unfollow"))); + assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); } } @@ -187,7 +191,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { // Cleanup by deleting auto follow pattern and pause following: request = new Request("DELETE", "/_ccr/auto_follow/test_pattern"); assertOK(client().performRequest(request)); - pauseFollow(allowedIndex); + pauseFollow(client(), allowedIndex); } private int countCcrNodeTasks() throws IOException { @@ -228,18 +232,17 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertOK(adminClient().performRequest(new Request("POST", "/" + index + "/_refresh"))); } - private static void resumeFollow(String leaderIndex, String followIndex) throws IOException { + private static void resumeFollow(String followIndex) throws IOException { final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); - request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"" + leaderIndex + - "\", \"poll_timeout\": \"10ms\"}"); + request.setJsonEntity("{\"poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } - private static void follow(String leaderIndex, String followIndex) throws IOException { + private static void follow(RestClient client, String leaderIndex, String followIndex) throws IOException { final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); + assertOK(client.performRequest(request)); } void verifyDocuments(RestClient client, String index, int expectedNumDocs) throws IOException { @@ -302,7 +305,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } - private static void pauseFollow(String followIndex) throws IOException { + private static void pauseFollow(RestClient client, String followIndex) throws IOException { assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow"))); } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 8e50b3697f6..ff7dc9e72b5 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -71,7 +71,7 @@ public class FollowIndexIT extends ESRestTestCase { assertBusy(() -> verifyDocuments(followIndexName, numDocs)); // unfollow and then follow and then index a few docs in leader index: pauseFollow(followIndexName); - resumeFollow(leaderIndexName, followIndexName); + resumeFollow(followIndexName); try (RestClient leaderClient = buildLeaderClient()) { int id = numDocs; index(leaderClient, leaderIndexName, Integer.toString(id), "field", id, "filtered_field", "true"); @@ -84,14 +84,14 @@ public class FollowIndexIT extends ESRestTestCase { pauseFollow(followIndexName); assertOK(client().performRequest(new Request("POST", "/" + followIndexName + "/_close"))); assertOK(client().performRequest(new 
Request("POST", "/" + followIndexName + "/_ccr/unfollow"))); - Exception e = expectThrows(ResponseException.class, () -> resumeFollow(leaderIndexName, followIndexName)); + Exception e = expectThrows(ResponseException.class, () -> resumeFollow(followIndexName)); assertThat(e.getMessage(), containsString("follow index [" + followIndexName + "] does not have ccr metadata")); } } public void testFollowNonExistingLeaderIndex() throws Exception { assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); - ResponseException e = expectThrows(ResponseException.class, () -> resumeFollow("non-existing-index", "non-existing-index")); + ResponseException e = expectThrows(ResponseException.class, () -> resumeFollow("non-existing-index")); assertThat(e.getMessage(), containsString("no such index")); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); @@ -151,10 +151,9 @@ public class FollowIndexIT extends ESRestTestCase { assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh"))); } - private static void resumeFollow(String leaderIndex, String followIndex) throws IOException { + private static void resumeFollow(String followIndex) throws IOException { final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); - request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"" + leaderIndex + - "\", \"poll_timeout\": \"10ms\"}"); + request.setJsonEntity("{\"poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml index 9289be50b21..f66825d0b92 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml @@ -52,9 +52,7 @@ - do: ccr.resume_follow: index: bar - body: - leader_cluster: local - leader_index: foo + body: {} - is_true: acknowledged - do: diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 1c1cade2484..442f2309da4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -97,6 +97,8 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E public static final String CCR_CUSTOM_METADATA_KEY = "ccr"; public static final String CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS = "leader_index_shard_history_uuids"; public static final String CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY = "leader_index_uuid"; + public static final String CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY = "leader_index_name"; + public static final String CCR_CUSTOM_METADATA_LEADER_CLUSTER_NAME_KEY = "leader_cluster_name"; private final boolean enabled; private final Settings settings; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index a18ec3bf6c4..031769d0abb 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -176,11 +176,10 @@ public class AutoFollowCoordinator implements ClusterStateApplier { @Override void createAndFollow(Map headers, - ResumeFollowAction.Request followRequest, + PutFollowAction.Request request, Runnable successHandler, Consumer failureHandler) { Client followerClient = CcrLicenseChecker.wrapClient(client, headers); - PutFollowAction.Request request = new PutFollowAction.Request(followRequest); followerClient.execute( PutFollowAction.INSTANCE, request, @@ -278,7 +277,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { } private void checkAutoFollowPattern(String autoFollowPattenName, - String clusterAlias, + String leaderCluster, AutoFollowPattern autoFollowPattern, List leaderIndicesToFollow, Map headers, @@ -302,7 +301,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { resultHandler.accept(new AutoFollowResult(autoFollowPattenName, results.asList())); } } else { - followLeaderIndex(autoFollowPattenName, clusterAlias, indexToFollow, autoFollowPattern, headers, error -> { + followLeaderIndex(autoFollowPattenName, leaderCluster, indexToFollow, autoFollowPattern, headers, error -> { results.set(slot, new Tuple<>(indexToFollow, error)); if (leaderIndicesCountDown.countDown()) { resultHandler.accept(new AutoFollowResult(autoFollowPattenName, results.asList())); @@ -314,7 +313,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { } private void followLeaderIndex(String autoFollowPattenName, - String clusterAlias, + String leaderCluster, Index indexToFollow, AutoFollowPattern pattern, Map headers, @@ -322,17 +321,20 @@ public class AutoFollowCoordinator implements ClusterStateApplier { final String leaderIndexName = indexToFollow.getName(); final String followIndexName = getFollowerIndexName(pattern, leaderIndexName); - ResumeFollowAction.Request request = new ResumeFollowAction.Request(); - request.setLeaderCluster(clusterAlias); + ResumeFollowAction.Request followRequest = new ResumeFollowAction.Request(); + followRequest.setFollowerIndex(followIndexName); + followRequest.setMaxBatchOperationCount(pattern.getMaxBatchOperationCount()); + followRequest.setMaxConcurrentReadBatches(pattern.getMaxConcurrentReadBatches()); + followRequest.setMaxBatchSize(pattern.getMaxBatchSize()); + followRequest.setMaxConcurrentWriteBatches(pattern.getMaxConcurrentWriteBatches()); + followRequest.setMaxWriteBufferSize(pattern.getMaxWriteBufferSize()); + followRequest.setMaxRetryDelay(pattern.getMaxRetryDelay()); + followRequest.setPollTimeout(pattern.getPollTimeout()); + + PutFollowAction.Request request = new PutFollowAction.Request(); + request.setLeaderCluster(leaderCluster); request.setLeaderIndex(indexToFollow.getName()); - request.setFollowerIndex(followIndexName); - request.setMaxBatchOperationCount(pattern.getMaxBatchOperationCount()); - request.setMaxConcurrentReadBatches(pattern.getMaxConcurrentReadBatches()); - request.setMaxBatchSize(pattern.getMaxBatchSize()); - request.setMaxConcurrentWriteBatches(pattern.getMaxConcurrentWriteBatches()); - request.setMaxWriteBufferSize(pattern.getMaxWriteBufferSize()); - request.setMaxRetryDelay(pattern.getMaxRetryDelay()); - request.setPollTimeout(pattern.getPollTimeout()); + request.setFollowRequest(followRequest); // Execute if the create and follow api call succeeds: Runnable successHandler = () -> { @@ -418,7 +420,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { 
abstract void createAndFollow( Map headers, - ResumeFollowAction.Request followRequest, + PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler ); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 13d173ed815..66bed231f72 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -95,11 +95,11 @@ public final class TransportPutFollowAction listener.onFailure(LicenseUtils.newComplianceException("ccr")); return; } - String leaderCluster = request.getFollowRequest().getLeaderCluster(); + String leaderCluster = request.getLeaderCluster(); // Validates whether the leader cluster has been configured properly: client.getRemoteClusterClient(leaderCluster); - String leaderIndex = request.getFollowRequest().getLeaderIndex(); + String leaderIndex = request.getLeaderIndex(); createFollowerIndexAndFollowRemoteIndex(request, leaderCluster, leaderIndex, listener); } @@ -122,8 +122,7 @@ public final class TransportPutFollowAction final PutFollowAction.Request request, final ActionListener listener) { if (leaderIndexMetaData == null) { - listener.onFailure(new IllegalArgumentException("leader index [" + request.getFollowRequest().getLeaderIndex() + - "] does not exist")); + listener.onFailure(new IllegalArgumentException("leader index [" + request.getLeaderIndex() + "] does not exist")); return; } @@ -160,6 +159,8 @@ public final class TransportPutFollowAction Map metadata = new HashMap<>(); metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS, String.join(",", historyUUIDs)); metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY, leaderIndexMetaData.getIndexUUID()); + metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY, leaderIndexMetaData.getIndex().getName()); + metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_CLUSTER_NAME_KEY, request.getLeaderCluster()); imdBuilder.putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, metadata); // Copy all settings, but overwrite a few settings. 
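The hunk above is where the follower index's CCR custom metadata is now populated; the TransportResumeFollowAction diff that follows reads the same keys back instead of taking leader_cluster and leader_index from the resume request body. Below is a minimal illustrative sketch of that round trip using plain java.util maps: the class name, main method, and example values are invented for illustration, while the metadata key strings and the "does not have ccr metadata" error message are taken from the Ccr.java and TransportResumeFollowAction.java hunks in this patch (the real code stores the map via IndexMetaData.Builder#putCustom and reads it via IndexMetaData#getCustomData).

[source,java]
--------------------------------------------------
import java.util.HashMap;
import java.util.Map;

public class CcrMetadataRoundTripSketch {

    // Key strings as defined in Ccr.java by this patch.
    static final String CCR_CUSTOM_METADATA_KEY = "ccr";
    static final String LEADER_INDEX_UUID_KEY = "leader_index_uuid";
    static final String LEADER_INDEX_NAME_KEY = "leader_index_name";
    static final String LEADER_CLUSTER_NAME_KEY = "leader_cluster_name";

    public static void main(String[] args) {
        // What the put follow action now stores on the follower index
        // (example values; the real values come from the leader IndexMetaData
        // and from the put follow request).
        Map<String, String> ccrMetadata = new HashMap<>();
        ccrMetadata.put(LEADER_INDEX_UUID_KEY, "example-leader-index-uuid");
        ccrMetadata.put(LEADER_INDEX_NAME_KEY, "leader");
        ccrMetadata.put(LEADER_CLUSTER_NAME_KEY, "leader_cluster");

        // The follower index keeps this map under the "ccr" custom metadata key.
        Map<String, Map<String, String>> followerIndexCustoms = new HashMap<>();
        followerIndexCustoms.put(CCR_CUSTOM_METADATA_KEY, ccrMetadata);

        // What the resume follow action now does: read leader cluster and
        // leader index back from the stored metadata instead of the request.
        Map<String, String> stored = followerIndexCustoms.get(CCR_CUSTOM_METADATA_KEY);
        if (stored == null) {
            throw new IllegalArgumentException("follow index [follower] does not have ccr metadata");
        }
        String leaderCluster = stored.get(LEADER_CLUSTER_NAME_KEY);
        String leaderIndex = stored.get(LEADER_INDEX_NAME_KEY);
        System.out.println("resuming follow of " + leaderCluster + ":" + leaderIndex);
    }
}
--------------------------------------------------
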
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 569e2d2cacf..d65189434fa 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexingSlowLog; import org.elasticsearch.index.SearchSlowLog; @@ -97,33 +98,34 @@ public class TransportResumeFollowAction extends HandledTransportAction listener) { final ClusterState state = clusterService.state(); final IndexMetaData followerIndexMetadata = state.getMetaData().index(request.getFollowerIndex()); + if (followerIndexMetadata == null) { + listener.onFailure(new IndexNotFoundException(request.getFollowerIndex())); + return; + } + + final Map ccrMetadata = followerIndexMetadata.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY); + if (ccrMetadata == null) { + throw new IllegalArgumentException("follow index ["+ request.getFollowerIndex() + "] does not have ccr metadata"); + } + final String leaderCluster = ccrMetadata.get(Ccr.CCR_CUSTOM_METADATA_LEADER_CLUSTER_NAME_KEY); + // Validates whether the leader cluster has been configured properly: + client.getRemoteClusterClient(leaderCluster); + final String leaderIndex = ccrMetadata.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY); ccrLicenseChecker.checkRemoteClusterLicenseAndFetchLeaderIndexMetadataAndHistoryUUIDs( - client, - clusterAlias, - leaderIndex, - listener::onFailure, - (leaderHistoryUUID, leaderIndexMetadata) -> { - try { - start(request, clusterAlias, leaderIndexMetadata, followerIndexMetadata, leaderHistoryUUID, listener); - } catch (final IOException e) { - listener.onFailure(e); - } - }); + client, + leaderCluster, + leaderIndex, + listener::onFailure, + (leaderHistoryUUID, leaderIndexMetadata) -> { + try { + start(request, leaderCluster, leaderIndexMetadata, followerIndexMetadata, leaderHistoryUUID, listener); + } catch (final IOException e) { + listener.onFailure(e); + } + }); } /** @@ -207,13 +209,6 @@ public class TransportResumeFollowAction extends HandledTransportAction ccrIndexMetadata = followIndex.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY); if (ccrIndexMetadata == null) { throw new IllegalArgumentException("follow index ["+ followIndex.getIndex().getName() + "] does not have ccr metadata"); @@ -238,7 +233,8 @@ public class TransportResumeFollowAction extends HandledTransportAction client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } + + static Request createRequest(RestRequest restRequest) throws IOException { + try (XContentParser parser = restRequest.contentOrSourceParamParser()) { + return Request.fromXContent(parser, restRequest.param("index")); + } + } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java index 3f5c340deed..001134515cc 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java +++ 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java @@ -16,6 +16,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ccr.LocalStateCcr; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import org.junit.After; import org.junit.Before; @@ -63,14 +64,20 @@ public abstract class CcrSingleNodeTestCase extends ESSingleNodeTestCase { assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } - protected ResumeFollowAction.Request getFollowRequest() { + protected ResumeFollowAction.Request getResumeFollowRequest() { ResumeFollowAction.Request request = new ResumeFollowAction.Request(); - request.setLeaderCluster("local"); - request.setLeaderIndex("leader"); request.setFollowerIndex("follower"); request.setMaxRetryDelay(TimeValue.timeValueMillis(10)); request.setPollTimeout(TimeValue.timeValueMillis(10)); return request; } + protected PutFollowAction.Request getPutFollowRequest() { + PutFollowAction.Request request = new PutFollowAction.Request(); + request.setLeaderCluster("local"); + request.setLeaderIndex("leader"); + request.setFollowRequest(getResumeFollowRequest()); + return request; + } + } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index ab14f2dfb8e..a5803f10a75 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -49,7 +49,7 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase { } public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { - final ResumeFollowAction.Request followRequest = getFollowRequest(); + final ResumeFollowAction.Request followRequest = getResumeFollowRequest(); final CountDownLatch latch = new CountDownLatch(1); client().execute( ResumeFollowAction.INSTANCE, @@ -71,8 +71,7 @@ public class CcrLicenseIT extends CcrSingleNodeTestCase { } public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { - final ResumeFollowAction.Request followRequest = getFollowRequest(); - final PutFollowAction.Request createAndFollowRequest = new PutFollowAction.Request(followRequest); + final PutFollowAction.Request createAndFollowRequest = getPutFollowRequest(); final CountDownLatch latch = new CountDownLatch(1); client().execute( PutFollowAction.INSTANCE, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index 17bb6c8d70d..d0cc41f22c2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -82,7 +82,6 @@ import java.util.concurrent.atomic.AtomicInteger; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; 
import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -100,7 +99,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); ensureLeaderYellow("index1"); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); final int firstBatchNumDocs = randomIntBetween(2, 64); @@ -162,7 +161,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); ensureLeaderYellow("index1"); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); final long firstBatchNumDocs = randomIntBetween(2, 64); @@ -202,7 +201,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { .build())); ensureLeaderGreen("index1"); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1", "doc", "1").setSource("{\"f\":1}", XContentType.JSON).get(); @@ -252,7 +251,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { long numDocsIndexed = Math.min(3000 * 2, randomLongBetween(maxReadSize, maxReadSize * 10)); atLeastDocsIndexed(leaderClient(), "index1", numDocsIndexed / 3); - PutFollowAction.Request followRequest = follow("index1", "index2"); + PutFollowAction.Request followRequest = putFollow("index1", "index2"); followRequest.getFollowRequest().setMaxBatchOperationCount(maxReadSize); followRequest.getFollowRequest().setMaxConcurrentReadBatches(randomIntBetween(2, 10)); followRequest.getFollowRequest().setMaxConcurrentWriteBatches(randomIntBetween(2, 10)); @@ -295,7 +294,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { }); thread.start(); - PutFollowAction.Request followRequest = follow("index1", "index2"); + PutFollowAction.Request followRequest = putFollow("index1", "index2"); followRequest.getFollowRequest().setMaxBatchOperationCount(randomIntBetween(32, 2048)); followRequest.getFollowRequest().setMaxConcurrentReadBatches(randomIntBetween(2, 10)); followRequest.getFollowRequest().setMaxConcurrentWriteBatches(randomIntBetween(2, 10)); @@ -323,7 +322,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); ensureLeaderGreen("index1"); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); final int numDocs = randomIntBetween(2, 64); @@ -372,22 +371,17 @@ public class IndexFollowingIT extends CcrIntegTestCase { ensureLeaderGreen("test-leader"); ensureFollowerGreen("test-follower"); // Leader index does not exist. 
- ResumeFollowAction.Request followRequest1 = resumeFollow("non-existent-leader", "test-follower"); - expectThrows(IndexNotFoundException.class, () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest1).actionGet()); expectThrows(IndexNotFoundException.class, - () -> followerClient().execute(PutFollowAction.INSTANCE, new PutFollowAction.Request(followRequest1)) + () -> followerClient().execute(PutFollowAction.INSTANCE, putFollow("non-existent-leader", "test-follower")) .actionGet()); // Follower index does not exist. - ResumeFollowAction.Request followRequest2 = resumeFollow("non-test-leader", "non-existent-follower"); + ResumeFollowAction.Request followRequest1 = resumeFollow("non-existent-follower"); + expectThrows(IndexNotFoundException.class, () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest1).actionGet()); + // Both indices do not exist. + ResumeFollowAction.Request followRequest2 = resumeFollow("non-existent-follower"); expectThrows(IndexNotFoundException.class, () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest2).actionGet()); expectThrows(IndexNotFoundException.class, - () -> followerClient().execute(PutFollowAction.INSTANCE, new PutFollowAction.Request(followRequest2)) - .actionGet()); - // Both indices do not exist. - ResumeFollowAction.Request followRequest3 = resumeFollow("non-existent-leader", "non-existent-follower"); - expectThrows(IndexNotFoundException.class, () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest3).actionGet()); - expectThrows(IndexNotFoundException.class, - () -> followerClient().execute(PutFollowAction.INSTANCE, new PutFollowAction.Request(followRequest3)) + () -> followerClient().execute(PutFollowAction.INSTANCE, putFollow("non-existing-leader", "non-existing-follower")) .actionGet()); } @@ -404,7 +398,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { leaderClient().prepareIndex("index1", "doc", Integer.toString(i)).setSource(source, XContentType.JSON).get(); } - PutFollowAction.Request followRequest = follow("index1", "index2"); + PutFollowAction.Request followRequest = putFollow("index1", "index2"); followRequest.getFollowRequest().setMaxBatchSize(new ByteSizeValue(1, ByteSizeUnit.BYTES)); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); @@ -427,37 +421,11 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertTotalNumberOfOptimizedIndexing(resolveFollowerIndex("index2"), 1, numDocs); } - public void testDontFollowTheWrongIndex() throws Exception { - String leaderIndexSettings = getIndexSettings(1, 0, - Collections.singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); - assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); - ensureLeaderGreen("index1"); - assertAcked(leaderClient().admin().indices().prepareCreate("index3").setSource(leaderIndexSettings, XContentType.JSON)); - ensureLeaderGreen("index3"); - - PutFollowAction.Request followRequest = follow("index1", "index2"); - followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); - - followRequest = follow("index3", "index4"); - followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); - pauseFollow("index2", "index4"); - - ResumeFollowAction.Request wrongRequest1 = resumeFollow("index1", "index4"); - Exception e = expectThrows(IllegalArgumentException.class, - () -> followerClient().execute(ResumeFollowAction.INSTANCE, wrongRequest1).actionGet()); - 
assertThat(e.getMessage(), containsString("follow index [index4] should reference")); - - ResumeFollowAction.Request wrongRequest2 = resumeFollow("index3", "index2"); - e = expectThrows(IllegalArgumentException.class, - () -> followerClient().execute(ResumeFollowAction.INSTANCE, wrongRequest2).actionGet()); - assertThat(e.getMessage(), containsString("follow index [index2] should reference")); - } - public void testAttemptToChangeCcrFollowingIndexSetting() throws Exception { String leaderIndexSettings = getIndexSettings(1, 0, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON).get()); ensureLeaderYellow("index1"); - PutFollowAction.Request followRequest = follow("index1", "index2"); + PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); pauseFollow("index2"); followerClient().admin().indices().close(new CloseIndexRequest("index2")).actionGet(); @@ -478,7 +446,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build())); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get(); @@ -512,7 +480,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build())); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get(); @@ -541,7 +509,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build())); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get(); @@ -570,7 +538,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build())); - final PutFollowAction.Request followRequest = follow("index1", "index2"); + final PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); leaderClient().prepareIndex("index1", "doc", "1").setSource("{}", XContentType.JSON).get(); @@ -595,7 +563,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { public void testUnfollowIndex() throws Exception { String leaderIndexSettings = getIndexSettings(1, 0, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON).get()); - PutFollowAction.Request followRequest = follow("index1", "index2"); + PutFollowAction.Request followRequest = putFollow("index1", "index2"); followerClient().execute(PutFollowAction.INSTANCE, followRequest).get(); 
leaderClient().prepareIndex("index1", "doc").setSource("{}", XContentType.JSON).get(); assertBusy(() -> { @@ -647,7 +615,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { }); threads[i].start(); } - PutFollowAction.Request follow = follow("leader-index", "follower-index"); + PutFollowAction.Request follow = putFollow("leader-index", "follower-index"); followerClient().execute(PutFollowAction.INSTANCE, follow).get(); ensureFollowerGreen("follower-index"); atLeastDocsIndexed(followerClient(), "follower-index", between(20, 60)); @@ -674,14 +642,11 @@ public class IndexFollowingIT extends CcrIntegTestCase { Collections.singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); assertAcked(leaderClient().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); ensureLeaderGreen("index1"); - PutFollowAction.Request followRequest = follow("index1", "index2"); - followRequest.getFollowRequest().setLeaderCluster("another_cluster"); + PutFollowAction.Request followRequest = putFollow("index1", "index2"); + followRequest.setLeaderCluster("another_cluster"); Exception e = expectThrows(IllegalArgumentException.class, () -> followerClient().execute(PutFollowAction.INSTANCE, followRequest).actionGet()); assertThat(e.getMessage(), equalTo("unknown cluster alias [another_cluster]")); - e = expectThrows(IllegalArgumentException.class, - () -> followerClient().execute(ResumeFollowAction.INSTANCE, followRequest.getFollowRequest()).actionGet()); - assertThat(e.getMessage(), equalTo("unknown cluster alias [another_cluster]")); PutAutoFollowPatternAction.Request putAutoFollowRequest = new PutAutoFollowPatternAction.Request(); putAutoFollowRequest.setName("name"); putAutoFollowRequest.setLeaderCluster("another_cluster"); @@ -696,7 +661,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { String leaderIndexSettings = getIndexSettings(1, numberOfReplicas, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); assertAcked(leaderClient().admin().indices().prepareCreate("leader-index").setSource(leaderIndexSettings, XContentType.JSON)); - PutFollowAction.Request follow = follow("leader-index", "follower-index"); + PutFollowAction.Request follow = putFollow("leader-index", "follower-index"); followerClient().execute(PutFollowAction.INSTANCE, follow).get(); getFollowerCluster().ensureAtLeastNumDataNodes(numberOfReplicas + between(2, 3)); ensureFollowerGreen("follower-index"); @@ -998,14 +963,16 @@ public class IndexFollowingIT extends CcrIntegTestCase { }); } - public static PutFollowAction.Request follow(String leaderIndex, String followerIndex) { - return new PutFollowAction.Request(resumeFollow(leaderIndex, followerIndex)); - } - - public static ResumeFollowAction.Request resumeFollow(String leaderIndex, String followerIndex) { - ResumeFollowAction.Request request = new ResumeFollowAction.Request(); + public static PutFollowAction.Request putFollow(String leaderIndex, String followerIndex) { + PutFollowAction.Request request = new PutFollowAction.Request(); request.setLeaderCluster("leader_cluster"); request.setLeaderIndex(leaderIndex); + request.setFollowRequest(resumeFollow(followerIndex)); + return request; + } + + public static ResumeFollowAction.Request resumeFollow(String followerIndex) { + ResumeFollowAction.Request request = new ResumeFollowAction.Request(); request.setFollowerIndex(followerIndex); request.setMaxRetryDelay(TimeValue.timeValueMillis(10)); request.setPollTimeout(TimeValue.timeValueMillis(10)); diff --git 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java index 5ff1c67f323..3267be6f420 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java @@ -31,7 +31,7 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { assertAcked(client().admin().indices().prepareCreate("leader").setSource(leaderIndexSettings, XContentType.JSON)); ensureGreen("leader"); - final PutFollowAction.Request followRequest = new PutFollowAction.Request(getFollowRequest()); + final PutFollowAction.Request followRequest = getPutFollowRequest(); client().execute(PutFollowAction.INSTANCE, followRequest).get(); final long firstBatchNumDocs = randomIntBetween(2, 64); @@ -61,7 +61,7 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { client().prepareIndex("leader", "doc").setSource("{}", XContentType.JSON).get(); } - client().execute(ResumeFollowAction.INSTANCE, getFollowRequest()).get(); + client().execute(ResumeFollowAction.INSTANCE, getResumeFollowRequest()).get(); assertBusy(() -> { assertThat(client().prepareSearch("follower").get().getHits().totalHits, equalTo(firstBatchNumDocs + secondBatchNumDocs + thirdBatchNumDocs)); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 3f4c70f0165..9db8d5f55f0 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; -import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; +import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import java.util.ArrayList; import java.util.Arrays; @@ -91,13 +91,13 @@ public class AutoFollowCoordinatorTests extends ESTestCase { @Override void createAndFollow(Map headers, - ResumeFollowAction.Request followRequest, + PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { assertThat(headers, equalTo(autoFollowHeaders.get("remote"))); assertThat(followRequest.getLeaderCluster(), equalTo("remote")); assertThat(followRequest.getLeaderIndex(), equalTo("logs-20190101")); - assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); + assertThat(followRequest.getFollowRequest().getFollowerIndex(), equalTo("logs-20190101")); successHandler.run(); } @@ -150,7 +150,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { @Override void createAndFollow(Map headers, - ResumeFollowAction.Request followRequest, + PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { fail("should not get here"); @@ -211,12 +211,12 @@ public class AutoFollowCoordinatorTests extends ESTestCase { @Override void createAndFollow(Map headers, - ResumeFollowAction.Request followRequest, + PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { 
assertThat(followRequest.getLeaderCluster(), equalTo("remote")); assertThat(followRequest.getLeaderIndex(), equalTo("logs-20190101")); - assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); + assertThat(followRequest.getFollowRequest().getFollowerIndex(), equalTo("logs-20190101")); successHandler.run(); } @@ -274,12 +274,12 @@ public class AutoFollowCoordinatorTests extends ESTestCase { @Override void createAndFollow(Map headers, - ResumeFollowAction.Request followRequest, + PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { assertThat(followRequest.getLeaderCluster(), equalTo("remote")); assertThat(followRequest.getLeaderIndex(), equalTo("logs-20190101")); - assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); + assertThat(followRequest.getFollowRequest().getFollowerIndex(), equalTo("logs-20190101")); failureHandler.accept(failure); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java index f86594b3b69..b8c1d5511df 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java @@ -5,10 +5,13 @@ */ package org.elasticsearch.xpack.ccr.action; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; -public class PutFollowActionRequestTests extends AbstractStreamableTestCase { +import java.io.IOException; + +public class PutFollowActionRequestTests extends AbstractStreamableXContentTestCase { @Override protected PutFollowAction.Request createBlankInstance() { @@ -17,6 +20,20 @@ public class PutFollowActionRequestTests extends AbstractStreamableTestCase validate(request, null, null, null, null)); - assertThat(e.getMessage(), equalTo("leader index [leader_cluster:index1] does not exist")); - } - { - // should fail, because follow index does not exist - IndexMetaData leaderIMD = createIMD("index1", 5, Settings.EMPTY, emptyMap()); - Exception e = expectThrows(IllegalArgumentException.class, - () -> validate(request, leaderIMD, null, null, null)); - assertThat(e.getMessage(), equalTo("follow index [index2] does not exist")); - } { IndexMetaData leaderIMD = createIMD("index1", 5, Settings.EMPTY, null); IndexMetaData followIMD = createIMD("index2", 5, Settings.EMPTY, null); @@ -83,7 +70,7 @@ public class TransportResumeFollowActionTests extends ESTestCase { IndexMetaData leaderIMD = createIMD("index1", 5, Settings.EMPTY, null); IndexMetaData followIMD = createIMD("index2", 5, Settings.EMPTY, customMetaData); Exception e = expectThrows(IllegalArgumentException.class, () -> validate(request, leaderIMD, followIMD, UUIDs, null)); - assertThat(e.getMessage(), equalTo("leader index [leader_cluster:index1] does not have soft deletes enabled")); + assertThat(e.getMessage(), equalTo("leader index [index1] does not have soft deletes enabled")); } { // should fail because the follower index does not have soft deletes enabled diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 
5fdb13871b5..291fc853335 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -12,14 +12,29 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.FOLLOWER_INDEX_FIELD; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_BATCH_OPERATION_COUNT; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_BATCH_SIZE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_CONCURRENT_READ_BATCHES; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_CONCURRENT_WRITE_BATCHES; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_RETRY_DELAY_FIELD; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_SIZE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.POLL_TIMEOUT; + public final class PutFollowAction extends Action { public static final PutFollowAction INSTANCE = new PutFollowAction(); @@ -34,25 +49,97 @@ public final class PutFollowAction extends Action { return new Response(); } - public static class Request extends AcknowledgedRequest implements IndicesRequest { + public static class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { - private ResumeFollowAction.Request followRequest; + private static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster"); + private static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index"); - public Request(ResumeFollowAction.Request followRequest) { - this.followRequest = Objects.requireNonNull(followRequest); + private static final ObjectParser PARSER = new ObjectParser<>(NAME, () -> { + Request request = new Request(); + request.setFollowRequest(new ResumeFollowAction.Request()); + return request; + }); + + static { + PARSER.declareString(Request::setLeaderCluster, LEADER_CLUSTER_FIELD); + PARSER.declareString(Request::setLeaderIndex, LEADER_INDEX_FIELD); + PARSER.declareString((request, value) -> request.followRequest.setFollowerIndex(value), FOLLOWER_INDEX_FIELD); + PARSER.declareInt((request, value) -> request.followRequest.setMaxBatchOperationCount(value), MAX_BATCH_OPERATION_COUNT); + PARSER.declareInt((request, value) -> request.followRequest.setMaxConcurrentReadBatches(value), MAX_CONCURRENT_READ_BATCHES); + PARSER.declareField( + (request, value) -> request.followRequest.setMaxBatchSize(value), + (p, c) -> 
ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), + MAX_BATCH_SIZE, + ObjectParser.ValueType.STRING); + PARSER.declareInt((request, value) -> request.followRequest.setMaxConcurrentWriteBatches(value), MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt((request, value) -> request.followRequest.setMaxWriteBufferSize(value), MAX_WRITE_BUFFER_SIZE); + PARSER.declareField( + (request, value) -> request.followRequest.setMaxRetryDelay(value), + (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY_FIELD.getPreferredName()), + MAX_RETRY_DELAY_FIELD, + ObjectParser.ValueType.STRING); + PARSER.declareField( + (request, value) -> request.followRequest.setPollTimeout(value), + (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), + POLL_TIMEOUT, + ObjectParser.ValueType.STRING); } - public Request() { + public static Request fromXContent(final XContentParser parser, final String followerIndex) throws IOException { + Request request = PARSER.parse(parser, followerIndex); + if (followerIndex != null) { + if (request.getFollowRequest().getFollowerIndex() == null) { + request.getFollowRequest().setFollowerIndex(followerIndex); + } else { + if (request.getFollowRequest().getFollowerIndex().equals(followerIndex) == false) { + throw new IllegalArgumentException("provided follower_index is not equal"); + } + } + } + return request; + } + private String leaderCluster; + private String leaderIndex; + private ResumeFollowAction.Request followRequest; + + public Request() { + } + + public String getLeaderCluster() { + return leaderCluster; + } + + public void setLeaderCluster(String leaderCluster) { + this.leaderCluster = leaderCluster; + } + + public String getLeaderIndex() { + return leaderIndex; + } + + public void setLeaderIndex(String leaderIndex) { + this.leaderIndex = leaderIndex; } public ResumeFollowAction.Request getFollowRequest() { return followRequest; } + public void setFollowRequest(ResumeFollowAction.Request followRequest) { + this.followRequest = followRequest; + } + @Override public ActionRequestValidationException validate() { - return followRequest.validate(); + ActionRequestValidationException e = followRequest.validate(); + if (leaderCluster == null) { + e = addValidationError(LEADER_CLUSTER_FIELD.getPreferredName() + " is missing", e); + } + if (leaderIndex == null) { + e = addValidationError(LEADER_INDEX_FIELD.getPreferredName() + " is missing", e); + } + return e; } @Override @@ -68,6 +155,8 @@ public final class PutFollowAction extends Action { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); + leaderCluster = in.readString(); + leaderIndex = in.readString(); followRequest = new ResumeFollowAction.Request(); followRequest.readFrom(in); } @@ -75,20 +164,36 @@ public final class PutFollowAction extends Action { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); + out.writeString(leaderCluster); + out.writeString(leaderIndex); followRequest.writeTo(out); } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); + builder.field(LEADER_INDEX_FIELD.getPreferredName(), leaderIndex); + followRequest.toXContentFragment(builder, params); + } + builder.endObject(); + return builder; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return 
false; Request request = (Request) o; - return Objects.equals(followRequest, request.followRequest); + return Objects.equals(leaderCluster, request.leaderCluster) && + Objects.equals(leaderIndex, request.leaderIndex) && + Objects.equals(followRequest, request.followRequest); } @Override public int hashCode() { - return Objects.hash(followRequest); + return Objects.hash(leaderCluster, leaderIndex, followRequest); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 02ed7a1a5fb..127ccf7610f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -43,21 +43,17 @@ public final class ResumeFollowAction extends Action { public static class Request extends ActionRequest implements ToXContentObject { - private static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster"); - private static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index"); - private static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index"); - private static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); - private static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); - private static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); - private static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); - private static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); - private static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay"); - private static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); - private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index"); + static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); + static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); + static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); + static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); + static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay"); + static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); + static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); static { - PARSER.declareString(Request::setLeaderCluster, LEADER_CLUSTER_FIELD); - PARSER.declareString(Request::setLeaderIndex, LEADER_INDEX_FIELD); PARSER.declareString(Request::setFollowerIndex, FOLLOWER_INDEX_FIELD); PARSER.declareInt(Request::setMaxBatchOperationCount, MAX_BATCH_OPERATION_COUNT); PARSER.declareInt(Request::setMaxConcurrentReadBatches, MAX_CONCURRENT_READ_BATCHES); @@ -94,26 +90,6 @@ public final class ResumeFollowAction extends Action { return request; } - private String leaderCluster; - - public String getLeaderCluster() { - return leaderCluster; - } - - public void setLeaderCluster(String leaderCluster) { - this.leaderCluster = leaderCluster; - } - - 
private String leaderIndex; - - public String getLeaderIndex() { - return leaderIndex; - } - - public void setLeaderIndex(String leaderIndex) { - this.leaderIndex = leaderIndex; - } - private String followerIndex; public String getFollowerIndex() { @@ -201,12 +177,6 @@ public final class ResumeFollowAction extends Action { public ActionRequestValidationException validate() { ActionRequestValidationException e = null; - if (leaderCluster == null) { - e = addValidationError(LEADER_CLUSTER_FIELD.getPreferredName() + " is missing", e); - } - if (leaderIndex == null) { - e = addValidationError(LEADER_INDEX_FIELD.getPreferredName() + " is missing", e); - } if (followerIndex == null) { e = addValidationError(FOLLOWER_INDEX_FIELD.getPreferredName() + " is missing", e); } @@ -242,8 +212,6 @@ public final class ResumeFollowAction extends Action { @Override public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); - leaderCluster = in.readString(); - leaderIndex = in.readString(); followerIndex = in.readString(); maxBatchOperationCount = in.readOptionalVInt(); maxConcurrentReadBatches = in.readOptionalVInt(); @@ -257,8 +225,6 @@ public final class ResumeFollowAction extends Action { @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(leaderCluster); - out.writeString(leaderIndex); out.writeString(followerIndex); out.writeOptionalVInt(maxBatchOperationCount); out.writeOptionalVInt(maxConcurrentReadBatches); @@ -273,35 +239,37 @@ public final class ResumeFollowAction extends Action { public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); { - builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); - builder.field(LEADER_INDEX_FIELD.getPreferredName(), leaderIndex); - builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex); - if (maxBatchOperationCount != null) { - builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); - } - if (maxBatchSize != null) { - builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); - } - if (maxWriteBufferSize != null) { - builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); - } - if (maxConcurrentReadBatches != null) { - builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); - } - if (maxConcurrentWriteBatches != null) { - builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); - } - if (maxRetryDelay != null) { - builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), maxRetryDelay.getStringRep()); - } - if (pollTimeout != null) { - builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); - } + toXContentFragment(builder, params); } builder.endObject(); return builder; } + void toXContentFragment(final XContentBuilder builder, final Params params) throws IOException { + builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex); + if (maxBatchOperationCount != null) { + builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); + } + if (maxBatchSize != null) { + builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); + } + if (maxWriteBufferSize != null) { + builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + } + if (maxConcurrentReadBatches != null) { + builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), 
maxConcurrentReadBatches); + } + if (maxConcurrentWriteBatches != null) { + builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + } + if (maxRetryDelay != null) { + builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), maxRetryDelay.getStringRep()); + } + if (pollTimeout != null) { + builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); + } + } + @Override public boolean equals(final Object o) { if (this == o) return true; @@ -314,16 +282,12 @@ public final class ResumeFollowAction extends Action { Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) && Objects.equals(maxRetryDelay, request.maxRetryDelay) && Objects.equals(pollTimeout, request.pollTimeout) && - Objects.equals(leaderCluster, request.leaderCluster) && - Objects.equals(leaderIndex, request.leaderIndex) && Objects.equals(followerIndex, request.followerIndex); } @Override public int hashCode() { return Objects.hash( - leaderCluster, - leaderIndex, followerIndex, maxBatchOperationCount, maxConcurrentReadBatches, From 90fd15bb563160e59b582f359e764aa99a966bc6 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 23 Oct 2018 11:14:31 -0700 Subject: [PATCH 04/67] Mute RollupIndexerIndexingTests#testRandomizedDateHisto as we await a fix. --- .../xpack/rollup/job/RollupIndexerIndexingTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 4784e75d646..c3add626d67 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -421,6 +421,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34762") public void testRandomizedDateHisto() throws Exception { String rollupIndex = randomAlphaOfLengthBetween(5, 10); From e242fd2e42fb6759b2bf4c0a73451e814b1a0e20 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 23 Oct 2018 14:23:29 -0400 Subject: [PATCH 05/67] CCR: Add TransportService closed to retryable errors (#34722) Both testFollowIndexAndCloseNode and testFailOverOnFollower failed because they responded to the FollowTask a TransportService closed exception which is currently considered as a fatal error. This behavior is not desirable since a closing node can throw that exception, and we should retry in that case. This change adds TransportService closed error to the list of retryable errors. 
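For reference, a simplified sketch of the connection-related checks this change introduces is shown below; the class and method names are illustrative only, and the authoritative predicate is ShardFollowNodeTask#retryableError in the diff that follows.

    import org.elasticsearch.ExceptionsHelper;
    import org.elasticsearch.transport.NodeDisconnectedException;
    import org.elasticsearch.transport.NodeNotConnectedException;

    // Illustrative sketch only: shows the connection-level failures that are now
    // treated as transient. The real check lives in ShardFollowNodeTask#retryableError.
    final class RetryableConnectionErrors {
        static boolean isRetryable(Exception e) {
            final Throwable actual = ExceptionsHelper.unwrapCause(e);
            return actual instanceof NodeDisconnectedException
                || actual instanceof NodeNotConnectedException
                // A closing node can fail a request with this message before the
                // disconnect is observed, so it is retried rather than treated as fatal.
                || (actual.getMessage() != null && actual.getMessage().contains("TransportService is closed"));
        }
    }
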
Closes #34694 --- .../xpack/ccr/action/ShardFollowNodeTask.java | 8 +++++++- .../org/elasticsearch/xpack/ccr/IndexFollowingIT.java | 2 -- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index b156a41896a..7d1e96c5d7c 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -28,6 +28,8 @@ import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.transport.NodeDisconnectedException; +import org.elasticsearch.transport.NodeNotConnectedException; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsResponse; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; @@ -371,6 +373,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { scheduler.accept(TimeValue.timeValueMillis(delay), task); } else { fatalException = ExceptionsHelper.convertToElastic(e); + LOGGER.warn("shard follow task encounter non-retryable error", e); } } @@ -399,7 +402,10 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { actual instanceof AlreadyClosedException || actual instanceof ElasticsearchSecurityException || // If user does not have sufficient privileges actual instanceof ClusterBlockException || // If leader index is closed or no elected master - actual instanceof IndexClosedException; // If follow index is closed + actual instanceof IndexClosedException || // If follow index is closed + actual instanceof NodeDisconnectedException || + actual instanceof NodeNotConnectedException || + (actual.getMessage() != null && actual.getMessage().contains("TransportService is closed")); } // These methods are protected for testing purposes: diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index d0cc41f22c2..926b439ea35 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -270,7 +270,6 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertMaxSeqNoOfUpdatesIsTransferred(resolveLeaderIndex("index1"), resolveFollowerIndex("index2"), numberOfShards); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34696") public void testFollowIndexAndCloseNode() throws Exception { getFollowerCluster().ensureAtLeastNumDataNodes(3); String leaderIndexSettings = getIndexSettings(3, 1, singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); @@ -587,7 +586,6 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(followerClient().prepareSearch("index2").get().getHits().getTotalHits(), equalTo(2L)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34696") public void testFailOverOnFollower() throws Exception { int numberOfReplicas = between(1, 2); getFollowerCluster().startMasterOnlyNode(); From a8e1ee34ca040212a2677c2d46cdc5b8fb32dcad Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 23 Oct 2018 13:28:44 -0500 
Subject: [PATCH 06/67] ingest: document fields that support templating (#34536) This change also updates many of the examples to use ecs as the example. Some additional minor improvements are also included. Part of #33188 --- docs/reference/ingest/ingest-node.asciidoc | 63 +++++++++++----------- 1 file changed, 32 insertions(+), 31 deletions(-) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 13dd3de73bd..d23fc8de12b 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -776,16 +776,16 @@ Accepts a single value or an array of values. [options="header"] |====== | Name | Required | Default | Description -| `field` | yes | - | The field to be appended to -| `value` | yes | - | The value to be appended +| `field` | yes | - | The field to be appended to. Supports <>. +| `value` | yes | - | The value to be appended. Supports <>. |====== [source,js] -------------------------------------------------- { "append": { - "field": "field1", - "value": ["item2", "item3", "item4"] + "field": "tags", + "value": ["production", "{{app}}", "{{owner}}"] } } -------------------------------------------------- @@ -812,7 +812,7 @@ the field is not a supported format or resultant value exceeds 2^63. -------------------------------------------------- { "bytes": { - "field": "foo" + "field": "file.size" } } -------------------------------------------------- @@ -850,7 +850,7 @@ still be updated with the unconverted field value. -------------------------------------------------- { "convert": { - "field" : "foo", + "field" : "url.port", "type": "integer" } } @@ -874,8 +874,8 @@ in the same order they were defined as part of the processor definition. | `field` | yes | - | The field to get the date from. | `target_field` | no | @timestamp | The field that will hold the parsed date. | `formats` | yes | - | An array of the expected date formats. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N. -| `timezone` | no | UTC | The timezone to use when parsing the date. -| `locale` | no | ENGLISH | The locale to use when parsing the date, relevant when parsing month names or week days. +| `timezone` | no | UTC | The timezone to use when parsing the date. Supports <>. +| `locale` | no | ENGLISH | The locale to use when parsing the date, relevant when parsing month names or week days. Supports <>. |====== Here is an example that adds the parsed date to the `timestamp` field based on the `initial_date` field: @@ -913,8 +913,8 @@ the timezone and locale values. "field" : "initial_date", "target_field" : "timestamp", "formats" : ["ISO8601"], - "timezone" : "{{ my_timezone }}", - "locale" : "{{ my_locale }}" + "timezone" : "{{my_timezone}}", + "locale" : "{{my_locale}}" } } ] @@ -1059,12 +1059,12 @@ understands this to mean `2016-04-01` as is explained in the <>. +| `date_rounding` | yes | - | How to round the date when formatting the date into the index name. Valid values are: `y` (year), `M` (month), `w` (week), `d` (day), `h` (hour), `m` (minute) and `s` (second). Supports <>. | `date_formats` | no | yyyy-MM-dd'T'HH:mm:ss.SSSZ | An array of the expected date formats for parsing dates / timestamps in the document being preprocessed. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N. | `timezone` | no | UTC | The timezone to use when parsing the date and when date math index supports resolves expressions into concrete index names. 
| `locale` | no | ENGLISH | The locale to use when parsing the date from the document being preprocessed, relevant when parsing month names or week days. -| `index_name_format` | no | yyyy-MM-dd | The format to be used when printing the parsed date into the index name. An valid Joda pattern is expected here. +| `index_name_format` | no | yyyy-MM-dd | The format to be used when printing the parsed date into the index name. An valid Joda pattern is expected here. Supports <>. |====== [[dissect-processor]] @@ -1405,14 +1405,15 @@ to the requester. [options="header"] |====== | Name | Required | Default | Description -| `message` | yes | - | The error message of the `FailException` thrown by the processor +| `message` | yes | - | The error message thrown by the processor. Supports <>. |====== [source,js] -------------------------------------------------- { "fail": { - "message": "an error message" + "if" : "ctx.tags.contains('production') != true", + "message": "The production tag is not present, found tags: {{tags}}" } } -------------------------------------------------- @@ -2117,7 +2118,7 @@ Removes existing fields. If one field doesn't exist, an exception will be thrown [options="header"] |====== | Name | Required | Default | Description -| `field` | yes | - | Fields to be removed +| `field` | yes | - | Fields to be removed. Supports <>. | `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document |====== @@ -2127,7 +2128,7 @@ Here is an example to remove a single field: -------------------------------------------------- { "remove": { - "field": "foo" + "field": "user_agent" } } -------------------------------------------------- @@ -2139,7 +2140,7 @@ To remove multiple fields, you can use the following query: -------------------------------------------------- { "remove": { - "field": ["foo", "bar"] + "field": ["user_agent", "url"] } } -------------------------------------------------- @@ -2153,18 +2154,18 @@ Renames an existing field. If the field doesn't exist or the new name is already .Rename Options [options="header"] |====== -| Name | Required | Default | Description -| `field` | yes | - | The field to be renamed -| `target_field` | yes | - | The new name of the field -| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document +| Name | Required | Default | Description +| `field` | yes | - | The field to be renamed. Supports <>. +| `target_field` | yes | - | The new name of the field. Supports <>. +| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |====== [source,js] -------------------------------------------------- { "rename": { - "field": "foo", - "target_field": "foobar" + "field": "provider", + "target_field": "cloud.provider" } } -------------------------------------------------- @@ -2282,18 +2283,18 @@ its value will be replaced with the provided one. .Set Options [options="header"] |====== -| Name | Required | Default | Description -| `field` | yes | - | The field to insert, upsert, or update -| `value` | yes | - | The value to be set for the field -| `override`| no | true | If processor will update fields with pre-existing non-null-valued field. When set to `false`, such fields will not be touched. +| Name | Required | Default | Description +| `field` | yes | - | The field to insert, upsert, or update. Supports <>. 
+| `value` | yes | - | The value to be set for the field. Supports <>. +| `override` | no | true | If processor will update fields with pre-existing non-null-valued field. When set to `false`, such fields will not be touched. |====== [source,js] -------------------------------------------------- { "set": { - "field": "field1", - "value": 582.1 + "field": "host.os.name", + "value": "{{os}}" } } -------------------------------------------------- @@ -2346,7 +2347,7 @@ Throws an error when the field is not an array. -------------------------------------------------- { "sort": { - "field": "field_to_sort", + "field": "array_field_to_sort", "order": "desc" } } From 5a4866f67d3aac0580cc3b1bc692af8e1b38a3be Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 23 Oct 2018 11:36:24 -0700 Subject: [PATCH 07/67] Mute CharArraysTests#testCharsBeginsWith while we await a fix. --- .../src/test/java/org/elasticsearch/common/CharArraysTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java index 9283283ab08..0e3a2179463 100644 --- a/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java +++ b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java @@ -43,6 +43,7 @@ public class CharArraysTests extends ESTestCase { assertArrayEquals(expectedChars, convertedChars); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34765") public void testCharsBeginsWith() { assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(4), null)); assertFalse(CharArrays.charsBeginsWith(null, null)); From 067d135bfdffc8e442a7b6f34ca5e20b09b4d67a Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 23 Oct 2018 13:49:25 -0500 Subject: [PATCH 08/67] ingest: document index.default_pipeline (#34500) --- docs/reference/index-modules.asciidoc | 7 +++++++ docs/reference/ingest.asciidoc | 3 +++ 2 files changed, 10 insertions(+) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 81bc96bb8f9..1442f0e3e55 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -234,6 +234,13 @@ specific index module: The length of time that a <> remains available for <>. Defaults to `60s`. + `index.default_pipeline`:: + + The default <> pipeline for this index. Index requests will fail + if the default pipeline is set and the pipeline does not exist. The default may be + overridden using the `pipeline` parameter. The special pipeline name `_none` indicates + no ingest pipeline should be run. + [float] === Settings in other index modules diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index 772013534b6..6fa2e8c796d 100644 --- a/docs/reference/ingest.asciidoc +++ b/docs/reference/ingest.asciidoc @@ -35,6 +35,9 @@ PUT my-index/_doc/my-id?pipeline=my_pipeline_id // CONSOLE // TEST[catch:bad_request] +An index may also declare a <> that will be used in the +absence of the `pipeline` parameter. + See <> for more information about creating, adding, and deleting pipelines. 
-- From 299d044bfceaf5391cda1bc19f92f8bc2cbf87ae Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Tue, 23 Oct 2018 16:01:01 -0400 Subject: [PATCH 09/67] Collapse pipeline aggs into single package (#34658) - Restrict visibility of Aggregators and Factories - Move PipelineAggregatorBuilders up a level so it is consistent with AggregatorBuilders - Checkstyle line length fixes for a few classes - Minor odds/ends (swapping to method references, formatting, etc) --- .../resources/checkstyle_suppressions.xml | 3 - .../client/RestHighLevelClient.java | 20 ++--- .../migration/migrate_7_0/java.asciidoc | 6 ++ .../expression/MoreExpressionTests.java | 2 +- .../painless/PainlessPlugin.java | 2 +- .../spi/org.elasticsearch.aggs.movfn.txt | 2 +- .../elasticsearch/plugins/SearchPlugin.java | 4 +- .../elasticsearch/script/ScriptModule.java | 2 +- .../elasticsearch/search/SearchModule.java | 84 +++++++++---------- .../PipelineAggregatorBuilders.java | 56 ++++++------- .../AvgBucketPipelineAggregationBuilder.java | 5 +- .../avg => }/AvgBucketPipelineAggregator.java | 7 +- .../BucketMetricValue.java | 2 +- .../BucketMetricsParser.java | 3 +- ...cketMetricsPipelineAggregationBuilder.java | 4 +- .../BucketMetricsPipelineAggregator.java | 6 +- ...ucketScriptPipelineAggregationBuilder.java | 4 +- .../BucketScriptPipelineAggregator.java | 8 +- ...ketSelectorPipelineAggregationBuilder.java | 4 +- .../BucketSelectorPipelineAggregator.java | 10 +-- .../BucketSortPipelineAggregationBuilder.java | 4 +- .../BucketSortPipelineAggregator.java | 8 +- ...mulativeSumPipelineAggregationBuilder.java | 5 +- .../CumulativeSumPipelineAggregator.java | 8 +- .../pipeline/{derivative => }/Derivative.java | 6 +- .../DerivativePipelineAggregationBuilder.java | 4 +- .../DerivativePipelineAggregator.java | 7 +- .../{movavg/models => }/EwmaModel.java | 6 +- .../extended => }/ExtendedStatsBucket.java | 2 +- .../ExtendedStatsBucketParser.java | 3 +- ...StatsBucketPipelineAggregationBuilder.java | 6 +- ...ExtendedStatsBucketPipelineAggregator.java | 6 +- .../{movavg/models => }/HoltLinearModel.java | 8 +- .../{movavg/models => }/HoltWintersModel.java | 16 ++-- .../InternalBucketMetricValue.java | 3 +- .../{derivative => }/InternalDerivative.java | 6 +- .../InternalExtendedStatsBucket.java | 3 +- .../InternalPercentilesBucket.java | 5 +- .../pipeline/InternalSimpleValue.java | 2 +- .../stats => }/InternalStatsBucket.java | 3 +- .../{movavg/models => }/LinearModel.java | 4 +- .../MaxBucketPipelineAggregationBuilder.java | 5 +- .../max => }/MaxBucketPipelineAggregator.java | 7 +- .../MinBucketPipelineAggregationBuilder.java | 5 +- .../min => }/MinBucketPipelineAggregator.java | 9 +- .../{movavg/models => }/MovAvgModel.java | 2 +- .../models => }/MovAvgModelBuilder.java | 2 +- .../MovAvgPipelineAggregationBuilder.java | 13 +-- .../MovAvgPipelineAggregator.java | 15 ++-- .../MovFnPipelineAggregationBuilder.java | 4 +- .../{movfn => }/MovFnPipelineAggregator.java | 5 +- .../{movfn => }/MovingFunctionScript.java | 2 +- .../pipeline/{movfn => }/MovingFunctions.java | 2 +- .../ParsedBucketMetricValue.java | 4 +- .../{derivative => }/ParsedDerivative.java | 5 +- .../ParsedExtendedStatsBucket.java | 2 +- .../ParsedPercentilesBucket.java | 2 +- .../stats => }/ParsedStatsBucket.java | 2 +- .../percentile => }/PercentilesBucket.java | 2 +- ...tilesBucketPipelineAggregationBuilder.java | 8 +- .../PercentilesBucketPipelineAggregator.java | 8 +- .../SerialDiffPipelineAggregationBuilder.java | 5 +- .../SerialDiffPipelineAggregator.java | 8 +- 
.../pipeline/SiblingPipelineAggregator.java | 22 ++--- .../{movavg/models => }/SimpleModel.java | 4 +- .../SimulatedAnealingMinimizer.java | 3 +- .../stats => }/StatsBucket.java | 2 +- ...StatsBucketPipelineAggregationBuilder.java | 5 +- .../StatsBucketPipelineAggregator.java | 6 +- .../SumBucketPipelineAggregationBuilder.java | 5 +- .../sum => }/SumBucketPipelineAggregator.java | 7 +- .../search/SearchModuleTests.java | 10 +-- .../aggregations/AggregationsTests.java | 8 +- .../AggregatorFactoriesBuilderTests.java | 2 +- .../AggregatorFactoriesTests.java | 3 +- .../search/aggregations/MetaDataIT.java | 4 +- .../bucket/terms/TermsAggregatorTests.java | 4 +- .../metrics/InternalStatsBucketTests.java | 4 +- .../AbstractBucketMetricsTestCase.java | 3 +- .../avg => }/AvgBucketAggregatorTests.java | 4 +- .../aggregations/pipeline/AvgBucketIT.java | 2 +- .../{bucketmetrics => }/AvgBucketTests.java | 10 +-- .../aggregations/pipeline/BucketScriptIT.java | 2 +- .../pipeline/BucketScriptTests.java | 1 - .../pipeline/BucketSelectorIT.java | 4 +- .../pipeline/BucketSelectorTests.java | 1 - .../{bucketsort => }/BucketSortIT.java | 5 +- .../{bucketsort => }/BucketSortTests.java | 3 +- .../CumulativeSumAggregatorTests.java | 2 - .../pipeline/CumulativeSumTests.java | 1 - .../pipeline/DateDerivativeIT.java | 3 +- .../aggregations/pipeline/DerivativeIT.java | 6 +- .../pipeline/DerivativeTests.java | 1 - .../pipeline/ExtendedStatsBucketIT.java | 6 +- .../ExtendedStatsBucketTests.java | 4 +- .../InternalBucketMetricValueTests.java | 4 +- .../InternalDerivativeTests.java | 4 +- .../InternalExtendedStatsBucketTests.java | 11 ++- .../InternalPercentilesBucketTests.java | 4 +- .../aggregations/pipeline/MaxBucketIT.java | 5 +- .../{bucketmetrics => }/MaxBucketTests.java | 4 +- .../aggregations/pipeline/MinBucketIT.java | 3 +- .../{bucketmetrics => }/MinBucketTests.java | 4 +- .../pipeline/{moving/avg => }/MovAvgIT.java | 51 +++++------ .../{moving/avg => }/MovAvgTests.java | 16 ++-- .../{moving/avg => }/MovAvgUnitTests.java | 14 ++-- ...eAggregationBuilderSerializationTests.java | 3 +- .../pipeline/{movfn => }/MovFnUnitTests.java | 5 +- .../MovFnWhitelistedFunctionTests.java | 3 +- .../pipeline/PercentilesBucketIT.java | 3 +- .../PercentilesBucketTests.java | 4 +- .../{serialdiff => }/SerialDiffIT.java | 14 ++-- .../pipeline/SerialDifferenceTests.java | 1 - .../aggregations/pipeline/StatsBucketIT.java | 3 +- .../{bucketmetrics => }/StatsBucketTests.java | 4 +- .../aggregations/pipeline/SumBucketIT.java | 2 +- .../{bucketmetrics => }/SumBucketTests.java | 4 +- .../script/MockScriptEngine.java | 4 +- .../test/InternalAggregationTestCase.java | 20 ++--- .../xpack/sql/querydsl/agg/AggFilter.java | 2 +- 120 files changed, 344 insertions(+), 461 deletions(-) rename server/src/main/java/org/elasticsearch/search/aggregations/{pipeline => }/PipelineAggregatorBuilders.java (63%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/avg => }/AvgBucketPipelineAggregationBuilder.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/avg => }/AvgBucketPipelineAggregator.java (83%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/BucketMetricValue.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/BucketMetricsParser.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => 
}/BucketMetricsPipelineAggregationBuilder.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/BucketMetricsPipelineAggregator.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketscript => }/BucketScriptPipelineAggregationBuilder.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketscript => }/BucketScriptPipelineAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketselector => }/BucketSelectorPipelineAggregationBuilder.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketselector => }/BucketSelectorPipelineAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketsort => }/BucketSortPipelineAggregationBuilder.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketsort => }/BucketSortPipelineAggregator.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{cumulativesum => }/CumulativeSumPipelineAggregationBuilder.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{cumulativesum => }/CumulativeSumPipelineAggregator.java (91%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{derivative => }/Derivative.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{derivative => }/DerivativePipelineAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{derivative => }/DerivativePipelineAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/EwmaModel.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/ExtendedStatsBucket.java (91%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/ExtendedStatsBucketParser.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/ExtendedStatsBucketPipelineAggregationBuilder.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/ExtendedStatsBucketPipelineAggregator.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/HoltLinearModel.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/HoltWintersModel.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/InternalBucketMetricValue.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{derivative => }/InternalDerivative.java (91%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/InternalExtendedStatsBucket.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/percentile => }/InternalPercentilesBucket.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats => }/InternalStatsBucket.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/LinearModel.java (93%) rename 
server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/max => }/MaxBucketPipelineAggregationBuilder.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/max => }/MaxBucketPipelineAggregator.java (84%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/min => }/MinBucketPipelineAggregationBuilder.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/min => }/MinBucketPipelineAggregator.java (81%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/MovAvgModel.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/MovAvgModelBuilder.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg => }/MovAvgPipelineAggregationBuilder.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg => }/MovAvgPipelineAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovFnPipelineAggregationBuilder.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovFnPipelineAggregator.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovingFunctionScript.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovingFunctions.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/ParsedBucketMetricValue.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{derivative => }/ParsedDerivative.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/ParsedExtendedStatsBucket.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/percentile => }/ParsedPercentilesBucket.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats => }/ParsedStatsBucket.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/percentile => }/PercentilesBucket.java (91%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/percentile => }/PercentilesBucketPipelineAggregationBuilder.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/percentile => }/PercentilesBucketPipelineAggregator.java (86%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{serialdiff => }/SerialDiffPipelineAggregationBuilder.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{serialdiff => }/SerialDiffPipelineAggregator.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg/models => }/SimpleModel.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{movavg => }/SimulatedAnealingMinimizer.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats => }/StatsBucket.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats => }/StatsBucketPipelineAggregationBuilder.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats => 
}/StatsBucketPipelineAggregator.java (86%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/sum => }/SumBucketPipelineAggregationBuilder.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/sum => }/SumBucketPipelineAggregator.java (82%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/AbstractBucketMetricsTestCase.java (91%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/avg => }/AvgBucketAggregatorTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/AvgBucketTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketsort => }/BucketSortIT.java (98%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketsort => }/BucketSortTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/ExtendedStatsBucketTests.java (94%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/InternalBucketMetricValueTests.java (95%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{derivative => }/InternalDerivativeTests.java (95%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/stats/extended => }/InternalExtendedStatsBucketTests.java (79%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics/percentile => }/InternalPercentilesBucketTests.java (97%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/MaxBucketTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/MinBucketTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{moving/avg => }/MovAvgIT.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{moving/avg => }/MovAvgTests.java (88%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{moving/avg => }/MovAvgUnitTests.java (97%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovFnPipelineAggregationBuilderSerializationTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovFnUnitTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{movfn => }/MovFnWhitelistedFunctionTests.java (99%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/PercentilesBucketTests.java (94%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{serialdiff => }/SerialDiffIT.java (94%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/StatsBucketTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/pipeline/{bucketmetrics => }/SumBucketTests.java (92%) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 212b407f02d..df90fe70497 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -614,9 +614,6 @@ - - - diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 86782b364a0..eb041c77c54 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -156,16 +156,16 @@ import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.ParsedBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ParsedExtendedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.ParsedBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.ParsedPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedExtendedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedDerivative; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index 7d68ff2fb57..4357b3fa728 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -22,6 +22,12 @@ appropriate request directly. * All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages were moved to a single `org.elasticsearch.search.aggregations.metrics` package. +* All classes present in `org.elasticsearch.search.aggregations.pipeline.*` packages +were moved to a single `org.elasticsearch.search.aggregations.pipeline` package. 
In +addition, `org.elasticsearch.search.aggregations.pipeline.PipelineAggregationBuilders` +was moved to `org.elasticsearch.search.aggregations.PipelineAggregationBuilders` + + [float] ==== `Retry.withBackoff` methods with `Settings` removed diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index fdf9e09e07f..7b1e53a336c 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -53,7 +53,7 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDI import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketScript; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 30573786467..1773b3445c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -40,7 +40,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctionScript; import java.util.ArrayList; import java.util.Arrays; diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt index a120b73820a..87f6f7d9aea 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt @@ -19,7 +19,7 @@ # This file contains a whitelist for the Moving Function pipeline aggregator in core -class org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions { +class org.elasticsearch.search.aggregations.pipeline.MovingFunctions { double max(double[]) double min(double[]) double sum(double[]) diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index b134db44517..c8d4e2e6209 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -42,8 +42,8 @@ import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms import 
org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.MovAvgModel; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.rescore.RescorerBuilder; diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 1c53ef133de..8e9d162c52e 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -22,7 +22,7 @@ package org.elasticsearch.script; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctionScript; import java.util.Collections; import java.util.HashMap; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 3032f618c2f..47943a92b00 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -185,48 +185,48 @@ import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.InternalPercentilesBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.InternalStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; -import 
org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.InternalExtendedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketselector.BucketSelectorPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketselector.BucketSelectorPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketsort.BucketSortPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketsort.BucketSortPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.derivative.InternalDerivative; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregationBuilder; +import 
org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketParser; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalExtendedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.BucketSelectorPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketSelectorPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.BucketSortPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketSortPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalDerivative; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.EwmaModel; +import org.elasticsearch.search.aggregations.pipeline.HoltLinearModel; +import org.elasticsearch.search.aggregations.pipeline.HoltWintersModel; +import org.elasticsearch.search.aggregations.pipeline.LinearModel; +import org.elasticsearch.search.aggregations.pipeline.MovAvgModel; +import org.elasticsearch.search.aggregations.pipeline.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.SerialDiffPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.SerialDiffPipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java similarity index 63% rename from 
server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java
index ce87dd797d6..a5f89f82f6b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregatorBuilders.java
@@ -17,24 +17,24 @@
  * under the License.
  */
-package org.elasticsearch.search.aggregations.pipeline;
+package org.elasticsearch.search.aggregations;
 
 import org.elasticsearch.script.Script;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketselector.BucketSelectorPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.bucketsort.BucketSortPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.movfn.MovFnPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.BucketSelectorPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.BucketSortPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.MaxBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.MinBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.SerialDiffPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.SumBucketPipelineAggregationBuilder;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 
 import java.util.List;
@@ -69,13 +69,11 @@ public final class PipelineAggregatorBuilders {
         return new StatsBucketPipelineAggregationBuilder(name, bucketsPath);
     }
 
-    public static ExtendedStatsBucketPipelineAggregationBuilder extendedStatsBucket(String name,
-                                                                                    String bucketsPath) {
+    public static ExtendedStatsBucketPipelineAggregationBuilder extendedStatsBucket(String name, String bucketsPath) {
         return new ExtendedStatsBucketPipelineAggregationBuilder(name, bucketsPath);
     }
 
-    public static PercentilesBucketPipelineAggregationBuilder percentilesBucket(String name,
-                                                                                String bucketsPath) {
+    public static PercentilesBucketPipelineAggregationBuilder percentilesBucket(String name, String bucketsPath) {
         return new PercentilesBucketPipelineAggregationBuilder(name, bucketsPath);
     }
 
@@ -87,23 +85,19 @@ public final class PipelineAggregatorBuilders {
         return new MovAvgPipelineAggregationBuilder(name, bucketsPath);
     }
 
-    public static BucketScriptPipelineAggregationBuilder bucketScript(String name,
-            Map<String, String> bucketsPathsMap, Script script) {
+    public static BucketScriptPipelineAggregationBuilder bucketScript(String name, Map<String, String> bucketsPathsMap, Script script) {
         return new BucketScriptPipelineAggregationBuilder(name, bucketsPathsMap, script);
     }
 
-    public static BucketScriptPipelineAggregationBuilder bucketScript(String name, Script script,
-            String... bucketsPaths) {
+    public static BucketScriptPipelineAggregationBuilder bucketScript(String name, Script script, String... bucketsPaths) {
         return new BucketScriptPipelineAggregationBuilder(name, script, bucketsPaths);
     }
 
-    public static BucketSelectorPipelineAggregationBuilder bucketSelector(String name,
-            Map<String, String> bucketsPathsMap, Script script) {
+    public static BucketSelectorPipelineAggregationBuilder bucketSelector(String name, Map<String, String> bucketsPathsMap, Script script) {
         return new BucketSelectorPipelineAggregationBuilder(name, bucketsPathsMap, script);
     }
 
-    public static BucketSelectorPipelineAggregationBuilder bucketSelector(String name, Script script,
-            String... bucketsPaths) {
+    public static BucketSelectorPipelineAggregationBuilder bucketSelector(String name, Script script, String... bucketsPaths) {
         return new BucketSelectorPipelineAggregationBuilder(name, script, bucketsPaths);
     }
 
@@ -111,8 +105,7 @@ public final class PipelineAggregatorBuilders {
         return new BucketSortPipelineAggregationBuilder(name, sorts);
     }
 
-    public static CumulativeSumPipelineAggregationBuilder cumulativeSum(String name,
-            String bucketsPath) {
+    public static CumulativeSumPipelineAggregationBuilder cumulativeSum(String name, String bucketsPath) {
         return new CumulativeSumPipelineAggregationBuilder(name, bucketsPath);
     }
 
@@ -120,8 +113,7 @@ public final class PipelineAggregatorBuilders {
         return new SerialDiffPipelineAggregationBuilder(name, bucketsPath);
    }
 
-    public static MovFnPipelineAggregationBuilder movingFunction(String name, Script script,
-            String bucketsPaths, int window) {
+    public static MovFnPipelineAggregationBuilder movingFunction(String name, Script script, String bucketsPaths, int window) {
         return new MovFnPipelineAggregationBuilder(name, bucketsPaths, script, window);
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java
similarity index 88%
rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java
index d9aa2ae0ebc..10b99bb48fb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java
@@ -17,14 +17,11 @@
  * under the License.
  */
-package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg;
+package org.elasticsearch.search.aggregations.pipeline;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
-import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder;
 
 import java.io.IOException;
 import java.util.Map;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java
similarity index 83%
rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java
index 776862a48d5..4bb85abd540 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketPipelineAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregator.java
@@ -17,15 +17,12 @@
  * under the License.
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; import java.io.IOException; import java.util.List; @@ -35,7 +32,7 @@ public class AvgBucketPipelineAggregator extends BucketMetricsPipelineAggregator private int count = 0; private double sum = 0; - protected AvgBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format, + AvgBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format, Map metaData) { super(name, bucketsPaths, gapPolicy, format, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricValue.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricValue.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricValue.java index be22679a4e1..b6269e8161d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricValue.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java index 3f29a3bfdc0..0e348563235 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsParser.java @@ -17,13 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java index c77922eff2a..27da9dea530 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -27,9 +27,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.MultiBucketAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.Collection; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java similarity index 94% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java index 981b21346ad..0acd5137520 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipelineAggregator.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -47,7 +47,7 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg protected final DocValueFormat format; protected final GapPolicy gapPolicy; - protected BucketMetricsPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format, + BucketMetricsPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat format, Map metaData) { super(name, bucketsPaths, metaData); this.gapPolicy = gapPolicy; @@ -57,7 +57,7 @@ public abstract class BucketMetricsPipelineAggregator extends SiblingPipelineAgg /** * Read from a stream. */ - protected BucketMetricsPipelineAggregator(StreamInput in) throws IOException { + BucketMetricsPipelineAggregator(StreamInput in) throws IOException { super(in); format = in.readNamedWriteable(DocValueFormat.class); gapPolicy = GapPolicy.readFrom(in); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java index a63fd005f9c..db56779559a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketscript; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -26,9 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java index 042a30695c6..bd9371815fa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregator.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketscript; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,8 +29,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -48,9 +46,9 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator { private final Script script; private final Map bucketsPathsMap; - public BucketScriptPipelineAggregator(String name, Map bucketsPathsMap, Script script, DocValueFormat formatter, + BucketScriptPipelineAggregator(String name, Map bucketsPathsMap, Script script, DocValueFormat formatter, GapPolicy gapPolicy, Map metadata) { - super(name, bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]), metadata); + super(name, bucketsPathsMap.values().toArray(new String[0]), metadata); this.bucketsPathsMap = bucketsPathsMap; this.script = script; this.formatter = formatter; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java index cb8ba81cee6..f0497932b21 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketselector; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -25,9 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java index 06beab04aa6..a17e710c754 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketselector/BucketSelectorPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregator.java @@ -17,8 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketselector; - +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -28,7 +27,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -40,14 +38,12 @@ import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resol public class BucketSelectorPipelineAggregator extends PipelineAggregator { private GapPolicy gapPolicy; - private Script script; - private Map bucketsPathsMap; - public BucketSelectorPipelineAggregator(String name, Map bucketsPathsMap, Script script, GapPolicy gapPolicy, + BucketSelectorPipelineAggregator(String name, Map bucketsPathsMap, Script script, GapPolicy gapPolicy, Map metadata) { - super(name, bucketsPathsMap.values().toArray(new String[bucketsPathsMap.size()]), metadata); + super(name, bucketsPathsMap.values().toArray(new String[0]), metadata); this.bucketsPathsMap = bucketsPathsMap; this.script = script; this.gapPolicy = gapPolicy; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java rename to 
server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java index 15c37061cd9..0ce4c087206 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketsort; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,9 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java similarity index 94% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java index e10d5c35800..e98fdec9927 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregator.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketsort; +package org.elasticsearch.search.aggregations.pipeline; import org.apache.lucene.util.PriorityQueue; @@ -26,9 +26,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -47,9 +45,9 @@ public class BucketSortPipelineAggregator extends PipelineAggregator { private final Integer size; private final GapPolicy gapPolicy; - public BucketSortPipelineAggregator(String name, List sorts, int from, Integer size, GapPolicy gapPolicy, + BucketSortPipelineAggregator(String name, List sorts, int from, Integer size, GapPolicy gapPolicy, Map metadata) { - super(name, sorts.stream().map(s -> s.getFieldName()).toArray(String[]::new), metadata); + super(name, sorts.stream().map(FieldSortBuilder::getFieldName).toArray(String[]::new), metadata); this.sorts = sorts; this.from = from; this.size = size; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java index 209af3c03a7..6230fff0b0d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.cumulativesum; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -30,9 +30,6 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java similarity index 91% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java index e1441132452..a70144b421a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/cumulativesum/CumulativeSumPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregator.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.cumulativesum; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,8 +29,6 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -44,8 +42,8 @@ import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.resol public class CumulativeSumPipelineAggregator extends PipelineAggregator { private final DocValueFormat formatter; - public CumulativeSumPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, - Map metadata) { + CumulativeSumPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, + Map metadata) { super(name, bucketsPaths, metadata); this.formatter = formatter; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/Derivative.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/Derivative.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/Derivative.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/Derivative.java index 56f12c0876b..3e2723dd558 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/Derivative.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/Derivative.java
@@ -17,16 +17,14 @@
  * under the License.
  */
-package org.elasticsearch.search.aggregations.pipeline.derivative;
-
-import org.elasticsearch.search.aggregations.pipeline.SimpleValue;
+package org.elasticsearch.search.aggregations.pipeline;
 
 public interface Derivative extends SimpleValue {
 
     /**
      * Returns the normalized value. If no normalised factor has been specified
      * this method will return {@link #value()}
-     * 
+     *
      * @return the normalized value
      */
     double normalizedValue();
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
similarity index 98%
rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
index 5fac90b0948..a8ee1293dc9 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java
@@ -17,7 +17,7 @@
  * under the License.
  */
-package org.elasticsearch.search.aggregations.pipeline.derivative;
+package org.elasticsearch.search.aggregations.pipeline;
 
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
@@ -35,9 +35,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggre
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory;
-import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java
index 3fe60f23cf3..303691d1ceb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/DerivativePipelineAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregator.java
@@ -17,7 +17,7 @@
  * under the License.
*/ -package org.elasticsearch.search.aggregations.pipeline.derivative; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -29,7 +29,6 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -45,8 +44,8 @@ public class DerivativePipelineAggregator extends PipelineAggregator { private final GapPolicy gapPolicy; private final Double xAxisUnits; - public DerivativePipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy, Long xAxisUnits, - Map metadata) { + DerivativePipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy, Long xAxisUnits, + Map metadata) { super(name, bucketsPaths, metadata); this.formatter = formatter; this.gapPolicy = gapPolicy; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/EwmaModel.java similarity index 94% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/EwmaModel.java index 027536854cc..ad2532c3b50 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/EwmaModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/EwmaModel.java @@ -17,14 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -39,7 +37,7 @@ import java.util.Objects; public class EwmaModel extends MovAvgModel { public static final String NAME = "ewma"; - public static final double DEFAULT_ALPHA = 0.3; + private static final double DEFAULT_ALPHA = 0.3; /** * Controls smoothing of data. Also known as "level" value. 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java similarity index 91% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java index 9e3c7cf88f6..a92fcff9d61 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucket.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.metrics.ExtendedStats; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java similarity index 92% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java index d5e9e4f11e5..b602f18dba3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketParser.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketParser.java @@ -17,11 +17,10 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import java.io.IOException; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java index 84dcb03fbe9..10347e40354 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -25,8 +25,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; import java.util.Collection; @@ -113,4 +111,4 @@ public class ExtendedStatsBucketPipelineAggregationBuilder public String getWriteableName() { return NAME; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java index 03ed20d494f..307bf505229 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregator.java @@ -17,15 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; import java.io.IOException; import java.util.List; @@ -39,7 +37,7 @@ public class ExtendedStatsBucketPipelineAggregator extends BucketMetricsPipeline private double max = Double.NEGATIVE_INFINITY; private double sumOfSqrs = 1; - protected ExtendedStatsBucketPipelineAggregator(String name, String[] bucketsPaths, double sigma, GapPolicy gapPolicy, + ExtendedStatsBucketPipelineAggregator(String name, String[] bucketsPaths, double sigma, GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); this.sigma = sigma; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/HoltLinearModel.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/HoltLinearModel.java index d029bde29ad..ec40d2b18b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltLinearModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/HoltLinearModel.java @@ -17,14 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; @@ -38,8 +36,8 @@ import java.util.Objects; public class HoltLinearModel extends MovAvgModel { public static final String NAME = "holt"; - public static final double DEFAULT_ALPHA = 0.3; - public static final double DEFAULT_BETA = 0.1; + private static final double DEFAULT_ALPHA = 0.3; + private static final double DEFAULT_BETA = 0.1; /** * Controls smoothing of data. Also known as "level" value. 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/HoltWintersModel.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/HoltWintersModel.java
index d61cb5c4187..df42689a211 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/HoltWintersModel.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/HoltWintersModel.java
@@ -17,7 +17,7 @@
  * under the License.
  */
-package org.elasticsearch.search.aggregations.pipeline.movavg.models;
+package org.elasticsearch.search.aggregations.pipeline;
 
 import org.elasticsearch.ElasticsearchParseException;
 
@@ -27,8 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions;
 
 import java.io.IOException;
 import java.text.ParseException;
@@ -44,12 +42,12 @@ import java.util.Objects;
 public class HoltWintersModel extends MovAvgModel {
     public static final String NAME = "holt_winters";
 
-    public static final double DEFAULT_ALPHA = 0.3;
-    public static final double DEFAULT_BETA = 0.1;
-    public static final double DEFAULT_GAMMA = 0.3;
-    public static final int DEFAULT_PERIOD = 1;
-    public static final SeasonalityType DEFAULT_SEASONALITY_TYPE = SeasonalityType.ADDITIVE;
-    public static final boolean DEFAULT_PAD = false;
+    private static final double DEFAULT_ALPHA = 0.3;
+    private static final double DEFAULT_BETA = 0.1;
+    private static final double DEFAULT_GAMMA = 0.3;
+    private static final int DEFAULT_PERIOD = 1;
+    private static final SeasonalityType DEFAULT_SEASONALITY_TYPE = SeasonalityType.ADDITIVE;
+    private static final boolean DEFAULT_PAD = false;
 
     public enum SeasonalityType {
         ADDITIVE((byte) 0, "add"), MULTIPLICATIVE((byte) 1, "mult");
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/InternalBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/InternalBucketMetricValue.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java
index a7ef024028f..8a3de634dbf 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/InternalBucketMetricValue.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValue.java
@@ -17,7 +17,7 @@
  * under the License.
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -26,7 +26,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java similarity index 91% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java index 5f6c5793016..b8d5245846a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivative.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivative.java @@ -17,14 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.derivative; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.List; @@ -34,7 +32,7 @@ import java.util.Objects; public class InternalDerivative extends InternalSimpleValue implements Derivative { private final double normalizationFactor; - public InternalDerivative(String name, double value, double normalizationFactor, DocValueFormat formatter, + InternalDerivative(String name, double value, double normalizationFactor, DocValueFormat formatter, List pipelineAggregators, Map metaData) { super(name, value, formatter, pipelineAggregators, metaData); this.normalizationFactor = normalizationFactor; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java index c7f2943bfcf..b0b78eb0120 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucket.java @@ -17,13 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.List; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java index 97b43e26069..940511619b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucket.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.InternalMax; import org.elasticsearch.search.aggregations.metrics.Percentile; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.Arrays; @@ -42,7 +41,7 @@ public class InternalPercentilesBucket extends InternalNumericMetricsAggregation private double[] percents; private final transient Map percentileLookups = new HashMap<>(); - public InternalPercentilesBucket(String name, double[] percents, double[] percentiles, + InternalPercentilesBucket(String name, double[] percents, double[] percentiles, DocValueFormat formatter, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java index 2eac04a9581..2aac262a007 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValue.java @@ -35,7 +35,7 @@ public class InternalSimpleValue extends InternalNumericMetricsAggregation.Singl public static final String NAME = "simple_value"; protected final double value; - public InternalSimpleValue(String name, double value, DocValueFormat formatter, List pipelineAggregators, + InternalSimpleValue(String name, double value, DocValueFormat formatter, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); this.format = formatter; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java similarity index 92% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java index 352402fff82..51d3cfc060f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/InternalStatsBucket.java @@ -17,13 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalStats; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.List; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/LinearModel.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/LinearModel.java index 38594052182..310403fca83 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/LinearModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/LinearModel.java @@ -17,15 +17,13 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java index fc2e1cd3e23..852a3e378d0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java @@ -17,14 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java similarity index 84% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java index d17a592c349..046afc5a879 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregator.java @@ -17,15 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; import java.io.IOException; import java.util.ArrayList; @@ -37,7 +34,7 @@ public class MaxBucketPipelineAggregator extends BucketMetricsPipelineAggregator private List maxBucketKeys; private double maxValue; - protected MaxBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, + MaxBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java index 75cf756441b..b44ee869e2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java @@ -17,14 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java similarity index 81% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java index 88595d78261..d9d0e2a0c3a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregator.java @@ -17,15 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; import java.io.IOException; import java.util.ArrayList; @@ -37,7 +34,7 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator private List minBucketKeys; private double minValue; - protected MinBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, + MinBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); } @@ -73,7 +70,7 @@ public class MinBucketPipelineAggregator extends BucketMetricsPipelineAggregator @Override protected InternalAggregation buildAggregation(List pipelineAggregators, Map metadata) { - String[] keys = minBucketKeys.toArray(new String[minBucketKeys.size()]); + String[] keys = minBucketKeys.toArray(new String[0]); return new InternalBucketMetricValue(name(), keys, minValue, format, Collections.emptyList(), metaData()); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgModel.java similarity index 99% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgModel.java index f826c01adce..7c47d2eadf1 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgModel.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.NamedWriteable; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgModelBuilder.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgModelBuilder.java index 0c74ead985e..1cb13dd200c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/MovAvgModelBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgModelBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.ToXContentFragment; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java index 1f36d5395b2..c88cc9ec8fe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java @@ -17,14 +17,14 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.movavg; +package org.elasticsearch.search.aggregations.pipeline; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ParseFieldRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,12 +34,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import java.io.IOException; import java.text.ParseException; @@ -56,13 +51,13 @@ import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator. public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { public static final String NAME = "moving_avg"; - public static final ParseField MODEL = new ParseField("model"); + static final ParseField MODEL = new ParseField("model"); private static final ParseField WINDOW = new ParseField("window"); public static final ParseField SETTINGS = new ParseField("settings"); private static final ParseField PREDICT = new ParseField("predict"); private static final ParseField MINIMIZE = new ParseField("minimize"); private static final DeprecationLogger DEPRECATION_LOGGER - = new DeprecationLogger(Loggers.getLogger(MovAvgPipelineAggregationBuilder.class)); + = new DeprecationLogger(LogManager.getLogger(MovAvgPipelineAggregationBuilder.class)); private String format; private GapPolicy gapPolicy = GapPolicy.SKIP; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java index 196f7cca473..10d1cdc5a71 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregator.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.movavg; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.common.io.stream.StreamInput; @@ -31,9 +31,6 @@ import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; import java.io.IOException; import java.util.ArrayList; @@ -53,8 +50,8 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { private final int predict; private final boolean minimize; - public MovAvgPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy, - int window, int predict, MovAvgModel model, boolean minimize, Map metadata) { + MovAvgPipelineAggregator(String name, String[] bucketsPaths, DocValueFormat formatter, GapPolicy gapPolicy, + int window, int predict, MovAvgModel model, boolean minimize, Map metadata) { super(name, bucketsPaths, metadata); this.formatter = formatter; this.gapPolicy = gapPolicy; @@ -126,9 +123,9 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { if (model.hasValue(values.size())) { double movavg = model.next(values); - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> { - return (InternalAggregation) p; - }).collect(Collectors.toList()); + List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) + .map((p) -> (InternalAggregation) p) + .collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), movavg, formatter, new ArrayList(), metaData())); newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(), new InternalAggregations(aggs)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java index 375125dbefc..321e7b3aa6f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -34,9 +34,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorFactory; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorFactory; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.Collection; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java index fc0ba7afac0..4f14df2d66d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregator.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,9 +29,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java index 2f6751b73cc..79e1f740729 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctionScript.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctionScript.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.script.ScriptContext; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java similarity index 99% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java index 020189d4619..6ad1bf714b2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovingFunctions.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ParsedBucketMetricValue.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ParsedBucketMetricValue.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java index 69e99352636..8e8515cb3d7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ParsedBucketMetricValue.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedBucketMetricValue.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -70,4 +70,4 @@ public class ParsedBucketMetricValue extends ParsedSingleValueNumericMetricsAggr bucketMetricValue.setName(name); return bucketMetricValue; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/ParsedDerivative.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/ParsedDerivative.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java index 2b871a99d9a..c017b6d60e3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/derivative/ParsedDerivative.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedDerivative.java @@ -17,14 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.derivative; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; import java.io.IOException; @@ -76,4 +75,4 @@ public class ParsedDerivative extends ParsedSimpleValue implements Derivative { } return builder; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java index caa014c9b49..8823152cd59 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedExtendedStatsBucket.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java index c635ff82735..360ed9de214 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedPercentilesBucket.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java index 84ec05f4eef..bacdc5f4a8a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ParsedStatsBucket.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java similarity index 91% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java index 0dfe9d24582..5e0efb98a0e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucket.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.metrics.Percentiles; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java index 56dd0d3e786..49e065cdeef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; +package org.elasticsearch.search.aggregations.pipeline; import com.carrotsearch.hppc.DoubleArrayList; @@ -29,9 +29,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; import java.util.Arrays; @@ -42,8 +39,7 @@ import java.util.Objects; public class PercentilesBucketPipelineAggregationBuilder extends BucketMetricsPipelineAggregationBuilder { public static final String NAME = "percentiles_bucket"; - - public static final ParseField PERCENTS_FIELD = new ParseField("percents"); + static final ParseField PERCENTS_FIELD = new ParseField("percents"); private double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java similarity index 86% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java index 7f51a99d798..20c38ca05bd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java @@ -17,16 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; +package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -35,12 +32,11 @@ import java.util.List; import java.util.Map; public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator { - public final ParseField PERCENTS_FIELD = new ParseField("percents"); private final double[] percents; private List data; - protected PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy, + PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); this.percents = percents; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java index a7e43c401e8..c2963f610ac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.serialdiff; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -26,9 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -41,7 +39,6 @@ import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator. 
public class SerialDiffPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { public static final String NAME = "serial_diff"; - public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME); private static final ParseField GAP_POLICY = new ParseField("gap_policy"); private static final ParseField LAG = new ParseField("lag"); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java similarity index 92% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java index d438104be7f..cb5d5e583ce 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregator.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.serialdiff; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.EvictingQueue; @@ -31,8 +31,6 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; import java.util.ArrayList; @@ -48,8 +46,8 @@ public class SerialDiffPipelineAggregator extends PipelineAggregator { private GapPolicy gapPolicy; private int lag; - public SerialDiffPipelineAggregator(String name, String[] bucketsPaths, @Nullable DocValueFormat formatter, GapPolicy gapPolicy, - int lag, Map metadata) { + SerialDiffPipelineAggregator(String name, String[] bucketsPaths, @Nullable DocValueFormat formatter, GapPolicy gapPolicy, + int lag, Map metadata) { super(name, bucketsPaths, metadata); this.formatter = formatter; this.gapPolicy = gapPolicy; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java index b78691455d5..c04bd9fa0df 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SiblingPipelineAggregator.java @@ -36,14 +36,14 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; public abstract class SiblingPipelineAggregator extends PipelineAggregator { - protected SiblingPipelineAggregator(String name, String[] bucketsPaths, Map metaData) { + SiblingPipelineAggregator(String name, String[] bucketsPaths, Map metaData) { super(name, bucketsPaths, metaData); } /** * Read from a stream. 
*/ - protected SiblingPipelineAggregator(StreamInput in) throws IOException { + SiblingPipelineAggregator(StreamInput in) throws IOException { super(in); } @@ -55,15 +55,15 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator { InternalMultiBucketAggregation multiBucketsAgg = (InternalMultiBucketAggregation) aggregation; List buckets = multiBucketsAgg.getBuckets(); List newBuckets = new ArrayList<>(); - for (int i = 0; i < buckets.size(); i++) { - InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) buckets.get(i); + for (Bucket bucket1 : buckets) { + InternalMultiBucketAggregation.InternalBucket bucket = (InternalMultiBucketAggregation.InternalBucket) bucket1; InternalAggregation aggToAdd = doReduce(bucket.getAggregations(), reduceContext); - List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map((p) -> { - return (InternalAggregation) p; - }).collect(Collectors.toList()); + List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false) + .map((p) -> (InternalAggregation) p) + .collect(Collectors.toList()); aggs.add(aggToAdd); InternalMultiBucketAggregation.InternalBucket newBucket = multiBucketsAgg.createBucket(new InternalAggregations(aggs), - bucket); + bucket); newBuckets.add(newBucket); } @@ -71,9 +71,9 @@ public abstract class SiblingPipelineAggregator extends PipelineAggregator { } else if (aggregation instanceof InternalSingleBucketAggregation) { InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation; InternalAggregation aggToAdd = doReduce(singleBucketAgg.getAggregations(), reduceContext); - List aggs = StreamSupport.stream(singleBucketAgg.getAggregations().spliterator(), false).map((p) -> { - return (InternalAggregation) p; - }).collect(Collectors.toList()); + List aggs = StreamSupport.stream(singleBucketAgg.getAggregations().spliterator(), false) + .map((p) -> (InternalAggregation) p) + .collect(Collectors.toList()); aggs.add(aggToAdd); return singleBucketAgg.create(new InternalAggregations(aggs)); } else { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SimpleModel.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SimpleModel.java index b54dba242f9..a64131278a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/models/SimpleModel.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SimpleModel.java @@ -17,14 +17,12 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.movavg.models; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; import java.io.IOException; import java.text.ParseException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/SimulatedAnealingMinimizer.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SimulatedAnealingMinimizer.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/SimulatedAnealingMinimizer.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SimulatedAnealingMinimizer.java index 711ee2299cf..e157b2cec1c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/SimulatedAnealingMinimizer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SimulatedAnealingMinimizer.java @@ -17,10 +17,9 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movavg; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.collect.EvictingQueue; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; /** * A cost minimizer which will fit a MovAvgModel to the data. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucket.java similarity index 92% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucket.java index c29a27b8446..dd05d8328a0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucket.java @@ -1,4 +1,4 @@ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats; +package org.elasticsearch.search.aggregations.pipeline; /* * Licensed to Elasticsearch under one or more contributor diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java index c472f1a3487..f943f3318fc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java @@ -17,14 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java similarity index 86% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java index 4b1febf4448..6ba9a2bfed3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregator.java @@ -17,14 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; import java.io.IOException; import java.util.List; @@ -36,7 +34,7 @@ public class StatsBucketPipelineAggregator extends BucketMetricsPipelineAggregat private double min = Double.POSITIVE_INFINITY; private double max = Double.NEGATIVE_INFINITY; - protected StatsBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, + StatsBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java index e415f3adc40..920f7e9b0ac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java @@ -17,14 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder; import java.io.IOException; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java similarity index 82% rename from server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java index 7efbf401d04..1f25651bc8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregator.java @@ -17,15 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; import java.io.IOException; import java.util.List; @@ -34,7 +31,7 @@ import java.util.Map; public class SumBucketPipelineAggregator extends BucketMetricsPipelineAggregator { private double sum = 0; - protected SumBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, + SumBucketPipelineAggregator(String name, String[] bucketsPaths, GapPolicy gapPolicy, DocValueFormat formatter, Map metaData) { super(name, bucketsPaths, gapPolicy, formatter, metaData); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java index c10a721415b..ca20e6ec478 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -45,11 +45,11 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregator; -import 
org.elasticsearch.search.aggregations.pipeline.derivative.InternalDerivative; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalDerivative; +import org.elasticsearch.search.aggregations.pipeline.MovAvgModel; +import org.elasticsearch.search.aggregations.pipeline.SimpleModel; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 626a2264e1f..ac34a96f0d9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -71,10 +71,10 @@ import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetricTests import org.elasticsearch.search.aggregations.metrics.InternalTopHitsTests; import org.elasticsearch.search.aggregations.metrics.InternalValueCountTests; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValueTests; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValueTests; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.InternalPercentilesBucketTests; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.InternalExtendedStatsBucketTests; -import org.elasticsearch.search.aggregations.pipeline.derivative.InternalDerivativeTests; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValueTests; +import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucketTests; +import org.elasticsearch.search.aggregations.pipeline.InternalExtendedStatsBucketTests; +import org.elasticsearch.search.aggregations.pipeline.InternalDerivativeTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java index d8d7f416d2d..4cbc78acad1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesBuilderTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.CumulativeSumPipelineAggregationBuilder; import org.elasticsearch.test.AbstractSerializingTestCase; import org.junit.Before; diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 8b636f2d6a6..7a4e0fb7059 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -35,8 +35,7 @@ import org.elasticsearch.index.query.WrapperQueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders; -import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java index 365b6ddc218..93d966d0a4c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Sum; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; import org.elasticsearch.test.ESIntegTestCase; import java.util.HashMap; @@ -33,7 +33,7 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.maxBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 1619989f38b..ce43f9df408 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -69,7 +69,7 @@ import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalTopHits; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder; import 
org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; @@ -87,7 +87,7 @@ import java.util.function.Function; import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketScript; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java index cb4b024f99d..9604b583e8b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.InternalStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.InternalStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; import java.util.Collections; import java.util.List; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java similarity index 91% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java index 2b53a236c3b..8fdc23a025b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AbstractBucketMetricsTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AbstractBucketMetricsTestCase.java @@ -17,10 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; +import org.elasticsearch.search.aggregations.pipeline.BucketMetricsPipelineAggregationBuilder; public abstract class AbstractBucketMetricsTestCase> extends BasePipelineAggregationTestCase { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java similarity index 96% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index dd8938bc878..c0dd4601175 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg; +package org.elasticsearch.search.aggregations.pipeline; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -41,6 +41,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistog import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.AvgBucketPipelineAggregator; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java index 8514b1a0c0d..5b044af53f2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java @@ -37,7 +37,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.avgBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.avgBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java rename 
to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java index c504aa3f461..0dc10cb7a7a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/AvgBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketTests.java @@ -17,13 +17,11 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; @@ -50,18 +48,18 @@ public class AvgBucketTests extends AbstractBucketMetricsTestCase builder.validate(null, aggBuilders, Collections.emptySet())); assertEquals(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " aggregation does not exist for aggregation [name]: invalid_agg>metric", ex.getMessage()); - + // Now try to point to a single bucket agg AvgBucketPipelineAggregationBuilder builder2 = new AvgBucketPipelineAggregationBuilder("name", "global>metric"); ex = expectThrows(IllegalArgumentException.class, () -> builder2.validate(null, aggBuilders, Collections.emptySet())); assertEquals("The first aggregation in " + PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + " must be a multi-bucket aggregation for aggregation [name] found :" + GlobalAggregationBuilder.class.getName() + " for buckets path: global>metric", ex.getMessage()); - + // Now try to point to a valid multi-bucket agg (no exception should be thrown) AvgBucketPipelineAggregationBuilder builder3 = new AvgBucketPipelineAggregationBuilder("name", "terms>metric"); builder3.validate(null, aggBuilders, Collections.emptySet()); - + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index bd92c73f997..040eb66e7cf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -47,7 +47,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketScript; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java index c213619183b..20684b6383f 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder; import java.util.HashMap; import java.util.Map; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java index 05de849854f..7314533d0b6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -46,8 +46,8 @@ import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSelector; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketSelector; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java index 8dd63942d86..fb882786083 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.bucketselector.BucketSelectorPipelineAggregationBuilder; import java.util.HashMap; import java.util.Map; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java similarity index 98% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java index df2d7e64f46..bc3610fca8e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketsort; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -28,7 +28,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Avg; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -45,7 +44,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSort; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketSort; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java similarity index 96% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java index 48ce6073bea..cb8d8db8cdc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSortTests.java @@ -16,10 +16,11 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketsort; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; +import org.elasticsearch.search.aggregations.pipeline.BucketSortPipelineAggregationBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index 08337ef969f..961db6931fb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -45,8 +45,6 @@ import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java index 3b1514a8c38..edf879ce77f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; -import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder; public class CumulativeSumTests extends BasePipelineAggregationTestCase { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java index aaa296fc317..95710ead1a4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.Sum; -import org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matcher; @@ -45,7 +44,7 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.derivative; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index 5944777b628..7222bc19b59 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -32,8 +32,6 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; @@ -49,8 +47,8 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogra import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.derivative; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java index 4b98e2ee6b2..0196e2c85b0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; public class DerivativeTests extends BasePipelineAggregationTestCase { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index e1aa1dfce3f..436a583695b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucket; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -41,7 +40,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.extendedStatsBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.extendedStatsBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; @@ -367,7 +366,8 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) .extendedBounds(minRandomValue, maxRandomValue) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") + .gapPolicy(GapPolicy.INSERT_ZEROS))) .execute().actionGet(); assertSearchResponse(response); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java similarity index 94% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java index 43303205b46..9930541cb00 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketTests.java @@ -17,15 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/InternalBucketMetricValueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/InternalBucketMetricValueTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java index 85c83d41058..b10767cbeb5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/InternalBucketMetricValueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalBucketMetricValueTests.java @@ -17,11 +17,13 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.pipeline.BucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalAggregationTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java index 3ea7f105574..6522e7591e5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalDerivativeTests.java @@ -17,11 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.derivative; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.pipeline.InternalDerivative; +import org.elasticsearch.search.aggregations.pipeline.ParsedDerivative; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalAggregationTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java similarity index 79% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java index 03481ab7f65..c647e388373 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalExtendedStatsBucketTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.DocValueFormat; @@ -25,6 +25,8 @@ import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.InternalExtendedStatsTests; import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalExtendedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.ParsedExtendedStatsBucket; import java.util.Collections; import java.util.List; @@ -33,8 +35,11 @@ import java.util.Map; public class InternalExtendedStatsBucketTests extends InternalExtendedStatsTests { @Override - protected InternalExtendedStatsBucket createInstance(String name, long count, double sum, double min, double max, double sumOfSqrs, - double sigma, DocValueFormat formatter, List pipelineAggregators, Map metaData) { + protected InternalExtendedStatsBucket createInstance(String name, long count, double sum, double min, + double max, double sumOfSqrs, + double sigma, DocValueFormat formatter, + List pipelineAggregators, + Map metaData) { return new InternalExtendedStatsBucket(name, count, sum, min, max, sumOfSqrs, sigma, formatter, pipelineAggregators, Collections.emptyMap()); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java similarity index 97% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java index c1d3ffeb0e5..176966174c4 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalPercentilesBucketTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.DocValueFormat; @@ -25,6 +25,8 @@ import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.InternalPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.ParsedPercentilesBucket; import org.elasticsearch.test.InternalAggregationTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java index 4841c5e596a..232941ae392 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.PipelineAggregatorBuilders; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -39,8 +40,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilde import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -52,7 +51,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.maxBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.maxBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java index cbf31130d38..c55152c68c3 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MaxBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketTests.java @@ -17,13 +17,11 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java index 82629363f8d..081304d0709 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -38,7 +37,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.minBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.minBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java index eca1db24ff7..317f1360c78 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/MinBucketTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java @@ -17,13 +17,11 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgIT.java similarity index 96% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgIT.java index 41bbf053ff1..bfc04151a5c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.moving.avg; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -29,16 +29,6 @@ import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.metrics.Avg; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; -import org.elasticsearch.search.aggregations.pipeline.SimpleValue; -import org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModelBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -58,8 +48,8 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogra import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; import static org.elasticsearch.search.aggregations.AggregationBuilders.range; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.movingAvg; +import static 
org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.derivative; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.movingAvg; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -204,7 +194,9 @@ public class MovAvgIT extends ESIntegTestCase { } else { // If this isn't a gap, or is a _count, just insert the value - metricValue = target.equals(MetricTarget.VALUE) ? PipelineAggregationHelperTests.calculateMetric(docValues, metric) : mockBucket.count; + metricValue = target.equals(MetricTarget.VALUE) + ? PipelineAggregationHelperTests.calculateMetric(docValues, metric) + : mockBucket.count; } if (window.size() > 0) { @@ -663,8 +655,11 @@ public class MovAvgIT extends ESIntegTestCase { .interval(1) .subAggregation(avg("avg").field(VALUE_FIELD)) .subAggregation( - movingAvg("movavg_values", "avg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy).predict(5))).execute().actionGet(); + movingAvg("movavg_values", "avg") + .window(windowSize) + .modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy).predict(5))) + .execute().actionGet(); assertSearchResponse(response); @@ -879,11 +874,17 @@ public class MovAvgIT extends ESIntegTestCase { .subAggregation(avg("avg").field(VALUE_FIELD)) .subAggregation(derivative("deriv", "avg").gapPolicy(gapPolicy)) .subAggregation( - movingAvg("avg_movavg", "avg").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy).predict(12)) + movingAvg("avg_movavg", "avg") + .window(windowSize) + .modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy) + .predict(12)) .subAggregation( - movingAvg("deriv_movavg", "deriv").window(windowSize).modelBuilder(new SimpleModel.SimpleModelBuilder()) - .gapPolicy(gapPolicy).predict(12)) + movingAvg("deriv_movavg", "deriv") + .window(windowSize) + .modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(gapPolicy) + .predict(12)) ).execute().actionGet(); assertSearchResponse(response); @@ -1281,7 +1282,8 @@ public class MovAvgIT extends ESIntegTestCase { if (expectedCount == null) { assertThat("[_count] movavg is not null", countMovAvg, nullValue()); } else if (Double.isNaN(expectedCount)) { - assertThat("[_count] movavg should be NaN, but is ["+countMovAvg.value()+"] instead", countMovAvg.value(), equalTo(Double.NaN)); + assertThat("[_count] movavg should be NaN, but is ["+countMovAvg.value()+"] instead", + countMovAvg.value(), equalTo(Double.NaN)); } else { assertThat("[_count] movavg is null", countMovAvg, notNullValue()); assertEquals("[_count] movavg does not match expected [" + countMovAvg.value() + " vs " + expectedCount + "]", @@ -1293,7 +1295,8 @@ public class MovAvgIT extends ESIntegTestCase { if (expectedValue == null) { assertThat("[value] movavg is not null", valuesMovAvg, Matchers.nullValue()); } else if (Double.isNaN(expectedValue)) { - assertThat("[value] movavg should be NaN, but is ["+valuesMovAvg.value()+"] instead", valuesMovAvg.value(), equalTo(Double.NaN)); + assertThat("[value] movavg should be NaN, but is ["+valuesMovAvg.value()+"] instead", + valuesMovAvg.value(), equalTo(Double.NaN)); } else { assertThat("[value] movavg is null", valuesMovAvg, notNullValue()); assertEquals("[value] movavg does not match expected [" + valuesMovAvg.value() + " vs " + expectedValue + "]", @@ -1325,8 +1328,8 @@ public class MovAvgIT 
extends ESIntegTestCase { } } - private ValuesSourceAggregationBuilder> randomMetric(String name, - String field) { + private ValuesSourceAggregationBuilder> randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgTests.java similarity index 88% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgTests.java index 659fad3f45c..24ec6d9d685 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgTests.java @@ -17,19 +17,19 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.moving.avg; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.movavg.MovAvgPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersModel.SeasonalityType; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.MovAvgPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.EwmaModel; +import org.elasticsearch.search.aggregations.pipeline.HoltLinearModel; +import org.elasticsearch.search.aggregations.pipeline.HoltWintersModel; +import org.elasticsearch.search.aggregations.pipeline.HoltWintersModel.SeasonalityType; +import org.elasticsearch.search.aggregations.pipeline.LinearModel; +import org.elasticsearch.search.aggregations.pipeline.SimpleModel; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgUnitTests.java similarity index 97% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgUnitTests.java index 55c31013fd9..38ed1c1dc3f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovAvgUnitTests.java @@ -17,15 +17,15 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.moving.avg; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.collect.EvictingQueue; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.EwmaModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltLinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.HoltWintersModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.LinearModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel; -import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; +import org.elasticsearch.search.aggregations.pipeline.EwmaModel; +import org.elasticsearch.search.aggregations.pipeline.HoltLinearModel; +import org.elasticsearch.search.aggregations.pipeline.HoltWintersModel; +import org.elasticsearch.search.aggregations.pipeline.LinearModel; +import org.elasticsearch.search.aggregations.pipeline.MovAvgModel; +import org.elasticsearch.search.aggregations.pipeline.SimpleModel; import org.elasticsearch.test.ESTestCase; import java.text.ParseException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java index 218cbdf62ca..49923640805 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnPipelineAggregationBuilderSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilderSerializationTests.java @@ -17,11 +17,12 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.script.Script; +import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregationBuilder; import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java similarity index 96% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java index db333a8ed7a..842320e2781 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnUnitTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import org.apache.lucene.document.Document; import org.apache.lucene.document.LongPoint; @@ -42,6 +42,9 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; +import org.elasticsearch.search.aggregations.pipeline.MovFnPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctionScript; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctions; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java similarity index 99% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java index 6d0e388e643..f8fe7159694 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnWhitelistedFunctionTests.java @@ -17,9 +17,10 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.movfn; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.collect.EvictingQueue; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctions; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 8f77c305229..330bbc647d9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -29,7 +29,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.aggregations.metrics.Sum; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucket; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -42,7 +41,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.percentilesBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.percentilesBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static 
org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java similarity index 94% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java index a6040aaf9f6..7ad05059a73 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/PercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketTests.java @@ -17,15 +17,13 @@ * under the License. */ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java similarity index 94% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java index 68257045137..8522beeecde 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/serialdiff/SerialDiffIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffIT.java @@ -17,16 +17,13 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.serialdiff; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; -import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -43,7 +40,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.diff; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.diff; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -80,7 +77,8 @@ public class SerialDiffIT extends ESIntegTestCase { } } - private ValuesSourceAggregationBuilder> randomMetric(String name, String field) { + private ValuesSourceAggregationBuilder> randomMetric(String name, String field) { int rand = randomIntBetween(0,3); switch (rand) { @@ -189,7 +187,9 @@ public class SerialDiffIT extends ESIntegTestCase { } else { // If this isn't a gap, or is a _count, just insert the value - metricValue = target.equals(MetricTarget.VALUE) ? PipelineAggregationHelperTests.calculateMetric(docValues, metric) : mockBucket.count; + metricValue = target.equals(MetricTarget.VALUE) + ? 
PipelineAggregationHelperTests.calculateMetric(docValues, metric) + : mockBucket.count; } counter += 1; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java index 7e71be69b64..78bf6954c5e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SerialDifferenceTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.BasePipelineAggregationTestCase; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.serialdiff.SerialDiffPipelineAggregationBuilder; public class SerialDifferenceTests extends BasePipelineAggregationTestCase { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java index f5d409951e3..eddbe47cae8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucket; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -38,7 +37,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.statsBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.statsBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java index bcd90778136..bf2ef7615df 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/StatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketTests.java @@ -17,13 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java index a803b9fe3d4..40499ffb561 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java @@ -37,7 +37,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.sumBucket; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.sumBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java similarity index 92% rename from server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java index be6c7f92342..fdba8785241 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/SumBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketTests.java @@ -17,13 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import java.util.Collections; diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 3c4c0da6322..9ccee034b6b 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -25,8 +25,8 @@ import org.elasticsearch.index.similarity.ScriptedSimilarity.Doc; import org.elasticsearch.index.similarity.ScriptedSimilarity.Field; import org.elasticsearch.index.similarity.ScriptedSimilarity.Query; import org.elasticsearch.index.similarity.ScriptedSimilarity.Term; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; -import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctionScript; +import org.elasticsearch.search.aggregations.pipeline.MovingFunctions; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index facbc6ec84b..fad2b4e1dff 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -120,16 +120,16 @@ import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilde import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.ParsedBucketMetricValue; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ParsedExtendedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative; +import 
org.elasticsearch.search.aggregations.pipeline.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.ParsedBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.ParsedPercentilesBucket; +import org.elasticsearch.search.aggregations.pipeline.PercentilesBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ExtendedStatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedExtendedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.DerivativePipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedDerivative; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java index ea71156b0b0..14b51a942ad 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java @@ -14,7 +14,7 @@ import java.util.Collection; import java.util.Map; import java.util.Objects; -import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSelector; +import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketSelector; public class AggFilter extends PipelineAgg { From f79bdec58af007c247069f64b8f828ee374a7f24 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 23 Oct 2018 22:36:57 +0200 Subject: [PATCH 10/67] INGEST: Document Pipeline Processor (#33418) * Added documentation for Pipeline Processor * Relates #33188 --- docs/reference/ingest/ingest-node.asciidoc | 114 +++++++++++++++++++++ 1 file changed, 114 insertions(+) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index d23fc8de12b..eeb914facc2 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -2109,6 +2109,120 @@ Converts a string to its lowercase equivalent. -------------------------------------------------- // NOTCONSOLE +[[pipeline-processor]] +=== Pipeline Processor +Executes another pipeline. 
+
+[[pipeline-options]]
+.Pipeline Options
+[options="header"]
+|======
+| Name | Required | Default | Description
+| `name` | yes | - | The name of the pipeline to execute
+|======
+
+[source,js]
+--------------------------------------------------
+{
+  "pipeline": {
+    "name": "inner-pipeline"
+  }
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+An example of using this processor for nesting pipelines would be:
+
+Define an inner pipeline:
+
+[source,js]
+--------------------------------------------------
+PUT _ingest/pipeline/pipelineA
+{
+  "description" : "inner pipeline",
+  "processors" : [
+    {
+      "set" : {
+        "field": "inner_pipeline_set",
+        "value": "inner"
+      }
+    }
+  ]
+}
+--------------------------------------------------
+// CONSOLE
+
+Define another pipeline that uses the previously defined inner pipeline:
+
+[source,js]
+--------------------------------------------------
+PUT _ingest/pipeline/pipelineB
+{
+  "description" : "outer pipeline",
+  "processors" : [
+    {
+      "pipeline" : {
+        "name": "pipelineA"
+      }
+    },
+    {
+      "set" : {
+        "field": "outer_pipeline_set",
+        "value": "outer"
+      }
+    }
+  ]
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+Now indexing a document while applying the outer pipeline will see the inner pipeline executed
+from the outer pipeline:
+
+[source,js]
+--------------------------------------------------
+PUT /myindex/_doc/1?pipeline=pipelineB
+{
+  "field": "value"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+Response from the index request:
+
+[source,js]
+--------------------------------------------------
+{
+  "_index": "myindex",
+  "_type": "_doc",
+  "_id": "1",
+  "_version": 1,
+  "result": "created",
+  "_shards": {
+    "total": 2,
+    "successful": 1,
+    "failed": 0
+  },
+  "_seq_no": 0,
+  "_primary_term": 1
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+Indexed document:
+
+[source,js]
+--------------------------------------------------
+{
+  "field": "value",
+  "inner_pipeline_set": "inner",
+  "outer_pipeline_set": "outer"
+}
+--------------------------------------------------
+// NOTCONSOLE
+
 [[remove-processor]]
 === Remove Processor
 Removes existing fields. If one field doesn't exist, an exception will be thrown.

From 1f42024243aa083d8b1cdb6827d7a691fae6d071 Mon Sep 17 00:00:00 2001
From: Julie Tibshirani
Date: Tue, 23 Oct 2018 14:00:53 -0700
Subject: [PATCH 11/67] Mute NodeSubclassTests#testReplaceChildren while we await a fix.

---
 .../java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java
index 90fd7392960..4c763fa95cd 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java
@@ -147,6 +147,7 @@ public class NodeSubclassTests> extends ESTestCas
     /**
      * Test {@link Node#replaceChildren} implementation on {@link #subclass}.
*/ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34775") public void testReplaceChildren() throws Exception { Constructor ctor = longestCtor(subclass); Object[] nodeCtorArgs = ctorArgs(ctor); From 0efba0675e633e98660447dadd0b71183226934e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 23 Oct 2018 23:24:32 +0200 Subject: [PATCH 12/67] [CCR] Add qa test library (#34611) * Introduced test qa lib that all CCR qa modules depend on to avoid test code duplication. --- x-pack/plugin/ccr/qa/build.gradle | 7 + x-pack/plugin/ccr/qa/chain/build.gradle | 1 + .../org/elasticsearch/xpack/ccr/ChainIT.java | 107 +------- .../build.gradle | 5 +- .../xpack/ccr/CcrMultiClusterLicenseIT.java | 15 +- .../multi-cluster-with-security/build.gradle | 5 +- .../xpack/ccr/FollowIndexSecurityIT.java | 224 +--------------- .../plugin/ccr/qa/multi-cluster/build.gradle | 5 +- .../xpack/ccr/FollowIndexIT.java | 176 +------------ .../xpack/ccr/ESCCRRestTestCase.java | 245 ++++++++++++++++++ 10 files changed, 294 insertions(+), 496 deletions(-) create mode 100644 x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java diff --git a/x-pack/plugin/ccr/qa/build.gradle b/x-pack/plugin/ccr/qa/build.gradle index dc44f8f753d..f408e6a78b6 100644 --- a/x-pack/plugin/ccr/qa/build.gradle +++ b/x-pack/plugin/ccr/qa/build.gradle @@ -1,5 +1,12 @@ import org.elasticsearch.gradle.test.RestIntegTestTask +apply plugin: 'elasticsearch.build' +test.enabled = false + +dependencies { + compile project(':test:framework') +} + subprojects { project.tasks.withType(RestIntegTestTask) { final File xPackResources = new File(xpackProject('plugin').projectDir, 'src/test/resources') diff --git a/x-pack/plugin/ccr/qa/chain/build.gradle b/x-pack/plugin/ccr/qa/chain/build.gradle index 7b3e20f86ce..f93feb4a66a 100644 --- a/x-pack/plugin/ccr/qa/chain/build.gradle +++ b/x-pack/plugin/ccr/qa/chain/build.gradle @@ -5,6 +5,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('ccr'), configuration: 'runtime') + testCompile project(':x-pack:plugin:ccr:qa') } task leaderClusterTest(type: RestIntegTestTask) { diff --git a/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java b/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java index 1a8a8e0096f..e5a37aa829b 100644 --- a/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java +++ b/x-pack/plugin/ccr/qa/chain/src/test/java/org/elasticsearch/xpack/ccr/ChainIT.java @@ -6,34 +6,10 @@ package org.elasticsearch.xpack.ccr; -import org.apache.http.HttpHost; -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.ESRestTestCase; -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; - -public class ChainIT extends 
ESRestTestCase { - - private final String targetCluster = System.getProperty("tests.target_cluster"); - - @Override - protected boolean preserveClusterUponCompletion() { - return true; - } +public class ChainIT extends ESCCRRestTestCase { public void testFollowIndex() throws Exception { final int numDocs = 128; @@ -60,23 +36,23 @@ public class ChainIT extends ESRestTestCase { index(client(), leaderIndexName, Integer.toString(i), "field", i, "filtered_field", "true"); } refresh(leaderIndexName); - verifyDocuments(leaderIndexName, numDocs); + verifyDocuments(leaderIndexName, numDocs, "filtered_field:true"); } else if ("middle".equals(targetCluster)) { logger.info("Running against middle cluster"); followIndex("leader_cluster", leaderIndexName, middleIndexName); - assertBusy(() -> verifyDocuments(middleIndexName, numDocs)); + assertBusy(() -> verifyDocuments(middleIndexName, numDocs, "filtered_field:true")); try (RestClient leaderClient = buildLeaderClient()) { int id = numDocs; index(leaderClient, leaderIndexName, Integer.toString(id), "field", id, "filtered_field", "true"); index(leaderClient, leaderIndexName, Integer.toString(id + 1), "field", id + 1, "filtered_field", "true"); index(leaderClient, leaderIndexName, Integer.toString(id + 2), "field", id + 2, "filtered_field", "true"); } - assertBusy(() -> verifyDocuments(middleIndexName, numDocs + 3)); + assertBusy(() -> verifyDocuments(middleIndexName, numDocs + 3, "filtered_field:true")); } else if ("follow".equals(targetCluster)) { logger.info("Running against follow cluster"); final String followIndexName = "follow"; followIndex("middle_cluster", middleIndexName, followIndexName); - assertBusy(() -> verifyDocuments(followIndexName, numDocs + 3)); + assertBusy(() -> verifyDocuments(followIndexName, numDocs + 3, "filtered_field:true")); try (RestClient leaderClient = buildLeaderClient()) { int id = numDocs + 3; @@ -86,82 +62,13 @@ public class ChainIT extends ESRestTestCase { } try (RestClient middleClient = buildMiddleClient()) { - assertBusy(() -> verifyDocuments(middleIndexName, numDocs + 6, middleClient)); + assertBusy(() -> verifyDocuments(middleIndexName, numDocs + 6, "filtered_field:true", middleClient)); } - assertBusy(() -> verifyDocuments(followIndexName, numDocs + 6)); + assertBusy(() -> verifyDocuments(followIndexName, numDocs + 6, "filtered_field:true")); } else { fail("unexpected target cluster [" + targetCluster + "]"); } } - private static void index(RestClient client, String index, String id, Object... 
fields) throws IOException { - XContentBuilder document = jsonBuilder().startObject(); - for (int i = 0; i < fields.length; i += 2) { - document.field((String) fields[i], fields[i + 1]); - } - document.endObject(); - final Request request = new Request("POST", "/" + index + "/_doc/" + id); - request.setJsonEntity(Strings.toString(document)); - assertOK(client.performRequest(request)); - } - - private static void refresh(String index) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh"))); - } - - private static void followIndex(String leaderCluster, String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); - request.setJsonEntity( - "{\"leader_cluster\": \"" + leaderCluster + "\", \"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); - } - - private static void verifyDocuments(String index, int expectedNumDocs) throws IOException { - verifyDocuments(index, expectedNumDocs, client()); - } - - private static void verifyDocuments(final String index, final int expectedNumDocs, final RestClient client) throws IOException { - final Request request = new Request("GET", "/" + index + "/_search"); - request.addParameter("size", Integer.toString(expectedNumDocs)); - request.addParameter("sort", "field:asc"); - request.addParameter("q", "filtered_field:true"); - Map response = toMap(client.performRequest(request)); - - int numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertThat(numDocs, equalTo(expectedNumDocs)); - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), equalTo(expectedNumDocs)); - for (int i = 0; i < expectedNumDocs; i++) { - int value = (int) XContentMapValues.extractValue("_source.field", (Map) hits.get(i)); - assertThat(i, equalTo(value)); - } - } - - private static Map toMap(Response response) throws IOException { - return toMap(EntityUtils.toString(response.getEntity())); - } - - private static Map toMap(String response) { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); - } - - private RestClient buildLeaderClient() throws IOException { - assert "leader".equals(targetCluster) == false; - return buildClient(System.getProperty("tests.leader_host")); - } - - private RestClient buildMiddleClient() throws IOException { - assert "middle".equals(targetCluster) == false; - return buildClient(System.getProperty("tests.middle_host")); - } - - private RestClient buildClient(final String url) throws IOException { - int portSeparator = url.lastIndexOf(':'); - HttpHost httpHost = new HttpHost(url.substring(0, portSeparator), - Integer.parseInt(url.substring(portSeparator + 1)), getProtocol()); - return buildClient(Settings.EMPTY, new HttpHost[]{httpHost}); - } - } diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle index 845c9df533d..7f1dd2c3211 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle @@ -5,6 +5,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('ccr'), configuration: 'runtime') + testCompile 
project(':x-pack:plugin:ccr:qa:') } task leaderClusterTest(type: RestIntegTestTask) { @@ -17,7 +18,7 @@ leaderClusterTestCluster { } leaderClusterTestRunner { - systemProperty 'tests.is_leader_cluster', 'true' + systemProperty 'tests.target_cluster', 'leader' } task writeJavaPolicy { @@ -49,7 +50,7 @@ followClusterTestCluster { followClusterTestRunner { systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" - systemProperty 'tests.is_leader_cluster', 'false' + systemProperty 'tests.target_cluster', 'follow' systemProperty 'tests.leader_host', "${-> leaderClusterTest.nodes.get(0).httpUri()}" systemProperty 'log', "${-> followClusterTest.getNodes().get(0).homeDir}/logs/${-> followClusterTest.getNodes().get(0).clusterName}.log" finalizedBy 'leaderClusterTestCluster#stop' diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 988f6b97bd2..7e85c19d7b9 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -9,9 +9,7 @@ package org.elasticsearch.xpack.ccr; import org.apache.lucene.util.Constants; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.test.rest.ESRestTestCase; import java.nio.file.Files; import java.util.Iterator; @@ -22,17 +20,10 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; -public class CcrMultiClusterLicenseIT extends ESRestTestCase { - - private final boolean runningAgainstLeaderCluster = Booleans.parseBoolean(System.getProperty("tests.is_leader_cluster")); - - @Override - protected boolean preserveClusterUponCompletion() { - return true; - } +public class CcrMultiClusterLicenseIT extends ESCCRRestTestCase { public void testFollow() { - if (runningAgainstLeaderCluster == false) { + if ("follow".equals(targetCluster)) { final Request request = new Request("PUT", "/follower/_ccr/follow"); request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"leader\"}"); assertNonCompliantLicense(request); @@ -41,7 +32,7 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase { public void testAutoFollow() throws Exception { assumeFalse("windows is the worst", Constants.WINDOWS); - if (runningAgainstLeaderCluster == false) { + if ("follow".equals(targetCluster)) { final Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); request.setJsonEntity("{\"leader_index_patterns\":[\"*\"], \"leader_cluster\": \"leader_cluster\"}"); client().performRequest(request); diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle index 418c4e6d249..f005a71b165 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle @@ -5,6 +5,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 
'testArtifacts') testCompile project(path: xpackModule('ccr'), configuration: 'runtime') + testCompile project(':x-pack:plugin:ccr:qa') } task leaderClusterTest(type: RestIntegTestTask) { @@ -35,7 +36,7 @@ leaderClusterTestCluster { } leaderClusterTestRunner { - systemProperty 'tests.is_leader_cluster', 'true' + systemProperty 'tests.target_cluster', 'leader' } task followClusterTest(type: RestIntegTestTask) {} @@ -66,7 +67,7 @@ followClusterTestCluster { } followClusterTestRunner { - systemProperty 'tests.is_leader_cluster', 'false' + systemProperty 'tests.target_cluster', 'follow' systemProperty 'tests.leader_host', "${-> leaderClusterTest.nodes.get(0).httpUri()}" finalizedBy 'leaderClusterTestCluster#stop' } diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index d5e7cbcce49..18e061f3790 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -5,39 +5,24 @@ */ package org.elasticsearch.xpack.ccr; -import org.apache.http.HttpHost; -import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.rest.ESRestTestCase; -import java.io.IOException; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; -public class FollowIndexSecurityIT extends ESRestTestCase { - - private final boolean runningAgainstLeaderCluster = Booleans.parseBoolean(System.getProperty("tests.is_leader_cluster")); +public class FollowIndexSecurityIT extends ESCCRRestTestCase { @Override protected Settings restClientSettings() { @@ -55,16 +40,11 @@ public class FollowIndexSecurityIT extends ESRestTestCase { .build(); } - @Override - protected boolean preserveClusterUponCompletion() { - return true; - } - public void testFollowIndex() throws Exception { final int numDocs = 16; final String allowedIndex = "allowed-index"; final String unallowedIndex = "unallowed-index"; - if (runningAgainstLeaderCluster) { + if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); Settings indexSettings = Settings.builder().put("index.soft_deletes.enabled", true).build(); 
createIndex(allowedIndex, indexSettings); @@ -78,10 +58,10 @@ public class FollowIndexSecurityIT extends ESRestTestCase { index(unallowedIndex, Integer.toString(i), "field", i); } refresh(allowedIndex); - verifyDocuments(adminClient(), allowedIndex, numDocs); + verifyDocuments(allowedIndex, numDocs, "*:*"); } else { - follow(client(), allowedIndex, allowedIndex); - assertBusy(() -> verifyDocuments(client(), allowedIndex, numDocs)); + followIndex(client(), "leader_cluster", allowedIndex, allowedIndex); + assertBusy(() -> verifyDocuments(allowedIndex, numDocs, "*:*")); assertThat(countCcrNodeTasks(), equalTo(1)); assertBusy(() -> verifyCcrMonitoring(allowedIndex, allowedIndex)); assertOK(client().performRequest(new Request("POST", "/" + allowedIndex + "/_ccr/pause_follow"))); @@ -110,30 +90,31 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(e.getMessage(), containsString("follow index [" + allowedIndex + "] does not have ccr metadata")); // User does not have manage_follow_index index privilege for 'unallowedIndex': - e = expectThrows(ResponseException.class, () -> follow(client(), unallowedIndex, unallowedIndex)); + e = expectThrows(ResponseException.class, () -> followIndex(client(), "leader_cluster", unallowedIndex, unallowedIndex)); assertThat(e.getMessage(), containsString("action [indices:admin/xpack/ccr/put_follow] is unauthorized for user [test_ccr]")); // Verify that the follow index has not been created and no node tasks are running - assertThat(indexExists(adminClient(), unallowedIndex), is(false)); + assertThat(indexExists(unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); // User does have manage_follow_index index privilege on 'allowed' index, // but not read / monitor roles on 'disallowed' index: - e = expectThrows(ResponseException.class, () -> follow(client(), unallowedIndex, allowedIndex)); + e = expectThrows(ResponseException.class, () -> followIndex(client(), "leader_cluster", unallowedIndex, allowedIndex)); assertThat(e.getMessage(), containsString("insufficient privileges to follow index [unallowed-index], " + "privilege for action [indices:monitor/stats] is missing, " + "privilege for action [indices:data/read/xpack/ccr/shard_changes] is missing")); // Verify that the follow index has not been created and no node tasks are running - assertThat(indexExists(adminClient(), unallowedIndex), is(false)); + assertThat(indexExists(unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); - follow(adminClient(), unallowedIndex, unallowedIndex); + followIndex(adminClient(), "leader_cluster", unallowedIndex, unallowedIndex); pauseFollow(adminClient(), unallowedIndex); e = expectThrows(ResponseException.class, () -> resumeFollow(unallowedIndex)); assertThat(e.getMessage(), containsString("insufficient privileges to follow index [unallowed-index], " + "privilege for action [indices:monitor/stats] is missing, " + "privilege for action [indices:data/read/xpack/ccr/shard_changes] is missing")); + assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); e = expectThrows(ResponseException.class, () -> client().performRequest(new Request("POST", "/" + unallowedIndex + "/_ccr/unfollow"))); @@ -145,7 +126,7 @@ public class FollowIndexSecurityIT extends ESRestTestCase { } public void testAutoFollowPatterns() throws Exception { - assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); + assumeFalse("Test should only run when both clusters are 
running", "leader".equals(targetCluster)); String allowedIndex = "logs-eu-20190101"; String disallowedIndex = "logs-us-20190101"; @@ -180,9 +161,9 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertBusy(() -> { ensureYellow(allowedIndex); - verifyDocuments(adminClient(), allowedIndex, 5); + verifyDocuments(allowedIndex, 5, "*:*"); }); - assertThat(indexExists(adminClient(), disallowedIndex), is(false)); + assertThat(indexExists(disallowedIndex), is(false)); assertBusy(() -> { verifyCcrMonitoring(allowedIndex, allowedIndex); verifyAutoFollowMonitoring(); @@ -194,181 +175,4 @@ public class FollowIndexSecurityIT extends ESRestTestCase { pauseFollow(client(), allowedIndex); } - private int countCcrNodeTasks() throws IOException { - final Request request = new Request("GET", "/_tasks"); - request.addParameter("detailed", "true"); - Map rsp1 = toMap(adminClient().performRequest(request)); - Map nodes = (Map) rsp1.get("nodes"); - assertThat(nodes.size(), equalTo(1)); - Map node = (Map) nodes.values().iterator().next(); - Map nodeTasks = (Map) node.get("tasks"); - int numNodeTasks = 0; - for (Map.Entry entry : nodeTasks.entrySet()) { - Map nodeTask = (Map) entry.getValue(); - String action = (String) nodeTask.get("action"); - if (action.startsWith("xpack/ccr/shard_follow_task")) { - numNodeTasks++; - } - } - return numNodeTasks; - } - - private static void index(String index, String id, Object... fields) throws IOException { - index(adminClient(), index, id, fields); - } - - private static void index(RestClient client, String index, String id, Object... fields) throws IOException { - XContentBuilder document = jsonBuilder().startObject(); - for (int i = 0; i < fields.length; i += 2) { - document.field((String) fields[i], fields[i + 1]); - } - document.endObject(); - final Request request = new Request("POST", "/" + index + "/_doc/" + id); - request.setJsonEntity(Strings.toString(document)); - assertOK(client.performRequest(request)); - } - - private static void refresh(String index) throws IOException { - assertOK(adminClient().performRequest(new Request("POST", "/" + index + "/_refresh"))); - } - - private static void resumeFollow(String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); - request.setJsonEntity("{\"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); - } - - private static void follow(RestClient client, String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); - request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"" + leaderIndex + - "\", \"poll_timeout\": \"10ms\"}"); - assertOK(client.performRequest(request)); - } - - void verifyDocuments(RestClient client, String index, int expectedNumDocs) throws IOException { - final Request request = new Request("GET", "/" + index + "/_search"); - request.addParameter("pretty", "true"); - request.addParameter("size", Integer.toString(expectedNumDocs)); - request.addParameter("sort", "field:asc"); - Map response = toMap(client.performRequest(request)); - - int numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertThat(numDocs, equalTo(expectedNumDocs)); - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), equalTo(expectedNumDocs)); - for (int i = 0; i < expectedNumDocs; i++) { - int value = (int) 
XContentMapValues.extractValue("_source.field", (Map) hits.get(i)); - assertThat(i, equalTo(value)); - } - } - - private static Map toMap(Response response) throws IOException { - return toMap(EntityUtils.toString(response.getEntity())); - } - - private static Map toMap(String response) { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); - } - - protected static void createIndex(String name, Settings settings) throws IOException { - createIndex(name, settings, ""); - } - - protected static void createIndex(String name, Settings settings, String mapping) throws IOException { - final Request request = new Request("PUT", "/" + name); - request.setJsonEntity("{ \"settings\": " + Strings.toString(settings) + ", \"mappings\" : {" + mapping + "} }"); - assertOK(adminClient().performRequest(request)); - } - - private static void ensureYellow(String index) throws IOException { - Request request = new Request("GET", "/_cluster/health/" + index); - request.addParameter("wait_for_status", "yellow"); - request.addParameter("wait_for_no_relocating_shards", "true"); - request.addParameter("wait_for_no_initializing_shards", "true"); - request.addParameter("timeout", "70s"); - request.addParameter("level", "shards"); - adminClient().performRequest(request); - } - - private RestClient buildLeaderClient() throws IOException { - assert runningAgainstLeaderCluster == false; - String leaderUrl = System.getProperty("tests.leader_host"); - int portSeparator = leaderUrl.lastIndexOf(':'); - HttpHost httpHost = new HttpHost(leaderUrl.substring(0, portSeparator), - Integer.parseInt(leaderUrl.substring(portSeparator + 1)), getProtocol()); - return buildClient(restAdminSettings(), new HttpHost[]{httpHost}); - } - - private static boolean indexExists(RestClient client, String index) throws IOException { - Response response = client.performRequest(new Request("HEAD", "/" + index)); - return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); - } - - private static void pauseFollow(RestClient client, String followIndex) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow"))); - } - - private static void verifyCcrMonitoring(String expectedLeaderIndex, String expectedFollowerIndex) throws IOException { - Request request = new Request("GET", "/.monitoring-*/_search"); - request.setJsonEntity("{\"query\": {\"term\": {\"ccr_stats.leader_index\": \"" + expectedLeaderIndex + "\"}}}"); - Map response; - try { - response = toMap(adminClient().performRequest(request)); - } catch (ResponseException e) { - throw new AssertionError("error while searching", e); - } - - int numberOfOperationsReceived = 0; - int numberOfOperationsIndexed = 0; - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), greaterThanOrEqualTo(1)); - - for (int i = 0; i < hits.size(); i++) { - Map hit = (Map) hits.get(i); - String leaderIndex = (String) XContentMapValues.extractValue("_source.ccr_stats.leader_index", hit); - assertThat(leaderIndex, endsWith(expectedLeaderIndex)); - - final String followerIndex = (String) XContentMapValues.extractValue("_source.ccr_stats.follower_index", hit); - assertThat(followerIndex, equalTo(expectedFollowerIndex)); - - int foundNumberOfOperationsReceived = - (int) XContentMapValues.extractValue("_source.ccr_stats.operations_received", hit); - numberOfOperationsReceived = Math.max(numberOfOperationsReceived, foundNumberOfOperationsReceived); - int 
foundNumberOfOperationsIndexed = - (int) XContentMapValues.extractValue("_source.ccr_stats.number_of_operations_indexed", hit); - numberOfOperationsIndexed = Math.max(numberOfOperationsIndexed, foundNumberOfOperationsIndexed); - } - - assertThat(numberOfOperationsReceived, greaterThanOrEqualTo(1)); - assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); - } - - private static void verifyAutoFollowMonitoring() throws IOException { - Request request = new Request("GET", "/.monitoring-*/_search"); - request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); - Map response; - try { - response = toMap(adminClient().performRequest(request)); - } catch (ResponseException e) { - throw new AssertionError("error while searching", e); - } - - int numberOfSuccessfulFollowIndices = 0; - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), greaterThanOrEqualTo(1)); - - for (int i = 0; i < hits.size(); i++) { - Map hit = (Map) hits.get(i); - - int foundNumberOfOperationsReceived = - (int) XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); - numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfOperationsReceived); - } - - assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); - } - } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index b3b63723848..3e3661aae1a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -5,6 +5,7 @@ apply plugin: 'elasticsearch.standalone-test' dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') testCompile project(path: xpackModule('ccr'), configuration: 'runtime') + testCompile project(':x-pack:plugin:ccr:qa') } task leaderClusterTest(type: RestIntegTestTask) { @@ -18,7 +19,7 @@ leaderClusterTestCluster { } leaderClusterTestRunner { - systemProperty 'tests.is_leader_cluster', 'true' + systemProperty 'tests.target_cluster', 'leader' } task followClusterTest(type: RestIntegTestTask) {} @@ -33,7 +34,7 @@ followClusterTestCluster { } followClusterTestRunner { - systemProperty 'tests.is_leader_cluster', 'false' + systemProperty 'tests.target_cluster', 'follow' systemProperty 'tests.leader_host', "${-> leaderClusterTest.nodes.get(0).httpUri()}" finalizedBy 'leaderClusterTestCluster#stop' } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index ff7dc9e72b5..9383d653de6 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -5,44 +5,23 @@ */ package org.elasticsearch.xpack.ccr; -import org.apache.http.HttpHost; -import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentHelper; -import 
org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.ESRestTestCase; -import java.io.IOException; -import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -public class FollowIndexIT extends ESRestTestCase { - - private final boolean runningAgainstLeaderCluster = Booleans.parseBoolean(System.getProperty("tests.is_leader_cluster")); - - @Override - protected boolean preserveClusterUponCompletion() { - return true; - } +public class FollowIndexIT extends ESCCRRestTestCase { public void testFollowIndex() throws Exception { final int numDocs = 128; final String leaderIndexName = "test_index1"; - if (runningAgainstLeaderCluster) { + if ("leader".equals(targetCluster)) { logger.info("Running against leader cluster"); String mapping = ""; if (randomBoolean()) { // randomly do source filtering on indexing @@ -63,12 +42,12 @@ public class FollowIndexIT extends ESRestTestCase { index(client(), leaderIndexName, Integer.toString(i), "field", i, "filtered_field", "true"); } refresh(leaderIndexName); - verifyDocuments(leaderIndexName, numDocs); + verifyDocuments(leaderIndexName, numDocs, "filtered_field:true"); } else { logger.info("Running against follow cluster"); final String followIndexName = "test_index2"; followIndex(leaderIndexName, followIndexName); - assertBusy(() -> verifyDocuments(followIndexName, numDocs)); + assertBusy(() -> verifyDocuments(followIndexName, numDocs, "filtered_field:true")); // unfollow and then follow and then index a few docs in leader index: pauseFollow(followIndexName); resumeFollow(followIndexName); @@ -78,7 +57,7 @@ public class FollowIndexIT extends ESRestTestCase { index(leaderClient, leaderIndexName, Integer.toString(id + 1), "field", id + 1, "filtered_field", "true"); index(leaderClient, leaderIndexName, Integer.toString(id + 2), "field", id + 2, "filtered_field", "true"); } - assertBusy(() -> verifyDocuments(followIndexName, numDocs + 3)); + assertBusy(() -> verifyDocuments(followIndexName, numDocs + 3, "filtered_field:true")); assertBusy(() -> verifyCcrMonitoring(leaderIndexName, followIndexName)); pauseFollow(followIndexName); @@ -90,7 +69,7 @@ public class FollowIndexIT extends ESRestTestCase { } public void testFollowNonExistingLeaderIndex() throws Exception { - assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); + assumeFalse("Test should only run when both clusters are running", "leader".equals(targetCluster)); ResponseException e = expectThrows(ResponseException.class, () -> resumeFollow("non-existing-index")); assertThat(e.getMessage(), containsString("no such index")); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); @@ -101,7 +80,7 @@ public class FollowIndexIT extends ESRestTestCase { } public void testAutoFollowPatterns() throws Exception { - assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); + assumeFalse("Test should only run when both clusters are running", "leader".equals(targetCluster)); Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}"); 
@@ -128,7 +107,7 @@ public class FollowIndexIT extends ESRestTestCase { assertThat(response.get("number_of_successful_follow_indices"), equalTo(1)); ensureYellow("logs-20190101"); - verifyDocuments("logs-20190101", 5); + verifyDocuments("logs-20190101", 5, "filtered_field:true"); }); assertBusy(() -> { verifyCcrMonitoring("logs-20190101", "logs-20190101"); @@ -136,143 +115,4 @@ public class FollowIndexIT extends ESRestTestCase { }); } - private static void index(RestClient client, String index, String id, Object... fields) throws IOException { - XContentBuilder document = jsonBuilder().startObject(); - for (int i = 0; i < fields.length; i += 2) { - document.field((String) fields[i], fields[i + 1]); - } - document.endObject(); - final Request request = new Request("POST", "/" + index + "/_doc/" + id); - request.setJsonEntity(Strings.toString(document)); - assertOK(client.performRequest(request)); - } - - private static void refresh(String index) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + index + "/_refresh"))); - } - - private static void resumeFollow(String followIndex) throws IOException { - final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); - request.setJsonEntity("{\"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); - } - - private static void followIndex(String leaderIndex, String followIndex) throws IOException { - final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); - request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"" + leaderIndex + - "\", \"poll_timeout\": \"10ms\"}"); - assertOK(client().performRequest(request)); - } - - private static void pauseFollow(String followIndex) throws IOException { - assertOK(client().performRequest(new Request("POST", "/" + followIndex + "/_ccr/pause_follow"))); - } - - private static void verifyDocuments(String index, int expectedNumDocs) throws IOException { - final Request request = new Request("GET", "/" + index + "/_search"); - request.addParameter("size", Integer.toString(expectedNumDocs)); - request.addParameter("sort", "field:asc"); - request.addParameter("q", "filtered_field:true"); - Map response = toMap(client().performRequest(request)); - - int numDocs = (int) XContentMapValues.extractValue("hits.total", response); - assertThat(numDocs, equalTo(expectedNumDocs)); - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), equalTo(expectedNumDocs)); - for (int i = 0; i < expectedNumDocs; i++) { - int value = (int) XContentMapValues.extractValue("_source.field", (Map) hits.get(i)); - assertThat(i, equalTo(value)); - } - } - - private static void verifyCcrMonitoring(final String expectedLeaderIndex, final String expectedFollowerIndex) throws IOException { - Request request = new Request("GET", "/.monitoring-*/_search"); - request.setJsonEntity("{\"query\": {\"term\": {\"ccr_stats.leader_index\": \"" + expectedLeaderIndex + "\"}}}"); - Map response; - try { - response = toMap(client().performRequest(request)); - } catch (ResponseException e) { - throw new AssertionError("error while searching", e); - } - - int numberOfOperationsReceived = 0; - int numberOfOperationsIndexed = 0; - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), greaterThanOrEqualTo(1)); - - for (int i = 0; i < hits.size(); i++) { - Map hit = (Map) hits.get(i); - String leaderIndex = (String) 
XContentMapValues.extractValue("_source.ccr_stats.leader_index", hit); - assertThat(leaderIndex, endsWith(expectedLeaderIndex)); - - final String followerIndex = (String) XContentMapValues.extractValue("_source.ccr_stats.follower_index", hit); - assertThat(followerIndex, equalTo(expectedFollowerIndex)); - - int foundNumberOfOperationsReceived = - (int) XContentMapValues.extractValue("_source.ccr_stats.operations_received", hit); - numberOfOperationsReceived = Math.max(numberOfOperationsReceived, foundNumberOfOperationsReceived); - int foundNumberOfOperationsIndexed = - (int) XContentMapValues.extractValue("_source.ccr_stats.number_of_operations_indexed", hit); - numberOfOperationsIndexed = Math.max(numberOfOperationsIndexed, foundNumberOfOperationsIndexed); - } - - assertThat(numberOfOperationsReceived, greaterThanOrEqualTo(1)); - assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); - } - - private static void verifyAutoFollowMonitoring() throws IOException { - Request request = new Request("GET", "/.monitoring-*/_search"); - request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); - Map response; - try { - response = toMap(client().performRequest(request)); - } catch (ResponseException e) { - throw new AssertionError("error while searching", e); - } - - int numberOfSuccessfulFollowIndices = 0; - - List hits = (List) XContentMapValues.extractValue("hits.hits", response); - assertThat(hits.size(), greaterThanOrEqualTo(1)); - - for (int i = 0; i < hits.size(); i++) { - Map hit = (Map) hits.get(i); - - int foundNumberOfOperationsReceived = - (int) XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); - numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfOperationsReceived); - } - - assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); - } - - private static Map toMap(Response response) throws IOException { - return toMap(EntityUtils.toString(response.getEntity())); - } - - private static Map toMap(String response) { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); - } - - private static void ensureYellow(String index) throws IOException { - Request request = new Request("GET", "/_cluster/health/" + index); - request.addParameter("wait_for_status", "yellow"); - request.addParameter("wait_for_no_relocating_shards", "true"); - request.addParameter("wait_for_no_initializing_shards", "true"); - request.addParameter("timeout", "70s"); - request.addParameter("level", "shards"); - client().performRequest(request); - } - - private RestClient buildLeaderClient() throws IOException { - assert runningAgainstLeaderCluster == false; - String leaderUrl = System.getProperty("tests.leader_host"); - int portSeparator = leaderUrl.lastIndexOf(':'); - HttpHost httpHost = new HttpHost(leaderUrl.substring(0, portSeparator), - Integer.parseInt(leaderUrl.substring(portSeparator + 1)), getProtocol()); - return buildClient(Settings.EMPTY, new HttpHost[]{httpHost}); - } - } diff --git a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java new file mode 100644 index 00000000000..b2e300ec4be --- /dev/null +++ b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java @@ -0,0 +1,245 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr; + +import org.apache.http.HttpHost; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; + +public class ESCCRRestTestCase extends ESRestTestCase { + + protected final String targetCluster = System.getProperty("tests.target_cluster"); + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + protected static void index(String index, String id, Object... fields) throws IOException { + index(adminClient(), index, id, fields); + } + + protected static void index(RestClient client, String index, String id, Object... fields) throws IOException { + XContentBuilder document = jsonBuilder().startObject(); + for (int i = 0; i < fields.length; i += 2) { + document.field((String) fields[i], fields[i + 1]); + } + document.endObject(); + final Request request = new Request("POST", "/" + index + "/_doc/" + id); + request.setJsonEntity(Strings.toString(document)); + assertOK(client.performRequest(request)); + } + + protected static void refresh(String index) throws IOException { + assertOK(adminClient().performRequest(new Request("POST", "/" + index + "/_refresh"))); + } + + protected static void resumeFollow(String followIndex) throws IOException { + final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); + request.setJsonEntity("{\"poll_timeout\": \"10ms\"}"); + assertOK(client().performRequest(request)); + } + + protected static void followIndex(String leaderIndex, String followIndex) throws IOException { + followIndex("leader_cluster", leaderIndex, followIndex); + } + + protected static void followIndex(String leaderCluster, String leaderIndex, String followIndex) throws IOException { + followIndex(client(), leaderCluster, leaderIndex, followIndex); + } + + protected static void followIndex(RestClient client, String leaderCluster, String leaderIndex, String followIndex) throws IOException { + final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); + request.setJsonEntity("{\"leader_cluster\": \"" + leaderCluster + "\", \"leader_index\": \"" + leaderIndex + + "\", \"poll_timeout\": \"10ms\"}"); + assertOK(client.performRequest(request)); + } + + protected static void pauseFollow(String followIndex) throws IOException { + pauseFollow(client(), followIndex); + } + + protected static void pauseFollow(RestClient client, String followIndex) throws IOException { + assertOK(client.performRequest(new Request("POST", 
"/" + followIndex + "/_ccr/pause_follow"))); + } + + protected static void verifyDocuments(final String index, final int expectedNumDocs, final String query) throws IOException { + verifyDocuments(index, expectedNumDocs, query, adminClient()); + } + + protected static void verifyDocuments(final String index, + final int expectedNumDocs, + final String query, + final RestClient client) throws IOException { + final Request request = new Request("GET", "/" + index + "/_search"); + request.addParameter("size", Integer.toString(expectedNumDocs)); + request.addParameter("sort", "field:asc"); + request.addParameter("q", query); + Map response = toMap(client.performRequest(request)); + + int numDocs = (int) XContentMapValues.extractValue("hits.total", response); + assertThat(numDocs, equalTo(expectedNumDocs)); + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), equalTo(expectedNumDocs)); + for (int i = 0; i < expectedNumDocs; i++) { + int value = (int) XContentMapValues.extractValue("_source.field", (Map) hits.get(i)); + assertThat(i, equalTo(value)); + } + } + + protected static void verifyCcrMonitoring(final String expectedLeaderIndex, final String expectedFollowerIndex) throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"ccr_stats.leader_index\": \"" + expectedLeaderIndex + "\"}}}"); + Map response; + try { + response = toMap(adminClient().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int numberOfOperationsReceived = 0; + int numberOfOperationsIndexed = 0; + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map hit = (Map) hits.get(i); + String leaderIndex = (String) XContentMapValues.extractValue("_source.ccr_stats.leader_index", hit); + assertThat(leaderIndex, endsWith(expectedLeaderIndex)); + + final String followerIndex = (String) XContentMapValues.extractValue("_source.ccr_stats.follower_index", hit); + assertThat(followerIndex, equalTo(expectedFollowerIndex)); + + int foundNumberOfOperationsReceived = + (int) XContentMapValues.extractValue("_source.ccr_stats.operations_received", hit); + numberOfOperationsReceived = Math.max(numberOfOperationsReceived, foundNumberOfOperationsReceived); + int foundNumberOfOperationsIndexed = + (int) XContentMapValues.extractValue("_source.ccr_stats.number_of_operations_indexed", hit); + numberOfOperationsIndexed = Math.max(numberOfOperationsIndexed, foundNumberOfOperationsIndexed); + } + + assertThat(numberOfOperationsReceived, greaterThanOrEqualTo(1)); + assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); + } + + protected static void verifyAutoFollowMonitoring() throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); + Map response; + try { + response = toMap(adminClient().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int numberOfSuccessfulFollowIndices = 0; + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map hit = (Map) hits.get(i); + + int foundNumberOfOperationsReceived = + (int) 
XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); + numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfOperationsReceived); + } + + assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); + } + + protected static Map toMap(Response response) throws IOException { + return toMap(EntityUtils.toString(response.getEntity())); + } + + protected static Map toMap(String response) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); + } + + protected static void ensureYellow(String index) throws IOException { + Request request = new Request("GET", "/_cluster/health/" + index); + request.addParameter("wait_for_status", "yellow"); + request.addParameter("wait_for_no_relocating_shards", "true"); + request.addParameter("wait_for_no_initializing_shards", "true"); + request.addParameter("timeout", "70s"); + request.addParameter("level", "shards"); + adminClient().performRequest(request); + } + + protected int countCcrNodeTasks() throws IOException { + final Request request = new Request("GET", "/_tasks"); + request.addParameter("detailed", "true"); + Map rsp1 = toMap(adminClient().performRequest(request)); + Map nodes = (Map) rsp1.get("nodes"); + assertThat(nodes.size(), equalTo(1)); + Map node = (Map) nodes.values().iterator().next(); + Map nodeTasks = (Map) node.get("tasks"); + int numNodeTasks = 0; + for (Map.Entry entry : nodeTasks.entrySet()) { + Map nodeTask = (Map) entry.getValue(); + String action = (String) nodeTask.get("action"); + if (action.startsWith("xpack/ccr/shard_follow_task")) { + numNodeTasks++; + } + } + return numNodeTasks; + } + + protected static void createIndex(String name, Settings settings) throws IOException { + createIndex(name, settings, ""); + } + + protected static void createIndex(String name, Settings settings, String mapping) throws IOException { + final Request request = new Request("PUT", "/" + name); + request.setJsonEntity("{ \"settings\": " + Strings.toString(settings) + ", \"mappings\" : {" + mapping + "} }"); + assertOK(adminClient().performRequest(request)); + } + + protected static boolean indexExists(String index) throws IOException { + Response response = adminClient().performRequest(new Request("HEAD", "/" + index)); + return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); + } + + protected RestClient buildLeaderClient() throws IOException { + assert "leader".equals(targetCluster) == false; + return buildClient(System.getProperty("tests.leader_host")); + } + + protected RestClient buildMiddleClient() throws IOException { + assert "middle".equals(targetCluster) == false; + return buildClient(System.getProperty("tests.middle_host")); + } + + private RestClient buildClient(final String url) throws IOException { + int portSeparator = url.lastIndexOf(':'); + HttpHost httpHost = new HttpHost(url.substring(0, portSeparator), + Integer.parseInt(url.substring(portSeparator + 1)), getProtocol()); + return buildClient(restAdminSettings(), new HttpHost[]{httpHost}); + } + +} From 17adfeb20fd6ca31ddfda09ec61a07b5bfd3f24c Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Tue, 23 Oct 2018 15:59:11 -0600 Subject: [PATCH 13/67] HLRC: Standardize access in *RequestConverters (#34768) With the move to separate RequestConverters classes for each client, some of the access restrictions on the new classes are more open than the prior RequestConverters classes. 
This standardizes the *RequestConverters classes as package-private, final, and with a private constructor so that no instances of the can be inadvertently created. --- .../org/elasticsearch/client/ClusterRequestConverters.java | 2 ++ .../org/elasticsearch/client/GraphRequestConverters.java | 4 +++- .../org/elasticsearch/client/IndicesRequestConverters.java | 5 ++++- .../org/elasticsearch/client/IngestRequestConverters.java | 4 +++- .../org/elasticsearch/client/LicenseRequestConverters.java | 5 ++++- .../org/elasticsearch/client/MigrationRequestConverters.java | 4 +++- .../org/elasticsearch/client/SnapshotRequestConverters.java | 4 +++- .../org/elasticsearch/client/TasksRequestConverters.java | 4 +++- .../org/elasticsearch/client/WatcherRequestConverters.java | 4 +++- .../org/elasticsearch/client/XPackRequestConverters.java | 4 +++- 10 files changed, 31 insertions(+), 9 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterRequestConverters.java index d6c41e804df..4da8d128b98 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterRequestConverters.java @@ -31,6 +31,8 @@ import java.io.IOException; final class ClusterRequestConverters { + private ClusterRequestConverters() {} + static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest) throws IOException { Request request = new Request(HttpPut.METHOD_NAME, "/_cluster/settings"); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java index c1f1e1d115f..f5387047db1 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java @@ -24,7 +24,9 @@ import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import java.io.IOException; -public class GraphRequestConverters { +final class GraphRequestConverters { + + private GraphRequestConverters() {} static Request explore(GraphExploreRequest exploreRequest) throws IOException { String endpoint = RequestConverters.endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore"); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java index 740b87107c1..ea81c88f8fe 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java @@ -53,7 +53,10 @@ import org.elasticsearch.common.Strings; import java.io.IOException; import java.util.Locale; -public class IndicesRequestConverters { +final class IndicesRequestConverters { + + private IndicesRequestConverters() {} + static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) { String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices()); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java index e81d716b60f..06b4c0fd62a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java @@ -30,7 +30,9 @@ import org.elasticsearch.action.ingest.SimulatePipelineRequest; import java.io.IOException; -public class IngestRequestConverters { +final class IngestRequestConverters { + + private IngestRequestConverters() {} static Request getPipeline(GetPipelineRequest getPipelineRequest) { String endpoint = new RequestConverters.EndpointBuilder() diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java index 0daf1c2d947..7bda5f552ff 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java @@ -29,7 +29,10 @@ import org.elasticsearch.client.license.DeleteLicenseRequest; import org.elasticsearch.client.license.GetLicenseRequest; import org.elasticsearch.client.license.PutLicenseRequest; -public class LicenseRequestConverters { +final class LicenseRequestConverters { + + private LicenseRequestConverters() {} + static Request putLicense(PutLicenseRequest putLicenseRequest) { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_xpack", "license").build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java index 2f5309350df..ddd1a2a4345 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java @@ -22,7 +22,9 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpGet; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; -public class MigrationRequestConverters { +final class MigrationRequestConverters { + + private MigrationRequestConverters() {} static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) { RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder() diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java index 7ddd0892585..93fb10bd561 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java @@ -36,7 +36,9 @@ import org.elasticsearch.common.Strings; import java.io.IOException; -public class SnapshotRequestConverters { +final class SnapshotRequestConverters { + + private SnapshotRequestConverters() {} static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) { String[] repositories = getRepositoriesRequest.repositories() == null ? 
Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java index 93b407a82fe..45723dcc938 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java @@ -24,7 +24,9 @@ import org.apache.http.client.methods.HttpPost; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -public class TasksRequestConverters { +final class TasksRequestConverters { + + private TasksRequestConverters() {} static Request cancelTasks(CancelTasksRequest cancelTasksRequest) { Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java index 68e51a64933..64ca53376d7 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java @@ -32,7 +32,9 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; -public class WatcherRequestConverters { +final class WatcherRequestConverters { + + private WatcherRequestConverters() {} static Request startWatchService(StartWatchServiceRequest startWatchServiceRequest) { String endpoint = new RequestConverters.EndpointBuilder() diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java index 1d5b9a41858..9e0c1527403 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java @@ -27,7 +27,9 @@ import java.util.EnumSet; import java.util.Locale; import java.util.stream.Collectors; -public class XPackRequestConverters { +final class XPackRequestConverters { + + private XPackRequestConverters() {} static Request info(XPackInfoRequest infoRequest) { Request request = new Request(HttpGet.METHOD_NAME, "/_xpack"); From 7580f59c9fa87a6361ad4e7182cbaed641ecf770 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 23 Oct 2018 14:27:47 -0700 Subject: [PATCH 14/67] Mute FullClusterRestartIT#testSqlFailsOnIndexWithTwoTypes while we await a fix. 
--- .../org/elasticsearch/xpack/restart/FullClusterRestartIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 8a6944fb870..c112709bbe0 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -414,6 +414,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34774") public void testSqlFailsOnIndexWithTwoTypes() throws IOException { // TODO this isn't going to trigger until we backport to 6.1 assumeTrue("It is only possible to build an index that sql doesn't like before 6.0.0", From c5a07393810990234555ad00b48238b7f4a8846a Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Tue, 23 Oct 2018 15:26:39 -0700 Subject: [PATCH 15/67] Mute SettingsBasedHostProviderIT to avoid future test flakes. --- .../discovery/zen/SettingsBasedHostProviderIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/SettingsBasedHostProviderIT.java b/server/src/test/java/org/elasticsearch/discovery/zen/SettingsBasedHostProviderIT.java index 429950bf853..52f3a05ce08 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/SettingsBasedHostProviderIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/SettingsBasedHostProviderIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.discovery.zen; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.common.settings.Settings; @@ -29,6 +30,7 @@ import static org.elasticsearch.discovery.zen.SettingsBasedHostsProvider.DISCOVE import static org.elasticsearch.discovery.zen.SettingsBasedHostsProvider.LIMIT_LOCAL_PORTS_COUNT; import static org.elasticsearch.transport.TcpTransport.PORT; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34781") @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class SettingsBasedHostProviderIT extends ESIntegTestCase { From da20dfd81c46c61302d32c7d4eb9a56cf24e33fc Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Tue, 23 Oct 2018 16:35:10 -0600 Subject: [PATCH 16/67] Add cluster-wide shard limit warnings (#34021) In a future major version, we will be introducing a soft limit on the number of shards in a cluster based on the number of nodes in the cluster. This limit will be configurable, and checked on operations which create or open shards and issue a warning if the operation would take the cluster over the limit. There is an option to enable strict enforcement of the limit, which turns the warnings into errors. In a future release, the option will be removed and strict enforcement will be the default (and only) behavior. 
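As an illustrative sketch (not part of this patch), the new `cluster.max_shards_per_node` setting is dynamic, so on a cluster running this change the soft limit can be adjusted through the existing cluster settings API. On a three-node cluster, the request below would raise the total allowance from the default 3,000 open shards to 4,500:

[source,js]
--------------------------------------------------
PUT /_cluster/settings
{
  "persistent": {
    "cluster.max_shards_per_node": 1500
  }
}
--------------------------------------------------

Strict enforcement (errors instead of deprecation warnings) can be opted into by starting nodes with the system property `es.enforce_max_shards_per_node=true`.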
--- .../migration/migrate_7_0/cluster.asciidoc | 7 + docs/reference/modules/cluster/misc.asciidoc | 44 +++++- .../cluster/metadata/MetaData.java | 30 +++- .../metadata/MetaDataCreateIndexService.java | 27 +++- .../metadata/MetaDataIndexStateService.java | 34 +++++ .../MetaDataUpdateSettingsService.java | 24 +++ .../common/settings/ClusterSettings.java | 1 + .../elasticsearch/indices/IndicesService.java | 54 +++++++ .../snapshots/RestoreService.java | 2 +- .../MetaDataCreateIndexServiceTests.java | 32 ++++ .../MetaDataIndexStateServiceTests.java | 99 +++++++++++++ .../cluster/shards/ClusterShardLimitIT.java | 140 ++++++++++++++++++ .../indices/IndicesServiceTests.java | 78 ++++++++++ .../indices/cluster/ClusterStateChanges.java | 1 + .../deprecation/ClusterDeprecationChecks.java | 30 ++++ .../xpack/deprecation/DeprecationChecks.java | 2 +- 16 files changed, 597 insertions(+), 8 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java create mode 100644 server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java create mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java diff --git a/docs/reference/migration/migrate_7_0/cluster.asciidoc b/docs/reference/migration/migrate_7_0/cluster.asciidoc index d518d29987d..7343154175b 100644 --- a/docs/reference/migration/migrate_7_0/cluster.asciidoc +++ b/docs/reference/migration/migrate_7_0/cluster.asciidoc @@ -18,3 +18,10 @@ primary shards of the opened index to be allocated. [float] ==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences. + +[float] +==== Cluster-wide shard soft limit +Clusters now have soft limits on the total number of open shards in the cluster +based on the number of nodes and the `cluster.max_shards_per_node` cluster +setting, to prevent accidental operations that would destabilize the cluster. +More information can be found in the <>. \ No newline at end of file diff --git a/docs/reference/modules/cluster/misc.asciidoc b/docs/reference/modules/cluster/misc.asciidoc index 3f12bd255de..f397c3075b7 100644 --- a/docs/reference/modules/cluster/misc.asciidoc +++ b/docs/reference/modules/cluster/misc.asciidoc @@ -22,6 +22,48 @@ user with access to the <> API can make the cluster read-write again. +[[cluster-shard-limit]] + +==== Cluster Shard Limit + +In a Elasticsearch 7.0 and later, there will be a soft limit on the number of +shards in a cluster, based on the number of nodes in the cluster. This is +intended to prevent operations which may unintentionally destabilize the +cluster. Prior to 7.0, actions which would result in the cluster going over the +limit will issue a deprecation warning. + +NOTE: You can set the system property `es.enforce_max_shards_per_node` to `true` +to opt in to strict enforcement of the shard limit. If this system property is +set, actions which would result in the cluster going over the limit will result +in an error, rather than a deprecation warning. This property will be removed in +Elasticsearch 7.0, as strict enforcement of the limit will be the default and +only behavior. + +If an operation, such as creating a new index, restoring a snapshot of an index, +or opening a closed index would lead to the number of shards in the cluster +going over this limit, the operation will issue a deprecation warning. 
+ +If the cluster is already over the limit, due to changes in node membership or +setting changes, all operations that create or open indices will issue warnings +until either the limit is increased as described below, or some indices are +<> or <> to bring the +number of shards below the limit. + +Replicas count towards this limit, but closed indexes do not. An index with 5 +primary shards and 2 replicas will be counted as 15 shards. Any closed index +is counted as 0, no matter how many shards and replicas it contains. + +The limit defaults to 1,000 shards per node, and be dynamically adjusted using +the following property: + +`cluster.max_shards_per_node`:: + + Controls the number of shards allowed in the cluster per node. + +For example, a 3-node cluster with the default setting would allow 3,000 shards +total, across all open indexes. If the above setting is changed to 1,500, then +the cluster would allow 4,500 shards total. + [[user-defined-data]] ==== User Defined Cluster Metadata @@ -109,4 +151,4 @@ Enable or disable allocation for persistent tasks: This setting does not affect the persistent tasks that are already being executed. Only newly created persistent tasks, or tasks that must be reassigned (after a node left the cluster, for example), are impacted by this setting. --- \ No newline at end of file +-- diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index bafbea2e727..8653df73c41 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.apache.logging.log4j.Logger; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.AliasesRequest; @@ -124,9 +123,11 @@ public class MetaData implements Iterable, Diffable, To public interface Custom extends NamedDiffable, ToXContentFragment, ClusterState.FeatureAware { EnumSet context(); - } + public static final Setting SETTING_CLUSTER_MAX_SHARDS_PER_NODE = + Setting.intSetting("cluster.max_shards_per_node", 1000, 1, Property.Dynamic, Property.NodeScope); + public static final Setting SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", false, Property.Dynamic, Property.NodeScope); @@ -162,6 +163,7 @@ public class MetaData implements Iterable, Diffable, To private final ImmutableOpenMap customs; private final transient int totalNumberOfShards; // Transient ? not serializable anyway? 
+ private final int totalOpenIndexShards; private final int numberOfShards; private final String[] allIndices; @@ -183,12 +185,17 @@ public class MetaData implements Iterable, Diffable, To this.customs = customs; this.templates = templates; int totalNumberOfShards = 0; + int totalOpenIndexShards = 0; int numberOfShards = 0; for (ObjectCursor cursor : indices.values()) { totalNumberOfShards += cursor.value.getTotalNumberOfShards(); numberOfShards += cursor.value.getNumberOfShards(); + if (IndexMetaData.State.OPEN.equals(cursor.value.getState())) { + totalOpenIndexShards += cursor.value.getTotalNumberOfShards(); + } } this.totalNumberOfShards = totalNumberOfShards; + this.totalOpenIndexShards = totalOpenIndexShards; this.numberOfShards = numberOfShards; this.allIndices = allIndices; @@ -667,10 +674,29 @@ public class MetaData implements Iterable, Diffable, To } + /** + * Gets the total number of shards from all indices, including replicas and + * closed indices. + * @return The total number shards from all indices. + */ public int getTotalNumberOfShards() { return this.totalNumberOfShards; } + /** + * Gets the total number of open shards from all indices. Includes + * replicas, but does not include shards that are part of closed indices. + * @return The total number of open shards from all indices. + */ + public int getTotalOpenIndexShards() { + return this.totalOpenIndexShards; + } + + /** + * Gets the number of primary shards from all indices, not including + * replicas. + * @return The number of primary shards from all indices. + */ public int getNumberOfShards() { return this.numberOfShards; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 9466b03c442..c327da8afee 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -53,6 +53,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -82,6 +83,7 @@ import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; @@ -587,12 +589,16 @@ public class MetaDataCreateIndexService extends AbstractComponent { private void validate(CreateIndexClusterStateUpdateRequest request, ClusterState state) { validateIndexName(request.index(), state); - validateIndexSettings(request.index(), request.settings(), forbidPrivateIndexSettings); + validateIndexSettings(request.index(), request.settings(), state, forbidPrivateIndexSettings); } - public void validateIndexSettings( - final String indexName, final Settings settings, final boolean forbidPrivateIndexSettings) throws IndexCreationException { + public void validateIndexSettings(String indexName, final Settings settings, final ClusterState clusterState, + final boolean forbidPrivateIndexSettings) throws IndexCreationException { List validationErrors = 
getIndexSettingsValidationErrors(settings, forbidPrivateIndexSettings); + + Optional shardAllocation = checkShardLimit(settings, clusterState, deprecationLogger); + shardAllocation.ifPresent(validationErrors::add); + if (validationErrors.isEmpty() == false) { ValidationException validationException = new ValidationException(); validationException.addValidationErrors(validationErrors); @@ -600,6 +606,21 @@ public class MetaDataCreateIndexService extends AbstractComponent { } } + /** + * Checks whether an index can be created without going over the cluster shard limit. + * + * @param settings The settings of the index to be created. + * @param clusterState The current cluster state. + * @param deprecationLogger The logger to use to emit a deprecation warning, if appropriate. + * @return If present, an error message to be used to reject index creation. If empty, a signal that this operation may be carried out. + */ + static Optional checkShardLimit(Settings settings, ClusterState clusterState, DeprecationLogger deprecationLogger) { + int shardsToCreate = IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.get(settings) + * (1 + IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.get(settings)); + + return IndicesService.checkShardLimit(shardsToCreate, clusterState, deprecationLogger); + } + List getIndexSettingsValidationErrors(final Settings settings, final boolean forbidPrivateIndexSettings) { String customPath = IndexMetaData.INDEX_DATA_PATH_SETTING.get(settings); List validationErrors = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 038c03f342a..f7482edd10d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -36,8 +36,10 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.indices.IndicesService; @@ -50,6 +52,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; +import java.util.Optional; import java.util.Set; /** @@ -175,6 +178,8 @@ public class MetaDataIndexStateService extends AbstractComponent { } } + validateShardLimit(currentState, request.indices(), deprecationLogger); + if (indicesToOpen.isEmpty()) { return currentState; } @@ -217,4 +222,33 @@ public class MetaDataIndexStateService extends AbstractComponent { }); } + /** + * Validates whether a list of indices can be opened without going over the cluster shard limit. Only counts indices which are + * currently closed and will be opened, ignores indices which are already open. + * + * @param currentState The current cluster state. + * @param indices The indices which are to be opened. + * @param deprecationLogger The logger to use to emit a deprecation warning, if appropriate. 
+ * @throws ValidationException If this operation would take the cluster over the limit and enforcement is enabled. + */ + static void validateShardLimit(ClusterState currentState, Index[] indices, DeprecationLogger deprecationLogger) { + int shardsToOpen = Arrays.stream(indices) + .filter(index -> currentState.metaData().index(index).getState().equals(IndexMetaData.State.CLOSE)) + .mapToInt(index -> getTotalShardCount(currentState, index)) + .sum(); + + Optional error = IndicesService.checkShardLimit(shardsToOpen, currentState, deprecationLogger); + if (error.isPresent()) { + ValidationException ex = new ValidationException(); + ex.addValidationError(error.get()); + throw ex; + } + + } + + private static int getTotalShardCount(ClusterState state, Index index) { + IndexMetaData indexMetaData = state.metaData().index(index); + return indexMetaData.getNumberOfShards() * (1 + indexMetaData.getNumberOfReplicas()); + } + } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 75fcdced678..c89e6ddba95 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; @@ -45,9 +46,11 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.util.Arrays; import java.util.HashSet; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.Set; import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext; @@ -115,6 +118,7 @@ public class MetaDataUpdateSettingsService extends AbstractComponent { @Override public ClusterState execute(ClusterState currentState) { + RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable()); MetaData.Builder metaDataBuilder = MetaData.builder(currentState.metaData()); @@ -141,6 +145,18 @@ public class MetaDataUpdateSettingsService extends AbstractComponent { int updatedNumberOfReplicas = openSettings.getAsInt(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, -1); if (updatedNumberOfReplicas != -1 && preserveExisting == false) { + + // Verify that this won't take us over the cluster shard limit. + int totalNewShards = Arrays.stream(request.indices()) + .mapToInt(i -> getTotalNewShards(i, currentState, updatedNumberOfReplicas)) + .sum(); + Optional error = IndicesService.checkShardLimit(totalNewShards, currentState, deprecationLogger); + if (error.isPresent()) { + ValidationException ex = new ValidationException(); + ex.addValidationError(error.get()); + throw ex; + } + // we do *not* update the in sync allocation ids as they will be removed upon the first index // operation which make these copies stale // TODO: update the list once the data is deleted by the node? 
@@ -224,6 +240,14 @@ public class MetaDataUpdateSettingsService extends AbstractComponent { }); } + private int getTotalNewShards(Index index, ClusterState currentState, int updatedNumberOfReplicas) { + IndexMetaData indexMetaData = currentState.metaData().index(index); + int shardsInIndex = indexMetaData.getNumberOfShards(); + int oldNumberOfReplicas = indexMetaData.getNumberOfReplicas(); + int replicaIncrease = updatedNumberOfReplicas - oldNumberOfReplicas; + return replicaIncrease * shardsInIndex; + } + /** * Updates the cluster block only iff the setting exists in the given settings */ diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 4b4ebb7414a..66a4aa65c44 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -196,6 +196,7 @@ public final class ClusterSettings extends AbstractScopedSettings { MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, MetaData.SETTING_READ_ONLY_SETTING, MetaData.SETTING_READ_ONLY_ALLOW_DELETE_SETTING, + MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE, RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING, RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING, diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 0e27d2bf150..206b9e7165a 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -38,6 +38,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.CheckedFunction; @@ -52,6 +53,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -156,6 +158,21 @@ public class IndicesService extends AbstractLifecycleComponent public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout"; public static final Setting INDICES_CACHE_CLEAN_INTERVAL_SETTING = Setting.positiveTimeSetting("indices.cache.cleanup_interval", TimeValue.timeValueMinutes(1), Property.NodeScope); + private static final boolean ENFORCE_MAX_SHARDS_PER_NODE; + + static { + final String ENFORCE_SHARD_LIMIT_KEY = "es.enforce_max_shards_per_node"; + final String enforceMaxShardsPerNode = System.getProperty(ENFORCE_SHARD_LIMIT_KEY); + if (enforceMaxShardsPerNode == null) { + ENFORCE_MAX_SHARDS_PER_NODE = false; + } else if ("true".equals(enforceMaxShardsPerNode)) { + ENFORCE_MAX_SHARDS_PER_NODE = true; + } else { + throw new IllegalArgumentException(ENFORCE_SHARD_LIMIT_KEY + " may only be unset 
or set to [true] but was [" + + enforceMaxShardsPerNode + "]"); + } + } + private final PluginsService pluginsService; private final NodeEnvironment nodeEnv; private final NamedXContentRegistry xContentRegistry; @@ -1352,4 +1369,41 @@ public class IndicesService extends AbstractLifecycleComponent public boolean isMetaDataField(String field) { return mapperRegistry.isMetaDataField(field); } + + /** + * Checks to see if an operation can be performed without taking the cluster over the cluster-wide shard limit. Adds a deprecation + * warning or returns an error message as appropriate + * + * @param newShards The number of shards to be added by this operation + * @param state The current cluster state + * @param deprecationLogger The logger to use for deprecation warnings + * @return If present, an error message to be given as the reason for failing + * an operation. If empty, a sign that the operation is valid. + */ + public static Optional checkShardLimit(int newShards, ClusterState state, DeprecationLogger deprecationLogger) { + Settings theseSettings = state.metaData().settings(); + int nodeCount = state.getNodes().getDataNodes().size(); + + // Only enforce the shard limit if we have at least one data node, so that we don't block + // index creation during cluster setup + if (nodeCount == 0 || newShards < 0) { + return Optional.empty(); + } + int maxShardsPerNode = MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.get(theseSettings); + int maxShardsInCluster = maxShardsPerNode * nodeCount; + int currentOpenShards = state.getMetaData().getTotalOpenIndexShards(); + + if ((currentOpenShards + newShards) > maxShardsInCluster) { + String errorMessage = "this action would add [" + newShards + "] total shards, but this cluster currently has [" + + currentOpenShards + "]/[" + maxShardsInCluster + "] maximum shards open"; + if (ENFORCE_MAX_SHARDS_PER_NODE) { + return Optional.of(errorMessage); + } else { + deprecationLogger.deprecated("In a future major version, this request will fail because {}. 
Before upgrading, " + + "reduce the number of shards in your cluster or adjust the cluster setting [{}].", + errorMessage, MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey()); + } + } + return Optional.empty(); + } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 87ea8cb978f..791b59a1d5b 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -270,7 +270,7 @@ public class RestoreService extends AbstractComponent implements ClusterStateApp // Index doesn't exist - create it and start recovery // Make sure that the index we are about to create has a validate name MetaDataCreateIndexService.validateIndexName(renamedIndexName, currentState); - createIndexService.validateIndexSettings(renamedIndexName, snapshotIndexMetaData.getSettings(), false); + createIndexService.validateIndexSettings(renamedIndexName, snapshotIndexMetaData.getSettings(), currentState, false); IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN).index(renamedIndexName); indexMdBuilder.settings(Settings.builder().put(snapshotIndexMetaData.getSettings()).put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())); if (!request.includeAliases() && !snapshotIndexMetaData.getAliases().isEmpty()) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index abb34f80eac..efef803be84 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -34,7 +34,9 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.elasticsearch.cluster.shards.ClusterShardLimitIT; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -56,7 +58,11 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.emptyMap; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; +import static org.elasticsearch.cluster.shards.ClusterShardLimitIT.ShardCounts.forDataNodeCount; +import static org.elasticsearch.indices.IndicesServiceTests.createClusterForShardLimitTest; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -466,4 +472,30 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { assertEquals("ratio is not a power of two", intRatio, Integer.highestOneBit(intRatio)); } } + + public void testShardLimitDeprecationWarning() { + int nodesInCluster = randomIntBetween(2,100); + 
ClusterShardLimitIT.ShardCounts counts = forDataNodeCount(nodesInCluster); + Settings clusterSettings = Settings.builder() + .put(MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), counts.getShardsPerNode()) + .build(); + ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(), counts.getFirstIndexReplicas(), + clusterSettings); + + Settings indexSettings = Settings.builder() + .put(SETTING_VERSION_CREATED, Version.CURRENT) + .put(SETTING_NUMBER_OF_SHARDS, counts.getFailingIndexShards()) + .put(SETTING_NUMBER_OF_REPLICAS, counts.getFailingIndexReplicas()) + .build(); + + DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + MetaDataCreateIndexService.checkShardLimit(indexSettings, state, deprecationLogger); + int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas()); + int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas()); + int maxShards = counts.getShardsPerNode() * nodesInCluster; + assertWarnings("In a future major version, this request will fail because this action would add [" + + totalShards + "] total shards, but this cluster currently has [" + currentShards + "]/[" + maxShards + "] maximum shards open."+ + " Before upgrading, reduce the number of shards in your cluster or adjust the cluster setting [cluster.max_shards_per_node]."); + } + } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java new file mode 100644 index 00000000000..55e2216edb5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateServiceTests.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.shards.ClusterShardLimitIT; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.stream.Collectors; + +import static org.elasticsearch.cluster.shards.ClusterShardLimitIT.ShardCounts.forDataNodeCount; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MetaDataIndexStateServiceTests extends ESTestCase { + + public void testValidateShardLimitDeprecationWarning() { + int nodesInCluster = randomIntBetween(2,100); + ClusterShardLimitIT.ShardCounts counts = forDataNodeCount(nodesInCluster); + Settings clusterSettings = Settings.builder() + .put(MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), counts.getShardsPerNode()) + .build(); + ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(), counts.getFirstIndexReplicas(), + counts.getFailingIndexShards(), counts.getFailingIndexReplicas(), clusterSettings); + + Index[] indices = Arrays.stream(state.metaData().indices().values().toArray(IndexMetaData.class)) + .map(IndexMetaData::getIndex) + .collect(Collectors.toList()) + .toArray(new Index[2]); + + DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + MetaDataIndexStateService.validateShardLimit(state, indices, deprecationLogger); + int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas()); + int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas()); + int maxShards = counts.getShardsPerNode() * nodesInCluster; + assertWarnings("In a future major version, this request will fail because this action would add [" + + totalShards + "] total shards, but this cluster currently has [" + currentShards + "]/[" + maxShards + "] maximum shards open."+ + " Before upgrading, reduce the number of shards in your cluster or adjust the cluster setting [cluster.max_shards_per_node]."); + } + + public static ClusterState createClusterForShardLimitTest(int nodesInCluster, int openIndexShards, int openIndexReplicas, + int closedIndexShards, int closedIndexReplicas, Settings clusterSettings) { + ImmutableOpenMap.Builder dataNodes = ImmutableOpenMap.builder(); + for (int i = 0; i < nodesInCluster; i++) { + dataNodes.put(randomAlphaOfLengthBetween(5,15), mock(DiscoveryNode.class)); + } + DiscoveryNodes nodes = mock(DiscoveryNodes.class); + when(nodes.getDataNodes()).thenReturn(dataNodes.build()); + + IndexMetaData.Builder openIndexMetaData = IndexMetaData.builder(randomAlphaOfLengthBetween(5, 15)) + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .creationDate(randomLong()) + .numberOfShards(openIndexShards) + .numberOfReplicas(openIndexReplicas); + IndexMetaData.Builder closedIndexMetaData = IndexMetaData.builder(randomAlphaOfLengthBetween(5, 15)) + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .creationDate(randomLong()) + .state(IndexMetaData.State.CLOSE) + .numberOfShards(closedIndexShards) + 
.numberOfReplicas(closedIndexReplicas); + MetaData.Builder metaData = MetaData.builder().put(openIndexMetaData).put(closedIndexMetaData); + if (randomBoolean()) { + metaData.persistentSettings(clusterSettings); + } else { + metaData.transientSettings(clusterSettings); + } + + return ClusterState.builder(ClusterName.DEFAULT) + .metaData(metaData) + .nodes(nodes) + .build(); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java b/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java new file mode 100644 index 00000000000..f9958d3aba2 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +package org.elasticsearch.cluster.shards; + +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESIntegTestCase; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) +public class ClusterShardLimitIT extends ESIntegTestCase { + private static final String shardsPerNodeKey = MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(); + + public void testSettingClusterMaxShards() { + int shardsPerNode = between(1, 500_000); + setShardsPerNode(shardsPerNode); + } + + public void testMinimumPerNode() { + int negativeShardsPerNode = between(-50_000, 0); + try { + if (frequently()) { + client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(shardsPerNodeKey, negativeShardsPerNode).build()) + .get(); + } else { + client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(shardsPerNodeKey, negativeShardsPerNode).build()) + .get(); + } + fail("should not be able to set negative shards per node"); + } catch (IllegalArgumentException ex) { + assertEquals("Failed to parse value [" + negativeShardsPerNode + "] for setting [cluster.max_shards_per_node] must be >= 1", + ex.getMessage()); + } + } + + private void setShardsPerNode(int shardsPerNode) { + try { + ClusterUpdateSettingsResponse response; + if (frequently()) { + response = client().admin().cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(shardsPerNodeKey, shardsPerNode).build()) + .get(); + assertEquals(shardsPerNode, response.getPersistentSettings().getAsInt(shardsPerNodeKey, -1).intValue()); + } else { + response = client().admin().cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(shardsPerNodeKey, shardsPerNode).build()) + .get(); + assertEquals(shardsPerNode, 
response.getTransientSettings().getAsInt(shardsPerNodeKey, -1).intValue()); + } + } catch (IllegalArgumentException ex) { + fail(ex.getMessage()); + } + } + + public static class ShardCounts { + private final int shardsPerNode; + + private final int firstIndexShards; + private final int firstIndexReplicas; + + private final int failingIndexShards; + private final int failingIndexReplicas; + + private ShardCounts(int shardsPerNode, + int firstIndexShards, + int firstIndexReplicas, + int failingIndexShards, + int failingIndexReplicas) { + this.shardsPerNode = shardsPerNode; + this.firstIndexShards = firstIndexShards; + this.firstIndexReplicas = firstIndexReplicas; + this.failingIndexShards = failingIndexShards; + this.failingIndexReplicas = failingIndexReplicas; + } + + public static ShardCounts forDataNodeCount(int dataNodes) { + int mainIndexReplicas = between(0, dataNodes - 1); + int mainIndexShards = between(1, 10); + int totalShardsInIndex = (mainIndexReplicas + 1) * mainIndexShards; + int shardsPerNode = (int) Math.ceil((double) totalShardsInIndex / dataNodes); + int totalCap = shardsPerNode * dataNodes; + + int failingIndexShards; + int failingIndexReplicas; + if (dataNodes > 1 && frequently()) { + failingIndexShards = Math.max(1, totalCap - totalShardsInIndex); + failingIndexReplicas = between(1, dataNodes - 1); + } else { + failingIndexShards = totalCap - totalShardsInIndex + between(1, 10); + failingIndexReplicas = 0; + } + + return new ShardCounts(shardsPerNode, mainIndexShards, mainIndexReplicas, failingIndexShards, failingIndexReplicas); + } + + public int getShardsPerNode() { + return shardsPerNode; + } + + public int getFirstIndexShards() { + return firstIndexShards; + } + + public int getFirstIndexReplicas() { + return firstIndexReplicas; + } + + public int getFailingIndexShards() { + return failingIndexShards; + } + + public int getFailingIndexReplicas() { + return failingIndexReplicas; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index b4e98775d97..cfac866895f 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -29,9 +29,14 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.cluster.shards.ClusterShardLimitIT; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -80,6 +85,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.elasticsearch.cluster.shards.ClusterShardLimitIT.ShardCounts.forDataNodeCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; @@ -567,4 +573,76 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { assertThat(e, hasToString(new RegexMatcher(pattern))); } + public void testOverShardLimit() { + int nodesInCluster = randomIntBetween(1,100); + ClusterShardLimitIT.ShardCounts counts = forDataNodeCount(nodesInCluster); + + Settings clusterSettings = Settings.builder() + .put(MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), counts.getShardsPerNode()) + .build(); + + ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(), counts.getFirstIndexReplicas(), + clusterSettings); + + int shardsToAdd = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas()); + DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + Optional errorMessage = IndicesService.checkShardLimit(shardsToAdd, state, deprecationLogger); + + int totalShards = counts.getFailingIndexShards() * (1 + counts.getFailingIndexReplicas()); + int currentShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas()); + int maxShards = counts.getShardsPerNode() * nodesInCluster; + assertWarnings("In a future major version, this request will fail because this action would add [" + + totalShards + "] total shards, but this cluster currently has [" + currentShards + "]/[" + maxShards + "] maximum shards open."+ + " Before upgrading, reduce the number of shards in your cluster or adjust the cluster setting [cluster.max_shards_per_node]."); + assertFalse(errorMessage.isPresent()); + } + + public void testUnderShardLimit() { + int nodesInCluster = randomIntBetween(2,100); + // Calculate the counts for a cluster 1 node smaller than we have to ensure we have headroom + ClusterShardLimitIT.ShardCounts counts = forDataNodeCount(nodesInCluster - 1); + + Settings clusterSettings = Settings.builder() + .put(MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), counts.getShardsPerNode()) + .build(); + + ClusterState state = createClusterForShardLimitTest(nodesInCluster, counts.getFirstIndexShards(), counts.getFirstIndexReplicas(), + clusterSettings); + + int existingShards = counts.getFirstIndexShards() * (1 + counts.getFirstIndexReplicas()); + int shardsToAdd = randomIntBetween(1, (counts.getShardsPerNode() * nodesInCluster) - existingShards); + DeprecationLogger deprecationLogger = new DeprecationLogger(logger); + Optional errorMessage = IndicesService.checkShardLimit(shardsToAdd, state, deprecationLogger); + + assertFalse(errorMessage.isPresent()); + } + + public static ClusterState createClusterForShardLimitTest(int nodesInCluster, int shardsInIndex, int replicas, + Settings clusterSettings) { + ImmutableOpenMap.Builder dataNodes = ImmutableOpenMap.builder(); + for (int i = 0; i < nodesInCluster; i++) { + dataNodes.put(randomAlphaOfLengthBetween(5,15), mock(DiscoveryNode.class)); + } + DiscoveryNodes nodes = mock(DiscoveryNodes.class); + when(nodes.getDataNodes()).thenReturn(dataNodes.build()); + + IndexMetaData.Builder indexMetaData = IndexMetaData.builder(randomAlphaOfLengthBetween(5, 15)) + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .creationDate(randomLong()) + .numberOfShards(shardsInIndex) + .numberOfReplicas(replicas); + MetaData.Builder metaData = MetaData.builder().put(indexMetaData); + if (randomBoolean()) { + metaData.transientSettings(clusterSettings); + } else { + 
metaData.persistentSettings(clusterSettings); + } + + return ClusterState.builder(ClusterName.DEFAULT) + .metaData(metaData) + .nodes(nodes) + .build(); + } + + } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 77c77baaa11..3d8b1decea4 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -143,6 +143,7 @@ public class ClusterStateChanges extends AbstractComponent { // mocks clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); IndicesService indicesService = mock(IndicesService.class); // MetaDataCreateIndexService creates indices using its IndicesService instance to check mappings -> fake it here try { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java new file mode 100644 index 00000000000..7f11c2c2944 --- /dev/null +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/ClusterDeprecationChecks.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.deprecation; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; + +public class ClusterDeprecationChecks { + + static DeprecationIssue checkShardLimit(ClusterState state) { + int shardsPerNode = MetaData.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.get(state.metaData().settings()); + int nodeCount = state.getNodes().getDataNodes().size(); + int maxShardsInCluster = shardsPerNode * nodeCount; + int currentOpenShards = state.getMetaData().getTotalOpenIndexShards(); + + if (currentOpenShards >= maxShardsInCluster) { + return new DeprecationIssue(DeprecationIssue.Level.WARNING, + "Number of open shards exceeds cluster soft limit", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking_70_cluster_changes.html", + "There are [" + currentOpenShards + "] open shards in this cluster, but the cluster is limited to [" + + shardsPerNode + "] per data node, for [" + maxShardsInCluster + "] maximum."); + } + return null; + } +} diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index 83b72d47838..97c0498c3f6 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -29,7 +29,7 @@ public class DeprecationChecks { static List> CLUSTER_SETTINGS_CHECKS = Collections.unmodifiableList(Arrays.asList( - // STUB + ClusterDeprecationChecks::checkShardLimit )); static List, List, DeprecationIssue>> NODE_SETTINGS_CHECKS = From 596b5cf108083bb898c386cb620c8bbc2d87b742 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 23 Oct 2018 17:26:17 -0700 Subject: [PATCH 
17/67] Test: Fix last reference to SearchScript (#34731) This was accidentally left over when converting to FieldScript. closes #34683 --- .../xpack/ml/transforms/PainlessDomainSplitIT.java | 5 ----- .../xpack/ml/MachineLearningPainlessExtension.java | 4 ++-- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java index 34d58ef08bf..7af4453c2d4 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java @@ -176,11 +176,6 @@ public class PainlessDomainSplitIT extends ESRestTestCase { tests.add(new TestConfiguration(null, "shishi.xn--fiqs8s","shishi.xn--fiqs8s")); } - public void testEmptyToLetBuildPass() { - // remove this once one of the awaitsfixes are removed!! - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34683") public void testIsolated() throws Exception { Settings.Builder settings = Settings.builder() .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningPainlessExtension.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningPainlessExtension.java index b55936acd06..12d2626db74 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningPainlessExtension.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningPainlessExtension.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.ml; import org.elasticsearch.painless.spi.PainlessExtension; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.SearchScript; import java.util.Collections; import java.util.List; @@ -21,6 +21,6 @@ public class MachineLearningPainlessExtension implements PainlessExtension { @Override public Map, List> getContextWhitelists() { - return Collections.singletonMap(SearchScript.CONTEXT, Collections.singletonList(WHITELIST)); + return Collections.singletonMap(FieldScript.CONTEXT, Collections.singletonList(WHITELIST)); } } From 8da1c9626a19853a692c30bbd30428e0a38c5f75 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 23 Oct 2018 18:07:53 -0700 Subject: [PATCH 18/67] Scripting: Add back params._source access in scripted metric aggs (#34777) Access to special variables _source and _fields were accidentally removed in recent refactorings. This commit adds them back, along with a test. 
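For reference, the restored behavior is what lets the map phase of a scripted_metric aggregation read the original document through params._source. The request body below is only a minimal sketch: the aggregation name source_access is illustrative, and it assumes a document field named three (the same field exercised by the new testMapSourceAccess test added in this change).

[source,js]
--------------------------------------------------
{
  "size": 0,
  "aggs": {
    "source_access": {
      "scripted_metric": {
        "init_script": "state.values = []",
        "map_script": "state.values.add(params._source.three)",
        "combine_script": "return state.values",
        "reduce_script": "return states"
      }
    }
  }
}
--------------------------------------------------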
closes #33884 --- .../ScriptedMetricAggContextsTests.java | 37 +++++++++- .../script/ScriptedMetricAggContexts.java | 72 +++++++++++++++---- 2 files changed, 95 insertions(+), 14 deletions(-) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index 5c6fbc54667..2d33853b88f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -19,17 +19,26 @@ package org.elasticsearch.painless; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.Scorable; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptedMetricAggContexts; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.search.lookup.SourceLookup; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class ScriptedMetricAggContextsTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { @@ -57,7 +66,7 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { assertEquals(10, state.get("testField")); } - public void testMapBasic() { + public void testMapBasic() throws IOException { ScriptedMetricAggContexts.MapScript.Factory factory = scriptEngine.compile("test", "state.testField = 2*_score", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); @@ -82,6 +91,32 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase { assertEquals(1.0, state.get("testField")); } + public void testMapSourceAccess() throws IOException { + ScriptedMetricAggContexts.MapScript.Factory factory = scriptEngine.compile("test", + "state.testField = params._source.three", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + MemoryIndex index = new MemoryIndex(); + // we don't need a real index, just need to construct a LeafReaderContext which cannot be mocked + LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0); + + SearchLookup lookup = mock(SearchLookup.class); + LeafSearchLookup leafLookup = mock(LeafSearchLookup.class); + when(lookup.getLeafSearchLookup(leafReaderContext)).thenReturn(leafLookup); + SourceLookup sourceLookup = mock(SourceLookup.class); + when(leafLookup.asMap()).thenReturn(Collections.singletonMap("_source", sourceLookup)); + when(sourceLookup.get("three")).thenReturn(3); + ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, lookup); + ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(leafReaderContext); + + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(3, state.get("testField")); + } + public void testCombineBasic() { ScriptedMetricAggContexts.CombineScript.Factory factory = scriptEngine.compile("test", "state.testField = params.initialVal; return state.testField + params.inc", 
ScriptedMetricAggContexts.CombineScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index e72d597a6af..4c51b9fed69 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -27,15 +27,18 @@ import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; public class ScriptedMetricAggContexts { - private abstract static class ParamsAndStateBase { + + public abstract static class InitScript { private final Map params; private final Map state; - ParamsAndStateBase(Map params, Map state) { + public InitScript(Map params, Map state) { this.params = params; this.state = state; } @@ -47,12 +50,6 @@ public class ScriptedMetricAggContexts { public Object getState() { return state; } - } - - public abstract static class InitScript extends ParamsAndStateBase { - public InitScript(Map params, Map state) { - super(params, state); - } public abstract void execute(); @@ -64,14 +61,51 @@ public class ScriptedMetricAggContexts { public static ScriptContext CONTEXT = new ScriptContext<>("aggs_init", Factory.class); } - public abstract static class MapScript extends ParamsAndStateBase { + public abstract static class MapScript { + private static final Map DEPRECATIONS; + + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "doc", + "Accessing variable [doc] via [params.doc] from within a scripted metric agg map script " + + "is deprecated in favor of directly accessing [doc]." + ); + deprecations.put( + "_doc", + "Accessing variable [doc] via [params._doc] from within a scripted metric agg map script " + + "is deprecated in favor of directly accessing [doc]." + ); + deprecations.put( + "_agg", + "Accessing variable [_agg] via [params._agg] from within a scripted metric agg map script " + + "is deprecated in favor of using [state]." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } + + private final Map params; + private final Map state; private final LeafSearchLookup leafLookup; private Scorable scorer; public MapScript(Map params, Map state, SearchLookup lookup, LeafReaderContext leafContext) { - super(params, state); - + this.state = state; this.leafLookup = leafContext == null ? 
null : lookup.getLeafSearchLookup(leafContext); + if (leafLookup != null) { + params = new HashMap<>(params); // copy params so we aren't modifying input + params.putAll(leafLookup.asMap()); // add lookup vars + params = new ParameterMap(params, DEPRECATIONS); // wrap with deprecations + } + this.params = params; + } + + public Map getParams() { + return params; + } + + public Map getState() { + return state; } // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for @@ -117,9 +151,21 @@ public class ScriptedMetricAggContexts { public static ScriptContext CONTEXT = new ScriptContext<>("aggs_map", Factory.class); } - public abstract static class CombineScript extends ParamsAndStateBase { + public abstract static class CombineScript { + private final Map params; + private final Map state; + public CombineScript(Map params, Map state) { - super(params, state); + this.params = params; + this.state = state; + } + + public Map getParams() { + return params; + } + + public Map getState() { + return state; } public abstract Object execute(); From 52266d8b115ce8633e57ff1e5abfbb951496e67b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 23 Oct 2018 21:08:34 -0400 Subject: [PATCH 19/67] TEST: Clone replicas list when compute replication targets (#34728) In #34407, we were supposed to clone the list of replicas of ReplicationGroup when computing replication targets, but we missed it. If we don't clone the list, a WriteReplicationAction may use an old ReplicationTargets that contains replicas which have already been removed from the current list of replicas. Relates #34407 Closes #33457 --- .../ESIndexLevelReplicationTestCase.java | 32 ++++++++++++++++--- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index c4881d06351..60a7655e9ed 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -79,6 +79,7 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; @@ -295,6 +296,9 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase .filter(shardRouting -> shardRouting.isSameAllocation(replica.routingEntry())).findFirst().isPresent() == false : "replica with aId [" + replica.routingEntry().allocationId() + "] already exists"; replicas.add(replica); + if (replicationTargets != null) { + replicationTargets.addReplica(replica); + } updateAllocationIDsOnPrimary(); } @@ -310,6 +314,9 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase newShard(shardRouting, shardPath, indexMetaData, null, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); replicas.add(newReplica); + if (replicationTargets != null) { + replicationTargets.addReplica(newReplica); + } updateAllocationIDsOnPrimary(); return newReplica; } @@ -496,7 +503,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase } private synchronized void computeReplicationTargets() { - this.replicationTargets = new ReplicationTargets(primary, replicas); +
this.replicationTargets = new ReplicationTargets(this.primary, new ArrayList<>(this.replicas)); } private synchronized ReplicationTargets getReplicationTargets() { @@ -510,7 +517,25 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase ReplicationTargets(IndexShard primary, List replicas) { this.primary = primary; - this.replicas = Collections.unmodifiableList(replicas); + this.replicas = replicas; + } + + /** + * This does not modify the replication targets, but only adds a replica to the list. + * If the targets is updated to include the given replica, a replication action would + * be able to find this replica to execute write requests on it. + */ + synchronized void addReplica(IndexShard replica) { + replicas.add(replica); + } + + synchronized IndexShard findReplicaShard(ShardRouting replicaRouting) { + for (IndexShard replica : replicas) { + if (replica.routingEntry().isSameAllocation(replicaRouting)) { + return replica; + } + } + throw new AssertionError("replica [" + replicaRouting + "] is not found; replicas[" + replicas + "] primary[" + primary + "]"); } } @@ -614,8 +639,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { - IndexShard replica = replicationTargets.replicas.stream() - .filter(s -> replicaRouting.isSameAllocation(s.routingEntry())).findFirst().get(); + IndexShard replica = replicationTargets.findReplicaShard(replicaRouting); replica.acquireReplicaOperationPermit( getPrimaryShard().getPendingPrimaryTerm(), globalCheckpoint, From abf8cb670621cb8b457409b301911c955b654b7f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 24 Oct 2018 08:12:39 +0200 Subject: [PATCH 20/67] [CCR] Cleanup pause follow action (#34183) * Change the `TransportPauseFollowAction` to extend from `TransportMasterNodeAction` instead of `HandledAction`, this removes a sync cluster state api call. * Introduced `ResponseHandler` that removes duplicated code in `TransportPauseFollowAction` and `TransportResumeFollowAction`. * Changed `PauseFollowAction.Request` to not use `readFrom()`. --- .../xpack/ccr/action/ResponseHandler.java | 64 +++++++++ .../action/TransportPauseFollowAction.java | 128 +++++++----------- .../action/TransportResumeFollowAction.java | 51 +------ .../xpack/ccr/rest/RestPauseFollowAction.java | 3 +- .../xpack/ccr/IndexFollowingIT.java | 6 +- .../xpack/ccr/LocalIndexFollowingIT.java | 3 +- .../core/ccr/action/PauseFollowAction.java | 26 ++-- 7 files changed, 136 insertions(+), 145 deletions(-) create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ResponseHandler.java diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ResponseHandler.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ResponseHandler.java new file mode 100644 index 00000000000..6d28de05023 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ResponseHandler.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.master.AcknowledgedResponse; + +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReferenceArray; + +final class ResponseHandler { + + private final AtomicInteger counter; + private final AtomicReferenceArray responses; + private final ActionListener listener; + + ResponseHandler(int numRequests, ActionListener listener) { + this.counter = new AtomicInteger(numRequests); + this.responses = new AtomicReferenceArray<>(numRequests); + this.listener = listener; + } + + ActionListener getActionListener(final int requestId) { + return new ActionListener() { + + @Override + public void onResponse(T response) { + responses.set(requestId, response); + finalizeResponse(); + } + + @Override + public void onFailure(Exception e) { + responses.set(requestId, e); + finalizeResponse(); + } + }; + } + + private void finalizeResponse() { + Exception error = null; + if (counter.decrementAndGet() == 0) { + for (int j = 0; j < responses.length(); j++) { + Object response = responses.get(j); + if (response instanceof Exception) { + if (error == null) { + error = (Exception) response; + } else { + error.addSuppressed((Exception) response); + } + } + } + + if (error == null) { + listener.onResponse(new AcknowledgedResponse(true)); + } else { + listener.onFailure(error); + } + } + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java index 47fd785a0d3..02f483cc843 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPauseFollowAction.java @@ -7,27 +7,27 @@ package org.elasticsearch.xpack.ccr.action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.Client; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.stream.Collectors; -public class TransportPauseFollowAction extends HandledTransportAction { +public class TransportPauseFollowAction extends TransportMasterNodeAction { - private final Client client; private final PersistentTasksService 
persistentTasksService; @Inject @@ -35,86 +35,60 @@ public class TransportPauseFollowAction extends HandledTransportAction listener) { + protected String executor() { + return ThreadPool.Names.SAME; + } - client.admin().cluster().state(new ClusterStateRequest(), ActionListener.wrap(r -> { - PersistentTasksCustomMetaData persistentTasksMetaData = r.getState().metaData().custom(PersistentTasksCustomMetaData.TYPE); - if (persistentTasksMetaData == null) { - listener.onFailure(new IllegalArgumentException("no shard follow tasks for [" + request.getFollowIndex() + "]")); - return; - } + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } - List shardFollowTaskIds = persistentTasksMetaData.tasks().stream() - .filter(persistentTask -> ShardFollowTask.NAME.equals(persistentTask.getTaskName())) - .filter(persistentTask -> { - ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams(); - return shardFollowTask.getFollowShardId().getIndexName().equals(request.getFollowIndex()); - }) - .map(PersistentTasksCustomMetaData.PersistentTask::getId) - .collect(Collectors.toList()); + @Override + protected void masterOperation(PauseFollowAction.Request request, + ClusterState state, + ActionListener listener) throws Exception { + PersistentTasksCustomMetaData persistentTasksMetaData = state.metaData().custom(PersistentTasksCustomMetaData.TYPE); + if (persistentTasksMetaData == null) { + listener.onFailure(new IllegalArgumentException("no shard follow tasks for [" + request.getFollowIndex() + "]")); + return; + } - if (shardFollowTaskIds.isEmpty()) { - listener.onFailure(new IllegalArgumentException("no shard follow tasks for [" + request.getFollowIndex() + "]")); - return; - } + List shardFollowTaskIds = persistentTasksMetaData.tasks().stream() + .filter(persistentTask -> ShardFollowTask.NAME.equals(persistentTask.getTaskName())) + .filter(persistentTask -> { + ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams(); + return shardFollowTask.getFollowShardId().getIndexName().equals(request.getFollowIndex()); + }) + .map(PersistentTasksCustomMetaData.PersistentTask::getId) + .collect(Collectors.toList()); - final AtomicInteger counter = new AtomicInteger(shardFollowTaskIds.size()); - final AtomicReferenceArray responses = new AtomicReferenceArray<>(shardFollowTaskIds.size()); - int i = 0; + if (shardFollowTaskIds.isEmpty()) { + listener.onFailure(new IllegalArgumentException("no shard follow tasks for [" + request.getFollowIndex() + "]")); + return; + } - for (String taskId : shardFollowTaskIds) { - final int shardId = i++; - persistentTasksService.sendRemoveRequest(taskId, - new ActionListener>() { - @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { - responses.set(shardId, task); - finalizeResponse(); - } + int i = 0; + final ResponseHandler responseHandler = new ResponseHandler(shardFollowTaskIds.size(), listener); + for (String taskId : shardFollowTaskIds) { + final int taskSlot = i++; + persistentTasksService.sendRemoveRequest(taskId, responseHandler.getActionListener(taskSlot)); + } + } - @Override - public void onFailure(Exception e) { - responses.set(shardId, e); - finalizeResponse(); - } - - void finalizeResponse() { - Exception error = null; - if (counter.decrementAndGet() == 0) { - for (int j = 0; j < responses.length(); j++) { - Object response = responses.get(j); - if (response instanceof Exception) { - if (error == null) { - error = (Exception) response; - } else { - 
error.addSuppressed((Throwable) response); - } - } - } - - if (error == null) { - // include task ids? - listener.onResponse(new AcknowledgedResponse(true)); - } else { - // TODO: cancel all started tasks - listener.onFailure(error); - } - } - } - }); - } - }, listener::onFailure)); + @Override + protected ClusterBlockException checkBlock(PauseFollowAction.Request request, ClusterState state) { + return state.blocks().indexBlockedException(ClusterBlockLevel.METADATA_WRITE, request.getFollowIndex()); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index d65189434fa..36b56fb3d5e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.LicenseUtils; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -49,8 +48,6 @@ import java.util.Iterator; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.stream.Collectors; public class TransportResumeFollowAction extends HandledTransportAction { @@ -144,62 +141,22 @@ public class TransportResumeFollowAction extends HandledTransportAction handler) throws IOException { + ActionListener listener) throws IOException { MapperService mapperService = followIndexMetadata != null ? 
indicesService.createIndexMapperService(followIndexMetadata) : null; validate(request, leaderIndexMetadata, followIndexMetadata, leaderIndexHistoryUUIDs, mapperService); final int numShards = followIndexMetadata.getNumberOfShards(); - final AtomicInteger counter = new AtomicInteger(numShards); - final AtomicReferenceArray responses = new AtomicReferenceArray<>(followIndexMetadata.getNumberOfShards()); + final ResponseHandler handler = new ResponseHandler(numShards, listener); Map filteredHeaders = threadPool.getThreadContext().getHeaders().entrySet().stream() .filter(e -> ShardFollowTask.HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - for (int i = 0; i < numShards; i++) { - final int shardId = i; + for (int shardId = 0; shardId < numShards; shardId++) { String taskId = followIndexMetadata.getIndexUUID() + "-" + shardId; final ShardFollowTask shardFollowTask = createShardFollowTask(shardId, clusterNameAlias, request, leaderIndexMetadata, followIndexMetadata, filteredHeaders); - persistentTasksService.sendStartRequest(taskId, ShardFollowTask.NAME, shardFollowTask, - new ActionListener>() { - @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { - responses.set(shardId, task); - finalizeResponse(); - } - - @Override - public void onFailure(Exception e) { - responses.set(shardId, e); - finalizeResponse(); - } - - void finalizeResponse() { - Exception error = null; - if (counter.decrementAndGet() == 0) { - for (int j = 0; j < responses.length(); j++) { - Object response = responses.get(j); - if (response instanceof Exception) { - if (error == null) { - error = (Exception) response; - } else { - error.addSuppressed((Throwable) response); - } - } - } - - if (error == null) { - // include task ids? 
- handler.onResponse(new AcknowledgedResponse(true)); - } else { - // TODO: cancel all started tasks - handler.onFailure(error); - } - } - } - } - ); + persistentTasksService.sendStartRequest(taskId, ShardFollowTask.NAME, shardFollowTask, handler.getActionListener(shardId)); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java index 9d4df8d8567..c7be6382fa7 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPauseFollowAction.java @@ -31,8 +31,7 @@ public class RestPauseFollowAction extends BaseRestHandler { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - Request request = new Request(); - request.setFollowIndex(restRequest.param("index")); + Request request = new Request(restRequest.param("index")); return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index 926b439ea35..5d7d58eb9b3 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -357,8 +357,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { } public void testUnfollowNonExistingIndex() { - PauseFollowAction.Request unfollowRequest = new PauseFollowAction.Request(); - unfollowRequest.setFollowIndex("non-existing-index"); + PauseFollowAction.Request unfollowRequest = new PauseFollowAction.Request("non-existing-index"); expectThrows(IllegalArgumentException.class, () -> followerClient().execute(PauseFollowAction.INSTANCE, unfollowRequest).actionGet()); } @@ -750,8 +749,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { private void pauseFollow(String... 
indices) throws Exception { for (String index : indices) { - final PauseFollowAction.Request unfollowRequest = new PauseFollowAction.Request(); - unfollowRequest.setFollowIndex(index); + final PauseFollowAction.Request unfollowRequest = new PauseFollowAction.Request(index); followerClient().execute(PauseFollowAction.INSTANCE, unfollowRequest).get(); } ensureNoCcrTasks(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java index 3267be6f420..51639f184ca 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java @@ -52,8 +52,7 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { assertThat(client().prepareSearch("follower").get().getHits().totalHits, equalTo(firstBatchNumDocs + secondBatchNumDocs)); }); - PauseFollowAction.Request pauseRequest = new PauseFollowAction.Request(); - pauseRequest.setFollowIndex("follower"); + PauseFollowAction.Request pauseRequest = new PauseFollowAction.Request("follower"); client().execute(PauseFollowAction.INSTANCE, pauseRequest); final long thirdBatchNumDocs = randomIntBetween(2, 64); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java index a5a45fea3f6..c20cda9ab4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java @@ -7,13 +7,14 @@ package org.elasticsearch.xpack.core.ccr.action; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Objects; public class PauseFollowAction extends Action { @@ -29,29 +30,28 @@ public class PauseFollowAction extends Action { return new AcknowledgedResponse(); } - public static class Request extends ActionRequest { + public static class Request extends MasterNodeRequest { - private String followIndex; + private final String followIndex; + + public Request(String followIndex) { + this.followIndex = Objects.requireNonNull(followIndex, "followIndex"); + } + + public Request(StreamInput in) throws IOException { + super(in); + this.followIndex = in.readString(); + } public String getFollowIndex() { return followIndex; } - public void setFollowIndex(final String followIndex) { - this.followIndex = followIndex; - } - @Override public ActionRequestValidationException validate() { return null; } - @Override - public void readFrom(final StreamInput in) throws IOException { - super.readFrom(in); - followIndex = in.readString(); - } - @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); From 18007a29b2eaa499fcfa7aa1c82b0c09400a1b28 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 24 Oct 2018 08:38:25 +0200 Subject: [PATCH 21/67] [CCR] Made leader cluster required in shard follow task. 
Left over from #34580 --- .../elasticsearch/xpack/ccr/action/ShardFollowTask.java | 8 +++----- .../xpack/ccr/action/ShardFollowTasksExecutor.java | 9 --------- 2 files changed, 3 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index 2dc08de8034..c7af4851ed4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -122,7 +122,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { } public ShardFollowTask(StreamInput in) throws IOException { - this.leaderCluster = in.readOptionalString(); + this.leaderCluster = in.readString(); this.followShardId = ShardId.readShardId(in); this.leaderShardId = ShardId.readShardId(in); this.maxBatchOperationCount = in.readVInt(); @@ -190,7 +190,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalString(leaderCluster); + out.writeString(leaderCluster); followShardId.writeTo(out); leaderShardId.writeTo(out); out.writeVLong(maxBatchOperationCount); @@ -210,9 +210,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - if (leaderCluster != null) { - builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); - } + builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); builder.field(FOLLOW_SHARD_INDEX_FIELD.getPreferredName(), followShardId.getIndex().getName()); builder.field(FOLLOW_SHARD_INDEX_UUID_FIELD.getPreferredName(), followShardId.getIndex().getUUID()); builder.field(FOLLOW_SHARD_SHARDID_FIELD.getPreferredName(), followShardId.id()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 0bb861d795f..014df78b39e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -67,15 +67,6 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor Date: Wed, 24 Oct 2018 09:27:08 +0200 Subject: [PATCH 22/67] [CCR] Removed unused static parse field. 
--- .../xpack/core/ccr/action/PutAutoFollowPatternAction.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 22c574d26d2..5b8336be768 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -42,8 +42,6 @@ public class PutAutoFollowPatternAction extends Action { public static class Request extends AcknowledgedRequest implements ToXContentObject { - static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster"); - private static final ObjectParser PARSER = new ObjectParser<>("put_auto_follow_pattern_request", Request::new); private static final ParseField NAME_FIELD = new ParseField("name"); From fe54f73434af0bda3b42bfa837b56e916c01251f Mon Sep 17 00:00:00 2001 From: hanbing0715 Date: Wed, 24 Oct 2018 18:55:21 +0800 Subject: [PATCH 23/67] Persist generated public keys unencrypted (#34626) The changes introduced in cca1a2a mean that we should not encrypt the public keys that might be generated by the key-pair-generator when storing the file, as the code that would consume them assumes that they are not encrypted. --- .../license/licensor/tools/KeyPairGeneratorTool.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/license-tools/src/main/java/org/elasticsearch/license/licensor/tools/KeyPairGeneratorTool.java b/x-pack/license-tools/src/main/java/org/elasticsearch/license/licensor/tools/KeyPairGeneratorTool.java index c9c2e507933..d6927f1f611 100644 --- a/x-pack/license-tools/src/main/java/org/elasticsearch/license/licensor/tools/KeyPairGeneratorTool.java +++ b/x-pack/license-tools/src/main/java/org/elasticsearch/license/licensor/tools/KeyPairGeneratorTool.java @@ -21,7 +21,6 @@ import java.security.KeyPairGenerator; import java.security.SecureRandom; import static org.elasticsearch.license.CryptUtils.writeEncryptedPrivateKey; -import static org.elasticsearch.license.CryptUtils.writeEncryptedPublicKey; public class KeyPairGeneratorTool extends LoggingAwareCommand { @@ -65,7 +64,7 @@ public class KeyPairGeneratorTool extends LoggingAwareCommand { KeyPair keyPair = keyGen.generateKeyPair(); Files.write(privateKeyPath, writeEncryptedPrivateKey(keyPair.getPrivate())); - Files.write(publicKeyPath, writeEncryptedPublicKey(keyPair.getPublic())); + Files.write(publicKeyPath, keyPair.getPublic().getEncoded()); terminal.println( Terminal.Verbosity.VERBOSE, From be907516adb1d4409af349a7b84c8ea4f4ac8b2b Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 24 Oct 2018 13:32:48 +0200 Subject: [PATCH 24/67] Change ShardFollowTask defaults (#34793) Per #31717 this commit changes the defaults to the following: Batch size of 5120 ops. Maximum of 12 concurrent read requests. Maximum of 9 concurrent write requests. These are not necessarily our final values, but they are good defaults for the purposes of initial testing.
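These defaults only take effect when the corresponding parameters are omitted from the follow request. The sketch below shows how an explicit override could surface in the REST layer; it is an assumption-laden illustration, not part of this patch: the field names are inferred from the DEFAULT_* constants in this change and from the REST tests elsewhere in the series, the index and cluster names are made up, and leader_cluster is renamed to remote_cluster by a later patch in this series.

[source,js]
--------------------------------------------------
PUT /follower/_ccr/follow
{
  "leader_cluster": "leader_cluster",
  "leader_index": "leader",
  "max_batch_operation_count": 5120,
  "max_concurrent_read_batches": 12,
  "max_concurrent_write_batches": 9
}
--------------------------------------------------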
--- .../xpack/ccr/action/TransportResumeFollowAction.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 36b56fb3d5e..01d76a4ea26 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -54,10 +54,10 @@ public class TransportResumeFollowAction extends HandledTransportAction Date: Wed, 24 Oct 2018 13:39:36 +0200 Subject: [PATCH 25/67] [CCR] Renamed leader_cluster to remote_cluster (#34776) and also some occurrences of clusterAlias to remoteCluster. Closes #34682 --- .../xpack/ccr/CcrMultiClusterLicenseIT.java | 4 +- .../xpack/ccr/FollowIndexSecurityIT.java | 4 +- .../xpack/ccr/FollowIndexIT.java | 2 +- .../rest-api-spec/test/ccr/auto_follow.yml | 6 +-- .../test/ccr/follow_and_unfollow.yml | 2 +- .../rest-api-spec/test/ccr/follow_stats.yml | 2 +- .../xpack/ccr/ESCCRRestTestCase.java | 2 +- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 2 +- .../ccr/action/AutoFollowCoordinator.java | 24 +++++----- .../xpack/ccr/action/ShardFollowNodeTask.java | 2 +- .../xpack/ccr/action/ShardFollowTask.java | 24 +++++----- .../ccr/action/ShardFollowTasksExecutor.java | 4 +- .../TransportPutAutoFollowPatternAction.java | 6 +-- .../ccr/action/TransportPutFollowAction.java | 12 ++--- .../action/TransportResumeFollowAction.java | 2 +- .../xpack/CcrSingleNodeTestCase.java | 2 +- .../elasticsearch/xpack/ccr/AutoFollowIT.java | 4 +- .../elasticsearch/xpack/ccr/CcrLicenseIT.java | 2 +- .../xpack/ccr/IndexFollowingIT.java | 6 +-- .../action/AutoFollowCoordinatorTests.java | 14 +++--- .../PutAutoFollowPatternRequestTests.java | 6 +-- .../action/PutFollowActionRequestTests.java | 2 +- .../ShardFollowNodeTaskStatusTests.java | 2 +- ...ortDeleteAutoFollowPatternActionTests.java | 2 +- ...nsportPutAutoFollowPatternActionTests.java | 12 ++--- .../ccr/FollowStatsMonitoringDocTests.java | 6 +-- .../xpack/core/ccr/AutoFollowMetadata.java | 24 +++++----- .../core/ccr/ShardFollowNodeTaskStatus.java | 48 +++++++++---------- .../action/PutAutoFollowPatternAction.java | 27 ++++++----- .../core/ccr/action/PutFollowAction.java | 28 +++++------ .../src/main/resources/monitoring-es.json | 4 +- 31 files changed, 144 insertions(+), 143 deletions(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 7e85c19d7b9..074701c7313 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -25,7 +25,7 @@ public class CcrMultiClusterLicenseIT extends ESCCRRestTestCase { public void testFollow() { if ("follow".equals(targetCluster)) { final Request request = new Request("PUT", "/follower/_ccr/follow"); - request.setJsonEntity("{\"leader_cluster\": \"leader_cluster\", \"leader_index\": \"leader\"}"); + request.setJsonEntity("{\"remote_cluster\": \"leader_cluster\", \"leader_index\": 
\"leader\"}"); assertNonCompliantLicense(request); } } @@ -34,7 +34,7 @@ public class CcrMultiClusterLicenseIT extends ESCCRRestTestCase { assumeFalse("windows is the worst", Constants.WINDOWS); if ("follow".equals(targetCluster)) { final Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); - request.setJsonEntity("{\"leader_index_patterns\":[\"*\"], \"leader_cluster\": \"leader_cluster\"}"); + request.setJsonEntity("{\"leader_index_patterns\":[\"*\"], \"remote_cluster\": \"leader_cluster\"}"); client().performRequest(request); // parse the logs and ensure that the auto-coordinator skipped coordination on the leader cluster diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 18e061f3790..a8d9441f67d 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -132,13 +132,13 @@ public class FollowIndexSecurityIT extends ESCCRRestTestCase { { Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"remote_cluster\": \"leader_cluster\"}"); Exception e = expectThrows(ResponseException.class, () -> assertOK(client().performRequest(request))); assertThat(e.getMessage(), containsString("insufficient privileges to follow index [logs-*]")); } Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"], \"leader_cluster\": \"leader_cluster\"}"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-eu-*\"], \"remote_cluster\": \"leader_cluster\"}"); assertOK(client().performRequest(request)); try (RestClient leaderClient = buildLeaderClient()) { diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 9383d653de6..b038fbdaa03 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -83,7 +83,7 @@ public class FollowIndexIT extends ESCCRRestTestCase { assumeFalse("Test should only run when both clusters are running", "leader".equals(targetCluster)); Request request = new Request("PUT", "/_ccr/auto_follow/test_pattern"); - request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"leader_cluster\": \"leader_cluster\"}"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"], \"remote_cluster\": \"leader_cluster\"}"); assertOK(client().performRequest(request)); try (RestClient leaderClient = buildLeaderClient()) { diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml index 357fc7e1f56..e0f058080cb 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml +++ 
b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml @@ -23,7 +23,7 @@ ccr.put_auto_follow_pattern: name: my_pattern body: - leader_cluster: local + remote_cluster: local leader_index_patterns: ['logs-*'] max_concurrent_read_batches: 2 - is_true: acknowledged @@ -31,13 +31,13 @@ - do: ccr.get_auto_follow_pattern: name: my_pattern - - match: { my_pattern.leader_cluster: 'local' } + - match: { my_pattern.remote_cluster: 'local' } - match: { my_pattern.leader_index_patterns: ['logs-*'] } - match: { my_pattern.max_concurrent_read_batches: 2 } - do: ccr.get_auto_follow_pattern: {} - - match: { my_pattern.leader_cluster: 'local' } + - match: { my_pattern.remote_cluster: 'local' } - match: { my_pattern.leader_index_patterns: ['logs-*'] } - match: { my_pattern.max_concurrent_read_batches: 2 } diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml index f66825d0b92..d50bc52bc36 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_and_unfollow.yml @@ -38,7 +38,7 @@ ccr.follow: index: bar body: - leader_cluster: local + remote_cluster: local leader_index: foo - is_true: follow_index_created - is_true: follow_index_shards_acked diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml index ba784689dc5..29bb68369d8 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml @@ -37,7 +37,7 @@ ccr.follow: index: bar body: - leader_cluster: local + remote_cluster: local leader_index: foo - is_true: follow_index_created - is_true: follow_index_shards_acked diff --git a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java index b2e300ec4be..3b8951b343d 100644 --- a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java +++ b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java @@ -73,7 +73,7 @@ public class ESCCRRestTestCase extends ESRestTestCase { protected static void followIndex(RestClient client, String leaderCluster, String leaderIndex, String followIndex) throws IOException { final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); - request.setJsonEntity("{\"leader_cluster\": \"" + leaderCluster + "\", \"leader_index\": \"" + leaderIndex + + request.setJsonEntity("{\"remote_cluster\": \"" + leaderCluster + "\", \"leader_index\": \"" + leaderIndex + "\", \"poll_timeout\": \"10ms\"}"); assertOK(client.performRequest(request)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 442f2309da4..68a6310dcaa 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -98,7 +98,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E public static final String CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS = 
"leader_index_shard_history_uuids"; public static final String CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY = "leader_index_uuid"; public static final String CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY = "leader_index_name"; - public static final String CCR_CUSTOM_METADATA_LEADER_CLUSTER_NAME_KEY = "leader_cluster_name"; + public static final String CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY = "remote_cluster_name"; private final boolean enabled; private final Settings settings; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 031769d0abb..cbd31e4bae0 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -159,7 +159,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { @Override void getLeaderClusterState(final Map headers, - final String leaderClusterAlias, + final String remoteCluster, final BiConsumer handler) { final ClusterStateRequest request = new ClusterStateRequest(); request.clear(); @@ -168,7 +168,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { ccrLicenseChecker.checkRemoteClusterLicenseAndFetchClusterState( client, headers, - leaderClusterAlias, + remoteCluster, request, e -> handler.accept(null, e), leaderClusterState -> handler.accept(leaderClusterState, null)); @@ -245,14 +245,14 @@ public class AutoFollowCoordinator implements ClusterStateApplier { final int slot = i; final String autoFollowPattenName = entry.getKey(); final AutoFollowPattern autoFollowPattern = entry.getValue(); - final String leaderCluster = autoFollowPattern.getLeaderCluster(); + final String remoteCluster = autoFollowPattern.getRemoteCluster(); Map headers = autoFollowMetadata.getHeaders().get(autoFollowPattenName); - getLeaderClusterState(headers, leaderCluster, (leaderClusterState, e) -> { + getLeaderClusterState(headers, remoteCluster, (leaderClusterState, e) -> { if (leaderClusterState != null) { assert e == null; final List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(autoFollowPattenName); - final List leaderIndicesToFollow = getLeaderIndicesToFollow(leaderCluster, autoFollowPattern, + final List leaderIndicesToFollow = getLeaderIndicesToFollow(remoteCluster, autoFollowPattern, leaderClusterState, followerClusterState, followedIndices); if (leaderIndicesToFollow.isEmpty()) { finalise(slot, new AutoFollowResult(autoFollowPattenName)); @@ -260,12 +260,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier { List> patternsForTheSameLeaderCluster = autoFollowMetadata.getPatterns() .entrySet().stream() .filter(item -> autoFollowPattenName.equals(item.getKey()) == false) - .filter(item -> leaderCluster.equals(item.getValue().getLeaderCluster())) + .filter(item -> remoteCluster.equals(item.getValue().getRemoteCluster())) .map(item -> new Tuple<>(item.getKey(), item.getValue())) .collect(Collectors.toList()); Consumer resultHandler = result -> finalise(slot, result); - checkAutoFollowPattern(autoFollowPattenName, leaderCluster, autoFollowPattern, leaderIndicesToFollow, headers, + checkAutoFollowPattern(autoFollowPattenName, remoteCluster, autoFollowPattern, leaderIndicesToFollow, headers, patternsForTheSameLeaderCluster, resultHandler); } } else { @@ -313,7 +313,7 @@ public class AutoFollowCoordinator implements 
ClusterStateApplier { } private void followLeaderIndex(String autoFollowPattenName, - String leaderCluster, + String remoteCluster, Index indexToFollow, AutoFollowPattern pattern, Map headers, @@ -332,7 +332,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { followRequest.setPollTimeout(pattern.getPollTimeout()); PutFollowAction.Request request = new PutFollowAction.Request(); - request.setLeaderCluster(leaderCluster); + request.setRemoteCluster(remoteCluster); request.setLeaderIndex(indexToFollow.getName()); request.setFollowRequest(followRequest); @@ -357,7 +357,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { } } - static List getLeaderIndicesToFollow(String clusterAlias, + static List getLeaderIndicesToFollow(String remoteCluster, AutoFollowPattern autoFollowPattern, ClusterState leaderClusterState, ClusterState followerClusterState, @@ -409,12 +409,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier { * Fetch the cluster state from the leader with the specified cluster alias * * @param headers the client headers - * @param leaderClusterAlias the cluster alias of the leader + * @param remoteCluster the name of the leader cluster * @param handler the callback to invoke */ abstract void getLeaderClusterState( Map headers, - String leaderClusterAlias, + String remoteCluster, BiConsumer handler ); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 7d1e96c5d7c..19843ac4efb 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -443,7 +443,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { timeSinceLastFetchMillis = -1; } return new ShardFollowNodeTaskStatus( - params.getLeaderCluster(), + params.getRemoteCluster(), params.getLeaderShardId().getIndexName(), params.getFollowShardId().getIndexName(), getFollowShardId().getId(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index c7af4851ed4..ea75ee2d9e1 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -36,7 +36,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { public static final Set HEADER_FILTERS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList("es-security-runas-user", "_xpack_security_authentication"))); - static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster"); + static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster"); static final ParseField FOLLOW_SHARD_INDEX_FIELD = new ParseField("follow_shard_index"); static final ParseField FOLLOW_SHARD_INDEX_UUID_FIELD = new ParseField("follow_shard_index_uuid"); static final ParseField FOLLOW_SHARD_SHARDID_FIELD = new ParseField("follow_shard_shard"); @@ -59,7 +59,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { (int) a[10], (int) a[11], (TimeValue) a[12], (TimeValue) a[13], (Map) a[14])); static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), 
LEADER_CLUSTER_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REMOTE_CLUSTER_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), FOLLOW_SHARD_INDEX_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), FOLLOW_SHARD_INDEX_UUID_FIELD); PARSER.declareInt(ConstructingObjectParser.constructorArg(), FOLLOW_SHARD_SHARDID_FIELD); @@ -84,7 +84,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS); } - private final String leaderCluster; + private final String remoteCluster; private final ShardId followShardId; private final ShardId leaderShardId; private final int maxBatchOperationCount; @@ -97,7 +97,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { private final Map headers; ShardFollowTask( - final String leaderCluster, + final String remoteCluster, final ShardId followShardId, final ShardId leaderShardId, final int maxBatchOperationCount, @@ -108,7 +108,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { final TimeValue maxRetryDelay, final TimeValue pollTimeout, final Map headers) { - this.leaderCluster = leaderCluster; + this.remoteCluster = remoteCluster; this.followShardId = followShardId; this.leaderShardId = leaderShardId; this.maxBatchOperationCount = maxBatchOperationCount; @@ -122,7 +122,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { } public ShardFollowTask(StreamInput in) throws IOException { - this.leaderCluster = in.readString(); + this.remoteCluster = in.readString(); this.followShardId = ShardId.readShardId(in); this.leaderShardId = ShardId.readShardId(in); this.maxBatchOperationCount = in.readVInt(); @@ -135,8 +135,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); } - public String getLeaderCluster() { - return leaderCluster; + public String getRemoteCluster() { + return remoteCluster; } public ShardId getFollowShardId() { @@ -190,7 +190,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(leaderCluster); + out.writeString(remoteCluster); followShardId.writeTo(out); leaderShardId.writeTo(out); out.writeVLong(maxBatchOperationCount); @@ -210,7 +210,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); + builder.field(REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster); builder.field(FOLLOW_SHARD_INDEX_FIELD.getPreferredName(), followShardId.getIndex().getName()); builder.field(FOLLOW_SHARD_INDEX_UUID_FIELD.getPreferredName(), followShardId.getIndex().getUUID()); builder.field(FOLLOW_SHARD_SHARDID_FIELD.getPreferredName(), followShardId.id()); @@ -233,7 +233,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ShardFollowTask that = (ShardFollowTask) o; - return Objects.equals(leaderCluster, that.leaderCluster) && + return Objects.equals(remoteCluster, that.remoteCluster) && 
Objects.equals(followShardId, that.followShardId) && Objects.equals(leaderShardId, that.leaderShardId) && maxBatchOperationCount == that.maxBatchOperationCount && @@ -249,7 +249,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public int hashCode() { return Objects.hash( - leaderCluster, + remoteCluster, followShardId, leaderShardId, maxBatchOperationCount, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 014df78b39e..128c2a846d8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -79,8 +79,8 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor headers) { ShardFollowTask params = taskInProgress.getParams(); final Client leaderClient; - if (params.getLeaderCluster() != null) { - leaderClient = wrapClient(client.getRemoteClusterClient(params.getLeaderCluster()), params.getHeaders()); + if (params.getRemoteCluster() != null) { + leaderClient = wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders()); } else { leaderClient = wrapClient(client, params.getHeaders()); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index 8ac28e23fda..8832275f9a9 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -76,7 +76,7 @@ public class TransportPutAutoFollowPatternAction extends listener.onFailure(LicenseUtils.newComplianceException("ccr")); return; } - final Client leaderClient = client.getRemoteClusterClient(request.getLeaderCluster()); + final Client leaderClient = client.getRemoteClusterClient(request.getRemoteCluster()); final ClusterStateRequest clusterStateRequest = new ClusterStateRequest(); clusterStateRequest.clear(); clusterStateRequest.metaData(true); @@ -93,7 +93,7 @@ public class TransportPutAutoFollowPatternAction extends ActionListener.wrap( clusterStateResponse -> { final ClusterState leaderClusterState = clusterStateResponse.getState(); - clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderCluster(), + clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getRemoteCluster(), new AckedClusterStateUpdateTask(request, listener) { @Override @@ -157,7 +157,7 @@ public class TransportPutAutoFollowPatternAction extends } AutoFollowPattern autoFollowPattern = new AutoFollowPattern( - request.getLeaderCluster(), + request.getRemoteCluster(), request.getLeaderIndexPatterns(), request.getFollowIndexNamePattern(), request.getMaxBatchOperationCount(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 66bed231f72..d05f17000d9 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -95,22 +95,22 @@ public final class TransportPutFollowAction listener.onFailure(LicenseUtils.newComplianceException("ccr")); return; } - String leaderCluster = request.getLeaderCluster(); + String remoteCluster = request.getRemoteCluster(); // Validates whether the leader cluster has been configured properly: - client.getRemoteClusterClient(leaderCluster); + client.getRemoteClusterClient(remoteCluster); String leaderIndex = request.getLeaderIndex(); - createFollowerIndexAndFollowRemoteIndex(request, leaderCluster, leaderIndex, listener); + createFollowerIndexAndFollowRemoteIndex(request, remoteCluster, leaderIndex, listener); } private void createFollowerIndexAndFollowRemoteIndex( final PutFollowAction.Request request, - final String leaderCluster, + final String remoteCluster, final String leaderIndex, final ActionListener listener) { ccrLicenseChecker.checkRemoteClusterLicenseAndFetchLeaderIndexMetadataAndHistoryUUIDs( client, - leaderCluster, + remoteCluster, leaderIndex, listener::onFailure, (historyUUID, leaderIndexMetaData) -> createFollowerIndex(leaderIndexMetaData, historyUUID, request, listener)); @@ -160,7 +160,7 @@ public final class TransportPutFollowAction metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS, String.join(",", historyUUIDs)); metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_UUID_KEY, leaderIndexMetaData.getIndexUUID()); metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_NAME_KEY, leaderIndexMetaData.getIndex().getName()); - metadata.put(Ccr.CCR_CUSTOM_METADATA_LEADER_CLUSTER_NAME_KEY, request.getLeaderCluster()); + metadata.put(Ccr.CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY, request.getRemoteCluster()); imdBuilder.putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, metadata); // Copy all settings, but overwrite a few settings. 
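// Illustrative sketch, not part of the patch: the hunk above shows the put-follow path
// writing the remote cluster name into the follower index's CCR custom metadata next to
// the leader index name, UUID and history UUIDs; the resume-follow path is then expected
// to read those keys back instead of taking leader_cluster/leader_index parameters.
// A minimal read-side sketch, assuming an accessor such as IndexMetaData#getCustomData
// (the accessor and the method/variable names below are assumptions, not taken from the patch):
import java.util.Map;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.xpack.ccr.Ccr;

static String remoteClusterFromCcrMetadata(IndexMetaData followIndexMetadata) {
    // the keys are the constants introduced/renamed in this patch
    Map<String, String> ccrMetadata = followIndexMetadata.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY);
    return ccrMetadata.get(Ccr.CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY);
}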
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 01d76a4ea26..53ac116d38e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -107,7 +107,7 @@ public class TransportResumeFollowAction extends HandledTransportAction followerClient().execute(PutFollowAction.INSTANCE, followRequest).actionGet()); assertThat(e.getMessage(), equalTo("unknown cluster alias [another_cluster]")); PutAutoFollowPatternAction.Request putAutoFollowRequest = new PutAutoFollowPatternAction.Request(); putAutoFollowRequest.setName("name"); - putAutoFollowRequest.setLeaderCluster("another_cluster"); + putAutoFollowRequest.setRemoteCluster("another_cluster"); putAutoFollowRequest.setLeaderIndexPatterns(Collections.singletonList("logs-*")); e = expectThrows(IllegalArgumentException.class, () -> followerClient().execute(PutAutoFollowPatternAction.INSTANCE, putAutoFollowRequest).actionGet()); @@ -961,7 +961,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { public static PutFollowAction.Request putFollow(String leaderIndex, String followerIndex) { PutFollowAction.Request request = new PutFollowAction.Request(); - request.setLeaderCluster("leader_cluster"); + request.setRemoteCluster("leader_cluster"); request.setLeaderIndex(leaderIndex); request.setFollowRequest(resumeFollow(followerIndex)); return request; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 9db8d5f55f0..c7aac7576b1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -83,7 +83,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { AutoFollower autoFollower = new AutoFollower(handler, currentState) { @Override void getLeaderClusterState(Map headers, - String leaderClusterAlias, + String remoteCluster, BiConsumer handler) { assertThat(headers, equalTo(autoFollowHeaders.get("remote"))); handler.accept(leaderState, null); @@ -95,7 +95,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { Runnable successHandler, Consumer failureHandler) { assertThat(headers, equalTo(autoFollowHeaders.get("remote"))); - assertThat(followRequest.getLeaderCluster(), equalTo("remote")); + assertThat(followRequest.getRemoteCluster(), equalTo("remote")); assertThat(followRequest.getLeaderIndex(), equalTo("logs-20190101")); assertThat(followRequest.getFollowRequest().getFollowerIndex(), equalTo("logs-20190101")); successHandler.run(); @@ -143,7 +143,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override void getLeaderClusterState(Map headers, - String leaderClusterAlias, + String remoteCluster, BiConsumer handler) { handler.accept(null, failure); } @@ -204,7 +204,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override void getLeaderClusterState(Map headers, - String leaderClusterAlias, + String remoteCluster, 
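// Illustrative sketch, not part of the patch: after this change the remote cluster and
// leader index are supplied when the follower index is created (put-follow) rather than
// when following is resumed. A minimal sketch of assembling such a request with the
// renamed setters shown in this patch; the index names and the "leader_cluster" alias
// are placeholders:
import org.elasticsearch.xpack.core.ccr.action.PutFollowAction;
import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction;

static PutFollowAction.Request newPutFollowRequest(String remoteCluster, String leaderIndex, String followerIndex) {
    // per-shard tuning parameters (max_batch_operation_count, poll_timeout, ...) stay on the nested request
    ResumeFollowAction.Request resumeRequest = new ResumeFollowAction.Request();
    resumeRequest.setFollowerIndex(followerIndex);

    PutFollowAction.Request putRequest = new PutFollowAction.Request();
    putRequest.setRemoteCluster(remoteCluster);   // renamed from the old leader_cluster field
    putRequest.setLeaderIndex(leaderIndex);
    putRequest.setFollowRequest(resumeRequest);
    return putRequest;
}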
BiConsumer handler) { handler.accept(leaderState, null); } @@ -214,7 +214,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { - assertThat(followRequest.getLeaderCluster(), equalTo("remote")); + assertThat(followRequest.getRemoteCluster(), equalTo("remote")); assertThat(followRequest.getLeaderIndex(), equalTo("logs-20190101")); assertThat(followRequest.getFollowRequest().getFollowerIndex(), equalTo("logs-20190101")); successHandler.run(); @@ -267,7 +267,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override void getLeaderClusterState(Map headers, - String leaderClusterAlias, + String remoteCluster, BiConsumer handler) { handler.accept(leaderState, null); } @@ -277,7 +277,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { PutFollowAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { - assertThat(followRequest.getLeaderCluster(), equalTo("remote")); + assertThat(followRequest.getRemoteCluster(), equalTo("remote")); assertThat(followRequest.getLeaderIndex(), equalTo("logs-20190101")); assertThat(followRequest.getFollowRequest().getFollowerIndex(), equalTo("logs-20190101")); failureHandler.accept(failure); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java index 2cefc163ee9..67957d1e366 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java @@ -42,7 +42,7 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent protected PutAutoFollowPatternAction.Request createTestInstance() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); request.setName(randomAlphaOfLength(4)); - request.setLeaderCluster(randomAlphaOfLength(4)); + request.setRemoteCluster(randomAlphaOfLength(4)); request.setLeaderIndexPatterns(Arrays.asList(generateRandomStringArray(4, 4, false))); if (randomBoolean()) { request.setFollowIndexNamePattern(randomAlphaOfLength(4)); @@ -80,9 +80,9 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent request.setName("name"); validationException = request.validate(); assertThat(validationException, notNullValue()); - assertThat(validationException.getMessage(), containsString("[leader_cluster] is missing")); + assertThat(validationException.getMessage(), containsString("[remote_cluster] is missing")); - request.setLeaderCluster("_alias"); + request.setRemoteCluster("_alias"); validationException = request.validate(); assertThat(validationException, notNullValue()); assertThat(validationException.getMessage(), containsString("[leader_index_patterns] is missing")); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java index b8c1d5511df..1385b383b94 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutFollowActionRequestTests.java @@ 
-21,7 +21,7 @@ public class PutFollowActionRequestTests extends AbstractStreamableXContentTestC @Override protected PutFollowAction.Request createTestInstance() { PutFollowAction.Request request = new PutFollowAction.Request(); - request.setLeaderCluster(randomAlphaOfLength(4)); + request.setRemoteCluster(randomAlphaOfLength(4)); request.setLeaderIndex(randomAlphaOfLength(4)); request.setFollowRequest(ResumeFollowActionRequestTests.createTestRequest()); return request; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java index 6bd5136e4be..a3881a6728f 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java @@ -65,7 +65,7 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase< @Override protected void assertEqualInstances(final ShardFollowNodeTaskStatus expectedInstance, final ShardFollowNodeTaskStatus newInstance) { assertNotSame(expectedInstance, newInstance); - assertThat(newInstance.getLeaderCluster(), equalTo(expectedInstance.getLeaderCluster())); + assertThat(newInstance.getRemoteCluster(), equalTo(expectedInstance.getRemoteCluster())); assertThat(newInstance.leaderIndex(), equalTo(expectedInstance.leaderIndex())); assertThat(newInstance.followerIndex(), equalTo(expectedInstance.followerIndex())); assertThat(newInstance.getShardId(), equalTo(expectedInstance.getShardId())); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java index 84df243bd94..e5e995fa4a4 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java @@ -63,7 +63,7 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { .custom(AutoFollowMetadata.TYPE); assertThat(result.getPatterns().size(), equalTo(1)); assertThat(result.getPatterns().get("name2"), notNullValue()); - assertThat(result.getPatterns().get("name2").getLeaderCluster(), equalTo("asia_cluster")); + assertThat(result.getPatterns().get("name2").getRemoteCluster(), equalTo("asia_cluster")); assertThat(result.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); assertThat(result.getFollowedLeaderIndexUUIDs().get("name2"), notNullValue()); assertThat(result.getHeaders().size(), equalTo(1)); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java index 7c4368d317f..6d4ef138fb4 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java @@ -30,7 +30,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { public void testInnerPut() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); 
request.setName("name1"); - request.setLeaderCluster("eu_cluster"); + request.setRemoteCluster("eu_cluster"); request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); ClusterState localState = ClusterState.builder(new ClusterName("us_cluster")) @@ -45,7 +45,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); assertThat(autoFollowMetadata, notNullValue()); assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getRemoteCluster(), equalTo("eu_cluster")); assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(1)); assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*")); assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); @@ -55,7 +55,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { public void testInnerPut_existingLeaderIndices() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); request.setName("name1"); - request.setLeaderCluster("eu_cluster"); + request.setRemoteCluster("eu_cluster"); request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); ClusterState localState = ClusterState.builder(new ClusterName("us_cluster")) @@ -86,7 +86,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); assertThat(autoFollowMetadata, notNullValue()); assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getRemoteCluster(), equalTo("eu_cluster")); assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(1)); assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*")); assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); @@ -96,7 +96,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { public void testInnerPut_existingLeaderIndicesAndAutoFollowMetadata() { PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); request.setName("name1"); - request.setLeaderCluster("eu_cluster"); + request.setRemoteCluster("eu_cluster"); request.setLeaderIndexPatterns(Arrays.asList("logs-*", "transactions-*")); Map existingAutoFollowPatterns = new HashMap<>(); @@ -133,7 +133,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); assertThat(autoFollowMetadata, notNullValue()); assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); - assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderCluster(), equalTo("eu_cluster")); + assertThat(autoFollowMetadata.getPatterns().get("name1").getRemoteCluster(), equalTo("eu_cluster")); assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().size(), equalTo(2)); assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(0), equalTo("logs-*")); 
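// Illustrative sketch, not part of the patch: the auto-follow pattern request built in the
// tests above uses the renamed field as well, and leaving the remote cluster unset now fails
// validation with "[remote_cluster] is missing". The values below are placeholders:
import java.util.Collections;
import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction;

static PutAutoFollowPatternAction.Request newAutoFollowRequest() {
    PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request();
    request.setName("name1");
    request.setRemoteCluster("eu_cluster");        // renamed from setLeaderCluster
    request.setLeaderIndexPatterns(Collections.singletonList("logs-*"));
    request.setFollowIndexNamePattern("copy-of-leader");   // optional, placeholder value
    return request;                                 // request.validate() returns null when valid
}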
assertThat(autoFollowMetadata.getPatterns().get("name1").getLeaderIndexPatterns().get(1), equalTo("transactions-*")); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 219bf7187ba..4b36005de36 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -154,7 +154,7 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase> fetchExceptions = new TreeMap<>(Collections.singletonMap(1L, Tuple.tuple(2, new ElasticsearchException("shard is sad")))); final ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus( - "leader_cluster", + "remote_cluster", "leader_index", "follower_index", 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index 8f01c56c3f0..5234151010c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -175,7 +175,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public static class AutoFollowPattern implements Writeable, ToXContentObject { - public static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster"); + public static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster"); public static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_index_patterns"); public static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_index_pattern"); public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); @@ -194,7 +194,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i (TimeValue) args[9])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_CLUSTER_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), REMOTE_CLUSTER_FIELD); PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), LEADER_PATTERNS_FIELD); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FOLLOW_PATTERN_FIELD); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_OPERATION_COUNT); @@ -214,7 +214,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i POLL_TIMEOUT, ObjectParser.ValueType.STRING); } - private final String leaderCluster; + private final String remoteCluster; private final List leaderIndexPatterns; private final String followIndexPattern; private final Integer maxBatchOperationCount; @@ -225,7 +225,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i private final TimeValue maxRetryDelay; private final TimeValue pollTimeout; - public AutoFollowPattern(String leaderCluster, + public AutoFollowPattern(String remoteCluster, List leaderIndexPatterns, String followIndexPattern, Integer maxBatchOperationCount, @@ -235,7 +235,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i Integer maxWriteBufferSize, TimeValue maxRetryDelay, TimeValue pollTimeout) { - this.leaderCluster = leaderCluster; + 
this.remoteCluster = remoteCluster; this.leaderIndexPatterns = leaderIndexPatterns; this.followIndexPattern = followIndexPattern; this.maxBatchOperationCount = maxBatchOperationCount; @@ -248,7 +248,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i } public AutoFollowPattern(StreamInput in) throws IOException { - leaderCluster = in.readString(); + remoteCluster = in.readString(); leaderIndexPatterns = in.readList(StreamInput::readString); followIndexPattern = in.readOptionalString(); maxBatchOperationCount = in.readOptionalVInt(); @@ -268,8 +268,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return Regex.simpleMatch(leaderIndexPatterns, indexName); } - public String getLeaderCluster() { - return leaderCluster; + public String getRemoteCluster() { + return remoteCluster; } public List getLeaderIndexPatterns() { @@ -310,7 +310,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(leaderCluster); + out.writeString(remoteCluster); out.writeStringList(leaderIndexPatterns); out.writeOptionalString(followIndexPattern); out.writeOptionalVInt(maxBatchOperationCount); @@ -324,7 +324,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); + builder.field(REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster); builder.array(LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns.toArray(new String[0])); if (followIndexPattern != null) { builder.field(FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexPattern); @@ -363,7 +363,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AutoFollowPattern that = (AutoFollowPattern) o; - return Objects.equals(leaderCluster, that.leaderCluster) && + return Objects.equals(remoteCluster, that.remoteCluster) && Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) && Objects.equals(followIndexPattern, that.followIndexPattern) && Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) && @@ -378,7 +378,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i @Override public int hashCode() { return Objects.hash( - leaderCluster, + remoteCluster, leaderIndexPatterns, followIndexPattern, maxBatchOperationCount, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java index e21729df58b..5869c78bc73 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java @@ -34,7 +34,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { public static final String STATUS_PARSER_NAME = "shard-follow-node-task-status"; - private static final ParseField LEADER_CLUSTER = new ParseField("leader_cluster"); + private static final ParseField LEADER_CLUSTER = new ParseField("remote_cluster"); private static final ParseField LEADER_INDEX = new ParseField("leader_index"); private static final ParseField FOLLOWER_INDEX = new ParseField("follower_index"); private static final ParseField SHARD_ID = new 
ParseField("shard_id"); @@ -48,7 +48,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { private static final ParseField NUMBER_OF_QUEUED_WRITES_FIELD = new ParseField("number_of_queued_writes"); private static final ParseField MAPPING_VERSION_FIELD = new ParseField("mapping_version"); private static final ParseField TOTAL_FETCH_TIME_MILLIS_FIELD = new ParseField("total_fetch_time_millis"); - private static final ParseField TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD = new ParseField("total_fetch_leader_time_millis"); + private static final ParseField TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD = new ParseField("total_fetch_remote_time_millis"); private static final ParseField NUMBER_OF_SUCCESSFUL_FETCHES_FIELD = new ParseField("number_of_successful_fetches"); private static final ParseField NUMBER_OF_FAILED_FETCHES_FIELD = new ParseField("number_of_failed_fetches"); private static final ParseField OPERATIONS_RECEIVED_FIELD = new ParseField("operations_received"); @@ -118,7 +118,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_QUEUED_WRITES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAPPING_VERSION_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_TIME_MILLIS_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_FETCHES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_FETCHES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_RECEIVED_FIELD); @@ -147,10 +147,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status { FETCH_EXCEPTIONS_ENTRY_EXCEPTION); } - private final String leaderCluster; + private final String remoteCluster; - public String getLeaderCluster() { - return leaderCluster; + public String getRemoteCluster() { + return remoteCluster; } private final String leaderIndex; @@ -231,10 +231,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status { return totalFetchTimeMillis; } - private final long totalFetchLeaderTimeMillis; + private final long totalFetchRemoteTimeMillis; - public long totalFetchLeaderTimeMillis() { - return totalFetchLeaderTimeMillis; + public long totalFetchRemoteTimeMillis() { + return totalFetchRemoteTimeMillis; } private final long numberOfSuccessfulFetches; @@ -304,7 +304,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { } public ShardFollowNodeTaskStatus( - final String leaderCluster, + final String remoteCluster, final String leaderIndex, final String followerIndex, final int shardId, @@ -318,7 +318,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final int numberOfQueuedWrites, final long mappingVersion, final long totalFetchTimeMillis, - final long totalFetchLeaderTimeMillis, + final long totalFetchRemoteTimeMillis, final long numberOfSuccessfulFetches, final long numberOfFailedFetches, final long operationsReceived, @@ -330,7 +330,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final NavigableMap> fetchExceptions, final long timeSinceLastFetchMillis, final ElasticsearchException fatalException) { - this.leaderCluster = leaderCluster; + this.remoteCluster = 
remoteCluster; this.leaderIndex = leaderIndex; this.followerIndex = followerIndex; this.shardId = shardId; @@ -344,7 +344,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.numberOfQueuedWrites = numberOfQueuedWrites; this.mappingVersion = mappingVersion; this.totalFetchTimeMillis = totalFetchTimeMillis; - this.totalFetchLeaderTimeMillis = totalFetchLeaderTimeMillis; + this.totalFetchRemoteTimeMillis = totalFetchRemoteTimeMillis; this.numberOfSuccessfulFetches = numberOfSuccessfulFetches; this.numberOfFailedFetches = numberOfFailedFetches; this.operationsReceived = operationsReceived; @@ -359,7 +359,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { } public ShardFollowNodeTaskStatus(final StreamInput in) throws IOException { - this.leaderCluster = in.readOptionalString(); + this.remoteCluster = in.readOptionalString(); this.leaderIndex = in.readString(); this.followerIndex = in.readString(); this.shardId = in.readVInt(); @@ -373,7 +373,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.numberOfQueuedWrites = in.readVInt(); this.mappingVersion = in.readVLong(); this.totalFetchTimeMillis = in.readVLong(); - this.totalFetchLeaderTimeMillis = in.readVLong(); + this.totalFetchRemoteTimeMillis = in.readVLong(); this.numberOfSuccessfulFetches = in.readVLong(); this.numberOfFailedFetches = in.readVLong(); this.operationsReceived = in.readVLong(); @@ -395,7 +395,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { @Override public void writeTo(final StreamOutput out) throws IOException { - out.writeOptionalString(leaderCluster); + out.writeOptionalString(remoteCluster); out.writeString(leaderIndex); out.writeString(followerIndex); out.writeVInt(shardId); @@ -409,7 +409,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { out.writeVInt(numberOfQueuedWrites); out.writeVLong(mappingVersion); out.writeVLong(totalFetchTimeMillis); - out.writeVLong(totalFetchLeaderTimeMillis); + out.writeVLong(totalFetchRemoteTimeMillis); out.writeVLong(numberOfSuccessfulFetches); out.writeVLong(numberOfFailedFetches); out.writeVLong(operationsReceived); @@ -440,7 +440,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { } public XContentBuilder toXContentFragment(final XContentBuilder builder, final Params params) throws IOException { - builder.field(LEADER_CLUSTER.getPreferredName(), leaderCluster); + builder.field(LEADER_CLUSTER.getPreferredName(), remoteCluster); builder.field(LEADER_INDEX.getPreferredName(), leaderIndex); builder.field(FOLLOWER_INDEX.getPreferredName(), followerIndex); builder.field(SHARD_ID.getPreferredName(), shardId); @@ -458,9 +458,9 @@ public class ShardFollowNodeTaskStatus implements Task.Status { "total_fetch_time", new TimeValue(totalFetchTimeMillis, TimeUnit.MILLISECONDS)); builder.humanReadableField( - TOTAL_FETCH_LEADER_TIME_MILLIS_FIELD.getPreferredName(), + TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD.getPreferredName(), "total_fetch_leader_time", - new TimeValue(totalFetchLeaderTimeMillis, TimeUnit.MILLISECONDS)); + new TimeValue(totalFetchRemoteTimeMillis, TimeUnit.MILLISECONDS)); builder.field(NUMBER_OF_SUCCESSFUL_FETCHES_FIELD.getPreferredName(), numberOfSuccessfulFetches); builder.field(NUMBER_OF_FAILED_FETCHES_FIELD.getPreferredName(), numberOfFailedFetches); builder.field(OPERATIONS_RECEIVED_FIELD.getPreferredName(), operationsReceived); @@ -519,7 +519,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final ShardFollowNodeTaskStatus that = 
(ShardFollowNodeTaskStatus) o; String fatalExceptionMessage = fatalException != null ? fatalException.getMessage() : null; String otherFatalExceptionMessage = that.fatalException != null ? that.fatalException.getMessage() : null; - return leaderCluster.equals(that.leaderCluster) && + return remoteCluster.equals(that.remoteCluster) && leaderIndex.equals(that.leaderIndex) && followerIndex.equals(that.followerIndex) && shardId == that.shardId && @@ -533,7 +533,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { numberOfQueuedWrites == that.numberOfQueuedWrites && mappingVersion == that.mappingVersion && totalFetchTimeMillis == that.totalFetchTimeMillis && - totalFetchLeaderTimeMillis == that.totalFetchLeaderTimeMillis && + totalFetchRemoteTimeMillis == that.totalFetchRemoteTimeMillis && numberOfSuccessfulFetches == that.numberOfSuccessfulFetches && numberOfFailedFetches == that.numberOfFailedFetches && operationsReceived == that.operationsReceived && @@ -556,7 +556,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { public int hashCode() { String fatalExceptionMessage = fatalException != null ? fatalException.getMessage() : null; return Objects.hash( - leaderCluster, + remoteCluster, leaderIndex, followerIndex, shardId, @@ -570,7 +570,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { numberOfQueuedWrites, mappingVersion, totalFetchTimeMillis, - totalFetchLeaderTimeMillis, + totalFetchRemoteTimeMillis, numberOfSuccessfulFetches, numberOfFailedFetches, operationsReceived, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 5b8336be768..5a87666d050 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern.REMOTE_CLUSTER_FIELD; public class PutAutoFollowPatternAction extends Action { @@ -48,7 +49,7 @@ public class PutAutoFollowPatternAction extends Action { static { PARSER.declareString(Request::setName, NAME_FIELD); - PARSER.declareString(Request::setLeaderCluster, AutoFollowPattern.LEADER_CLUSTER_FIELD); + PARSER.declareString(Request::setRemoteCluster, REMOTE_CLUSTER_FIELD); PARSER.declareStringArray(Request::setLeaderIndexPatterns, AutoFollowPattern.LEADER_PATTERNS_FIELD); PARSER.declareString(Request::setFollowIndexNamePattern, AutoFollowPattern.FOLLOW_PATTERN_FIELD); PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT); @@ -83,7 +84,7 @@ public class PutAutoFollowPatternAction extends Action { } private String name; - private String leaderCluster; + private String remoteCluster; private List leaderIndexPatterns; private String followIndexNamePattern; @@ -101,8 +102,8 @@ public class PutAutoFollowPatternAction extends Action { if (name == null) { validationException = addValidationError("[" + NAME_FIELD.getPreferredName() + "] is missing", validationException); } - if (leaderCluster == null) { - validationException = addValidationError("[" + AutoFollowPattern.LEADER_CLUSTER_FIELD.getPreferredName() + + if (remoteCluster == null) 
{ + validationException = addValidationError("[" + REMOTE_CLUSTER_FIELD.getPreferredName() + "] is missing", validationException); } if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) { @@ -133,12 +134,12 @@ public class PutAutoFollowPatternAction extends Action { this.name = name; } - public String getLeaderCluster() { - return leaderCluster; + public String getRemoteCluster() { + return remoteCluster; } - public void setLeaderCluster(String leaderCluster) { - this.leaderCluster = leaderCluster; + public void setRemoteCluster(String remoteCluster) { + this.remoteCluster = remoteCluster; } public List getLeaderIndexPatterns() { @@ -217,7 +218,7 @@ public class PutAutoFollowPatternAction extends Action { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); name = in.readString(); - leaderCluster = in.readString(); + remoteCluster = in.readString(); leaderIndexPatterns = in.readList(StreamInput::readString); followIndexNamePattern = in.readOptionalString(); maxBatchOperationCount = in.readOptionalVInt(); @@ -233,7 +234,7 @@ public class PutAutoFollowPatternAction extends Action { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(name); - out.writeString(leaderCluster); + out.writeString(remoteCluster); out.writeStringList(leaderIndexPatterns); out.writeOptionalString(followIndexNamePattern); out.writeOptionalVInt(maxBatchOperationCount); @@ -250,7 +251,7 @@ public class PutAutoFollowPatternAction extends Action { builder.startObject(); { builder.field(NAME_FIELD.getPreferredName(), name); - builder.field(AutoFollowPattern.LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); + builder.field(REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster); builder.field(AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns); if (followIndexNamePattern != null) { builder.field(AutoFollowPattern.FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern); @@ -287,7 +288,7 @@ public class PutAutoFollowPatternAction extends Action { if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; return Objects.equals(name, request.name) && - Objects.equals(leaderCluster, request.leaderCluster) && + Objects.equals(remoteCluster, request.remoteCluster) && Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) && Objects.equals(followIndexNamePattern, request.followIndexNamePattern) && Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) && @@ -303,7 +304,7 @@ public class PutAutoFollowPatternAction extends Action { public int hashCode() { return Objects.hash( name, - leaderCluster, + remoteCluster, leaderIndexPatterns, followIndexNamePattern, maxBatchOperationCount, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 291fc853335..28895a59073 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -51,7 +51,7 @@ public final class PutFollowAction extends Action { public static class Request extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { - private static final ParseField LEADER_CLUSTER_FIELD = new ParseField("leader_cluster"); + private static final ParseField REMOTE_CLUSTER_FIELD = new 
ParseField("remote_cluster"); private static final ParseField LEADER_INDEX_FIELD = new ParseField("leader_index"); private static final ObjectParser PARSER = new ObjectParser<>(NAME, () -> { @@ -61,7 +61,7 @@ public final class PutFollowAction extends Action { }); static { - PARSER.declareString(Request::setLeaderCluster, LEADER_CLUSTER_FIELD); + PARSER.declareString(Request::setRemoteCluster, REMOTE_CLUSTER_FIELD); PARSER.declareString(Request::setLeaderIndex, LEADER_INDEX_FIELD); PARSER.declareString((request, value) -> request.followRequest.setFollowerIndex(value), FOLLOWER_INDEX_FIELD); PARSER.declareInt((request, value) -> request.followRequest.setMaxBatchOperationCount(value), MAX_BATCH_OPERATION_COUNT); @@ -99,19 +99,19 @@ public final class PutFollowAction extends Action { return request; } - private String leaderCluster; + private String remoteCluster; private String leaderIndex; private ResumeFollowAction.Request followRequest; public Request() { } - public String getLeaderCluster() { - return leaderCluster; + public String getRemoteCluster() { + return remoteCluster; } - public void setLeaderCluster(String leaderCluster) { - this.leaderCluster = leaderCluster; + public void setRemoteCluster(String remoteCluster) { + this.remoteCluster = remoteCluster; } public String getLeaderIndex() { @@ -133,8 +133,8 @@ public final class PutFollowAction extends Action { @Override public ActionRequestValidationException validate() { ActionRequestValidationException e = followRequest.validate(); - if (leaderCluster == null) { - e = addValidationError(LEADER_CLUSTER_FIELD.getPreferredName() + " is missing", e); + if (remoteCluster == null) { + e = addValidationError(REMOTE_CLUSTER_FIELD.getPreferredName() + " is missing", e); } if (leaderIndex == null) { e = addValidationError(LEADER_INDEX_FIELD.getPreferredName() + " is missing", e); @@ -155,7 +155,7 @@ public final class PutFollowAction extends Action { @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - leaderCluster = in.readString(); + remoteCluster = in.readString(); leaderIndex = in.readString(); followRequest = new ResumeFollowAction.Request(); followRequest.readFrom(in); @@ -164,7 +164,7 @@ public final class PutFollowAction extends Action { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeString(leaderCluster); + out.writeString(remoteCluster); out.writeString(leaderIndex); followRequest.writeTo(out); } @@ -173,7 +173,7 @@ public final class PutFollowAction extends Action { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(LEADER_CLUSTER_FIELD.getPreferredName(), leaderCluster); + builder.field(REMOTE_CLUSTER_FIELD.getPreferredName(), remoteCluster); builder.field(LEADER_INDEX_FIELD.getPreferredName(), leaderIndex); followRequest.toXContentFragment(builder, params); } @@ -186,14 +186,14 @@ public final class PutFollowAction extends Action { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(leaderCluster, request.leaderCluster) && + return Objects.equals(remoteCluster, request.remoteCluster) && Objects.equals(leaderIndex, request.leaderIndex) && Objects.equals(followRequest, request.followRequest); } @Override public int hashCode() { - return Objects.hash(leaderCluster, leaderIndex, followRequest); + return Objects.hash(remoteCluster, leaderIndex, followRequest); } } diff 
--git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 791a0ea02c3..bad47024f05 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -929,7 +929,7 @@ }, "ccr_stats": { "properties": { - "leader_cluster": { + "remote_cluster": { "type": "keyword" }, "leader_index": { @@ -971,7 +971,7 @@ "total_fetch_time_millis": { "type": "long" }, - "total_fetch_leader_time_millis": { + "total_fetch_remote_time_millis": { "type": "long" }, "number_of_successful_fetches": { From d5ad3de42ee7f1b45d57922e89e6d7a8b8f6e51e Mon Sep 17 00:00:00 2001 From: lipsill <39668292+lipsill@users.noreply.github.com> Date: Wed, 24 Oct 2018 14:21:24 +0200 Subject: [PATCH 26/67] [test] Introduce strict deprecation mode for REST tests (#34338) #33708 introduced a strict deprecation mode that makes a REST request fail if there is a warning header in the response returned by Elasticsearch (usually a deprecation message signaling that a feature or a field has been deprecated). This change adds the strict deprecation mode into the REST integration tests, and makes the tests fail if a deprecated feature is used. Also any test using a deprecated feature has been modified to pass the build. The YAML integration tests already analyzed HTTP warnings so they do not use this mode, keeping their "expected vs actual" behavior. --- .../upgrades/FullClusterRestartIT.java | 29 ++++++++++++++---- .../upgrades/QueryBuilderBWCIT.java | 2 +- .../elasticsearch/upgrades/IndexingIT.java | 26 ++++++++++++++++ .../test/rest/ESRestTestCase.java | 9 ++++++ .../rest/yaml/ESClientYamlSuiteTestCase.java | 5 ++++ .../AbstractFullClusterRestartTestCase.java | 30 +++++++++++++++++++ .../ml/transforms/PainlessDomainSplitIT.java | 4 +-- .../elasticsearch/upgrades/IndexingIT.java | 26 ++++++++++++++++ .../TokenBackwardsCompatibilityIT.java | 25 ++++++++++++++++ 9 files changed, 147 insertions(+), 9 deletions(-) diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 6df1854cc22..ce66800d892 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -60,6 +60,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.startsWith; /** * Tests to run before and after a full cluster restart. 
This is run twice, @@ -75,7 +76,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { private String index; @Before - public void setIndex() { + public void setIndex() throws IOException { index = getTestName().toLowerCase(Locale.ROOT); } @@ -283,7 +284,8 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); - mappingsAndSettings.field("template", index); + mappingsAndSettings.field("index_patterns", index); + mappingsAndSettings.field("order", "1000"); { mappingsAndSettings.startObject("settings"); mappingsAndSettings.field("number_of_shards", 1); @@ -361,6 +363,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { client().performRequest(updateSettingsRequest); Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex); + shrinkIndexRequest.addParameter("copy_settings", "true"); shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}"); client().performRequest(shrinkIndexRequest); @@ -844,7 +847,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { // Stick a template into the cluster so we can see it after the restore XContentBuilder templateBuilder = JsonXContent.contentBuilder().startObject(); - templateBuilder.field("template", "evil_*"); // Don't confuse other tests by applying the template + templateBuilder.field("index_patterns", "evil_*"); // Don't confuse other tests by applying the template templateBuilder.startObject("settings"); { templateBuilder.field("number_of_shards", 1); } @@ -949,9 +952,23 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { assertEquals(singletonList(tookOnVersion.toString()), XContentMapValues.extractValue("snapshots.version", listSnapshotResponse)); // Remove the routing setting and template so we can test restoring them. - Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings"); - clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}"); - client().performRequest(clearRoutingFromSettings); + try { + Request clearRoutingFromSettings = new Request("PUT", "/_cluster/settings"); + clearRoutingFromSettings.setJsonEntity("{\"persistent\":{\"cluster.routing.allocation.exclude.test_attr\": null}}"); + client().performRequest(clearRoutingFromSettings); + } catch (ResponseException e) { + if (e.getResponse().hasWarnings() + && (isRunningAgainstOldCluster() == false || getOldClusterVersion().onOrAfter(Version.V_6_5_0))) { + e.getResponse().getWarnings().stream().forEach(warning -> { + assertThat(warning, containsString( + "setting was deprecated in Elasticsearch and will be removed in a future release! 
" + + "See the breaking changes documentation for the next major version.")); + assertThat(warning, startsWith("[search.remote.")); + }); + } else { + throw e; + } + } client().performRequest(new Request("DELETE", "/_template/test_template")); // Restore diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 2b7250f86b7..c3cd8f61538 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -196,7 +196,7 @@ public class QueryBuilderBWCIT extends AbstractFullClusterRestartTestCase { QueryBuilder expectedQueryBuilder = (QueryBuilder) CANDIDATES.get(i)[1]; Request request = new Request("GET", "/" + index + "/_search"); request.setJsonEntity("{\"query\": {\"ids\": {\"values\": [\"" + Integer.toString(i) + "\"]}}, " + - "\"docvalue_fields\" : [\"query.query_builder_field\"]}"); + "\"docvalue_fields\": [{\"field\":\"query.query_builder_field\", \"format\":\"use_field_mapping\"}]}"); Response rsp = client().performRequest(request); assertEquals(200, rsp.getStatusLine().getStatusCode()); Map hitRsp = (Map) ((List) ((Map)toMap(rsp).get("hits")).get("hits")).get(0); diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java index 3898746e5c3..0b186db0f7a 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java @@ -20,12 +20,18 @@ package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import java.io.IOException; import java.nio.charset.StandardCharsets; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + /** * Basic test that indexed documents survive the rolling restart. 
See * {@link RecoveryIT} for much more in depth testing of the mechanism @@ -60,6 +66,26 @@ public class IndexingIT extends AbstractRollingTestCase { } if (CLUSTER_TYPE == ClusterType.OLD) { + { + Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion(); + assertThat("this branch is not needed if we aren't compatible with 6.0", + minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true)); + if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0_alpha1)) { + XContentBuilder template = jsonBuilder(); + template.startObject(); + { + template.field("index_patterns", "*"); + template.startObject("settings"); + template.field("number_of_shards", 5); + template.endObject(); + } + template.endObject(); + Request createTemplate = new Request("PUT", "/_template/template"); + createTemplate.setJsonEntity(Strings.toString(template)); + client().performRequest(createTemplate); + } + } + Request createTestIndex = new Request("PUT", "/test_index"); createTestIndex.setJsonEntity("{\"settings\": {\"index.number_of_replicas\": 0}}"); client().performRequest(createTestIndex); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 91d70b260fe..72299dab912 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -577,9 +577,18 @@ public abstract class ESRestTestCase extends ESTestCase { protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { RestClientBuilder builder = RestClient.builder(hosts); configureClient(builder, settings); + builder.setStrictDeprecationMode(getStrictDeprecationMode()); return builder.build(); } + /** + * Whether the used REST client should return any response containing at + * least one warning header as a failure. 
+ */ + protected boolean getStrictDeprecationMode() { + return true; + } + protected static void configureClient(RestClientBuilder builder, Settings settings) throws IOException { String keystorePath = settings.get(TRUSTSTORE_PATH); if (keystorePath != null) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index f76c5423534..011da53384d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -408,4 +408,9 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { configureClient(builder, restClientSettings()); return builder; } + + @Override + protected boolean getStrictDeprecationMode() { + return false; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java b/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java index 861d574b346..b61182415ee 100644 --- a/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java @@ -20,13 +20,43 @@ package org.elasticsearch.upgrades; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; public abstract class AbstractFullClusterRestartTestCase extends ESRestTestCase { private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); + @Before + public void init() throws IOException { + assertThat("we don't need this branch if we aren't compatible with 6.0", + Version.CURRENT.minimumIndexCompatibilityVersion().onOrBefore(Version.V_6_0_0), equalTo(true)); + if (isRunningAgainstOldCluster() && getOldClusterVersion().before(Version.V_7_0_0_alpha1)) { + XContentBuilder template = jsonBuilder(); + template.startObject(); + { + template.field("index_patterns", "*"); + template.field("order", "0"); + template.startObject("settings"); + template.field("number_of_shards", 5); + template.endObject(); + } + template.endObject(); + Request createTemplate = new Request("PUT", "/_template/template"); + createTemplate.setJsonEntity(Strings.toString(template)); + client().performRequest(createTemplate); + } + } + public final boolean isRunningAgainstOldCluster() { return runningAgainstOldCluster; } diff --git a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java index 7af4453c2d4..bc847e1a07d 100644 --- a/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java +++ b/x-pack/plugin/ml/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/ml/transforms/PainlessDomainSplitIT.java @@ -195,7 +195,7 @@ public class PainlessDomainSplitIT 
extends ESRestTestCase { String mapAsJson = Strings.toString(jsonBuilder().map(params)); logger.info("params={}", mapAsJson); - Request searchRequest = new Request("GET", "/painless/test/_search"); + Request searchRequest = new Request("GET", "/painless/_search"); searchRequest.setJsonEntity( "{\n" + " \"query\" : {\n" + @@ -205,7 +205,7 @@ public class PainlessDomainSplitIT extends ESRestTestCase { " \"domain_split\" : {\n" + " \"script\" : {\n" + " \"lang\": \"painless\",\n" + - " \"inline\": \"" + + " \"source\": \"" + " return domainSplit(params['host']); \",\n" + " \"params\": " + mapAsJson + "\n" + " }\n" + diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java index 3448117cd2c..20e7bf07e0f 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/IndexingIT.java @@ -7,12 +7,18 @@ package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import java.io.IOException; import java.nio.charset.StandardCharsets; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + /** * Basic test that indexed documents survive the rolling restart. *
@@ -45,6 +51,26 @@ public class IndexingIT extends AbstractUpgradeTestCase { } if (CLUSTER_TYPE == ClusterType.OLD) { + { + Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion(); + assertThat("this branch is not needed if we aren't compatible with 6.0", + minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true)); + if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0_alpha1)) { + XContentBuilder template = jsonBuilder(); + template.startObject(); + { + template.field("index_patterns", "*"); + template.startObject("settings"); + template.field("number_of_shards", 5); + template.endObject(); + } + template.endObject(); + Request createTemplate = new Request("PUT", "/_template/template"); + createTemplate.setJsonEntity(Strings.toString(template)); + client().performRequest(createTemplate); + } + } + Request createTestIndex = new Request("PUT", "/test_index"); createTestIndex.setJsonEntity("{\"settings\": {\"index.number_of_replicas\": 0}}"); client().performRequest(createTestIndex); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index 24965efc621..6afecfc2f28 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -13,6 +13,8 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.rest.yaml.ObjectPath; import java.io.IOException; @@ -20,10 +22,33 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase { public void testGeneratingTokenInOldCluster() throws Exception { assumeTrue("this test should only run against the old cluster", CLUSTER_TYPE == ClusterType.OLD); + { + Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion(); + assertThat("this branch is not needed if we aren't compatible with 6.0", + minimumIndexCompatibilityVersion.onOrBefore(Version.V_6_0_0), equalTo(true)); + if (minimumIndexCompatibilityVersion.before(Version.V_7_0_0_alpha1)) { + XContentBuilder template = jsonBuilder(); + template.startObject(); + { + template.field("index_patterns", "*"); + template.startObject("settings"); + template.field("number_of_shards", 5); + template.endObject(); + } + template.endObject(); + Request createTemplate = new Request("PUT", "/_template/template"); + createTemplate.setJsonEntity(Strings.toString(template)); + client().performRequest(createTemplate); + } + } + Request createTokenRequest = new Request("POST", "_xpack/security/oauth2/token"); createTokenRequest.setJsonEntity( "{\n" + From 4c73854da78d9f95e1c9a9aa7ae973e870102ba6 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Wed, 24 Oct 2018 14:42:40 +0200 Subject: [PATCH 27/67] SQL: Implement null handling for `IN(v1, v2, ...)` (#34750) Implemented null handling for both the value tested but also for values inside the 
list of values tested against. The null handling is implemented for local processors, painless scripts and Lucene Terms queries making it available for `IN` expressions occuring in `SELECT`, `WHERE` and `HAVING` clauses. Closes: #34582 --- .../xpack/sql/type/DataType.java | 11 ++-- .../xpack/sql/expression/Foldables.java | 2 +- .../xpack/sql/expression/predicate/In.java | 29 ++++++----- .../operator/comparison/InProcessor.java | 7 ++- .../xpack/sql/querydsl/query/TermsQuery.java | 3 ++ .../predicate/InProcessorTests.java | 11 ++++ .../sql/expression/predicate/InTests.java | 50 +++++++++++++++++++ .../sql/planner/QueryTranslatorTests.java | 26 ++++++++++ x-pack/qa/sql/src/main/resources/agg.sql-spec | 6 +++ .../qa/sql/src/main/resources/filter.sql-spec | 5 ++ .../qa/sql/src/main/resources/select.csv-spec | 37 +++++++++++++- 11 files changed, 164 insertions(+), 23 deletions(-) create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InTests.java diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 1c9cf6ac925..88b952b87ac 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -233,12 +233,9 @@ public enum DataType { public boolean isCompatibleWith(DataType other) { if (this == other) { return true; - } else if (isString() && other.isString()) { - return true; - } else if (isNumeric() && other.isNumeric()) { - return true; - } else { - return false; - } + } else return + (this == NULL || other == NULL) || + (isString() && other.isString()) || + (isNumeric() && other.isNumeric()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Foldables.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Foldables.java index 6e06a1d1c85..8c672ed162e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Foldables.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Foldables.java @@ -48,7 +48,7 @@ public abstract class Foldables { public static List valuesOf(List list, DataType to) { List l = new ArrayList<>(list.size()); for (Expression e : list) { - l.add(valueOf(e, to)); + l.add(valueOf(e, to)); } return l; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java index a820833d1a0..1574e406a1e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java @@ -82,16 +82,18 @@ public class In extends NamedExpression implements ScriptWeaver { } @Override - public Object fold() { + public Boolean fold() { Object foldedLeftValue = value.fold(); - + Boolean result = false; for (Expression rightValue : list) { Boolean compResult = Comparisons.eq(foldedLeftValue, rightValue.fold()); - if (compResult != null && compResult) { + if (compResult == null) { + result = null; + } else if (compResult) { return true; } } - return false; + return result; } @Override @@ -118,15 +120,18 @@ public class In extends NamedExpression implements ScriptWeaver { String scriptPrefix = leftScript + "=="; LinkedHashSet 
values = list.stream().map(Expression::fold).collect(Collectors.toCollection(LinkedHashSet::new)); for (Object valueFromList : values) { - if (valueFromList instanceof Expression) { - ScriptTemplate rightScript = asScript((Expression) valueFromList); - sj.add(scriptPrefix + rightScript.template()); - rightParams.add(rightScript.params()); - } else { - if (valueFromList instanceof String) { - sj.add(scriptPrefix + '"' + valueFromList + '"'); + // if checked against null => false + if (valueFromList != null) { + if (valueFromList instanceof Expression) { + ScriptTemplate rightScript = asScript((Expression) valueFromList); + sj.add(scriptPrefix + rightScript.template()); + rightParams.add(rightScript.params()); } else { - sj.add(scriptPrefix + valueFromList.toString()); + if (valueFromList instanceof String) { + sj.add(scriptPrefix + '"' + valueFromList + '"'); + } else { + sj.add(scriptPrefix + valueFromList.toString()); + } } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java index 5ebf8870965..0a901b5b5e6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/comparison/InProcessor.java @@ -40,14 +40,17 @@ public class InProcessor implements Processor { @Override public Object process(Object input) { Object leftValue = processsors.get(processsors.size() - 1).process(input); + Boolean result = false; for (int i = 0; i < processsors.size() - 1; i++) { Boolean compResult = Comparisons.eq(leftValue, processsors.get(i).process(input)); - if (compResult != null && compResult) { + if (compResult == null) { + result = null; + } else if (compResult) { return true; } } - return false; + return result; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java index 412df4e8ca6..4366e2d404c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java @@ -9,6 +9,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.type.DataType; import java.util.LinkedHashSet; import java.util.List; @@ -24,7 +25,9 @@ public class TermsQuery extends LeafQuery { public TermsQuery(Location location, String term, List values) { super(location); this.term = term; + values.removeIf(e -> e.dataType() == DataType.NULL); this.values = new LinkedHashSet<>(Foldables.valuesOf(values, values.get(0).dataType())); + this.values.removeIf(Objects::isNull); } @Override diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java index 3e71ac90f81..12bba003115 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java +++ 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/InProcessorTests.java @@ -22,6 +22,7 @@ public class InProcessorTests extends AbstractWireSerializingTestCase")); } + + public void testTranslateInExpression_HavingClauseAndNullHandling_Painless() { + LogicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING max(int) in (10, null, 20, null, 30 - 10)"); + assertTrue(p instanceof Project); + assertTrue(p.children().get(0) instanceof Filter); + Expression condition = ((Filter) p.children().get(0)).condition(); + assertFalse(condition.foldable()); + QueryTranslation translation = QueryTranslator.toQuery(condition, false); + assertTrue(translation.query instanceof ScriptQuery); + ScriptQuery sq = (ScriptQuery) translation.query; + assertEquals("InternalSqlScriptUtils.nullSafeFilter(params.a0==10 || params.a0==20)", sq.script().toString()); + assertThat(sq.script().params().toString(), startsWith("[{a=MAX(int){a->")); + } } diff --git a/x-pack/qa/sql/src/main/resources/agg.sql-spec b/x-pack/qa/sql/src/main/resources/agg.sql-spec index 2c6248059f5..110882dc21e 100644 --- a/x-pack/qa/sql/src/main/resources/agg.sql-spec +++ b/x-pack/qa/sql/src/main/resources/agg.sql-spec @@ -450,3 +450,9 @@ selectHireDateGroupByHireDate SELECT hire_date HD, COUNT(*) c FROM test_emp GROUP BY hire_date ORDER BY hire_date DESC; selectSalaryGroupBySalary SELECT salary, COUNT(*) c FROM test_emp GROUP BY salary ORDER BY salary DESC; + +// filter with IN +aggMultiWithHavingUsingInAndNullHandling +SELECT MIN(salary) min, MAX(salary) max, gender g, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g HAVING max IN(74999, null, 74600) ORDER BY gender; +aggMultiGroupByMultiWithHavingUsingInAndNullHandling +SELECT MIN(salary) min, MAX(salary) max, gender g, languages l, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g, languages HAVING max IN (74500, null, 74600) ORDER BY gender, languages; diff --git a/x-pack/qa/sql/src/main/resources/filter.sql-spec b/x-pack/qa/sql/src/main/resources/filter.sql-spec index 1a564ecb9ad..79b3836b959 100644 --- a/x-pack/qa/sql/src/main/resources/filter.sql-spec +++ b/x-pack/qa/sql/src/main/resources/filter.sql-spec @@ -96,3 +96,8 @@ SELECT last_name l FROM "test_emp" WHERE emp_no NOT IN (10000, 10001, 10002, 999 whereWithInAndComplexFunctions SELECT last_name l FROM "test_emp" WHERE emp_no NOT IN (10000, abs(2 - 10003), 10002, 999) AND lcase(first_name) IN ('sumant', 'mary', 'patricio', 'No''Match') ORDER BY emp_no LIMIT 5; + +whereWithInAndNullHandling1 +SELECT last_name l FROM "test_emp" WHERE birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)) AND (emp_no = 10038 OR emp_no = 10039 OR emp_no = 10040) ORDER BY emp_no; +whereWithInAndNullHandling2 +SELECT last_name l FROM "test_emp" WHERE birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), null, CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)) AND (emp_no = 10038 OR emp_no = 10039 OR emp_no = 10040) ORDER BY emp_no; diff --git a/x-pack/qa/sql/src/main/resources/select.csv-spec b/x-pack/qa/sql/src/main/resources/select.csv-spec index b3888abd47b..bf208c62026 100644 --- a/x-pack/qa/sql/src/main/resources/select.csv-spec +++ b/x-pack/qa/sql/src/main/resources/select.csv-spec @@ -25,6 +25,22 @@ false |true ; +inWithNullHandling +SELECT 2 IN (1, null, 3), 3 IN (1, null, 3), null IN (1, null, 3), null IN (1, 2, 3); + + 2 IN (1, null, 3) | 3 IN (1, null, 3) | null IN (1, null, 3) | null IN (1, 2, 3) 
+--------------------+--------------------+-----------------------+-------------------
+null |true |null | null
+;
+
+inWithNullHandlingAndNegation
+SELECT NOT 2 IN (1, null, 3), NOT 3 IN (1, null, 3), NOT null IN (1, null, 3), NOT null IN (1, 2, 3);
+
+ NOT 2 IN (1, null, 3) | NOT 3 IN (1, null, 3) | NOT null IN (1, null, 3) | null IN (1, 2, 3)
+------------------------+------------------------+---------------------------+--------------------
+null |false |null | null
+;
+
 //
 // SELECT with IN and table columns
 //
@@ -64,4 +80,23 @@ SELECT 1 IN (1, abs(2 - 4), 3) OR emp_no NOT IN (10000, 10000 + 1, 10002) FROM t
 10003
 10004
 10005
-;
\ No newline at end of file
+;
+
+inWithTableColumnAndNullHandling
+SELECT emp_no, birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)), birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), null, CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)) FROM test_emp WHERE emp_no = 10038 OR emp_no = 10039 OR emp_no = 10040 ORDER BY 1;
+
+ emp_no | birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)) | birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), null, CAST('1959-10-01T00:00:00Z' AS TIMESTAMP))
+--------+-------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------
+10038 | true | true
+10039 | null | null
+10040 | false | null
+
+
+inWithTableColumnAndNullHandlingAndNegation
+SELECT emp_no, NOT birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)), NOT birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), null, CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)) FROM test_emp WHERE emp_no = 10038 OR emp_no = 10039 OR emp_no = 10040 ORDER BY 1;
+
+ emp_no | NOT birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), CAST('1959-10-01T00:00:00Z' AS TIMESTAMP)) | NOT birth_date in (CAST('2018-10-01T00:00:00Z' AS TIMESTAMP), null, CAST('1959-10-01T00:00:00Z' AS TIMESTAMP))
+--------+-----------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------
+10038 | false | false
+10039 | null | null
+10040 | true | null
\ No newline at end of file

From 795d57b4f91f9b7d90a8a322e282dcbe13f866f3 Mon Sep 17 00:00:00 2001
From: Alpar Torok
Date: Wed, 24 Oct 2018 16:05:50 +0300
Subject: [PATCH 28/67] Auto configure all test tasks (#34666)

With this change, we apply the common test config automatically to all
newly created tasks instead of opting in specifically. For plugin authors
using the plugin externally this means that the configuration will be
applied to their RandomizedTestingTasks as well.

The purpose of this change is to simplify setup and to make it easier to
move projects that use the `test` task but actually run integration tests
over to a task called `integTest`, for clarity, but also because we may
want to configure and run them differently, e.g. using different levels
of concurrency.
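For plugin authors, a minimal sketch of what this enables (illustrative only: the `slowTest` task name and the include pattern are made up for this example and are not part of the patch). Because the common configuration is now applied to every RandomizedTestingTask, an extra test task in a build script only needs its own classpath, test classes and filter:

    import com.carrotsearch.gradle.junit4.RandomizedTestingTask

    task slowTest(type: RandomizedTestingTask) {
        // The shared runner settings (JVM, parallelism, listeners, ...) are
        // picked up automatically once the build plugin is applied.
        include '**/*SlowTests.class'
        classpath = tasks.test.classpath
        testClassesDirs = tasks.test.testClassesDirs
    }
    project.check.dependsOn(slowTest)

This mirrors the `testRepositoryCreds` task in the repository-s3 build script in the diff below, which no longer goes through the removed `additionalTest` helper.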
--- .../elasticsearch/gradle/BuildPlugin.groovy | 25 ++++++------------- .../gradle/test/RestIntegTestTask.groovy | 2 -- .../test/StandaloneRestTestPlugin.groovy | 1 + .../gradle/test/StandaloneTestPlugin.groovy | 2 -- plugins/repository-s3/build.gradle | 6 ++++- server/build.gradle | 3 --- x-pack/plugin/ccr/build.gradle | 2 -- x-pack/plugin/ml/build.gradle | 2 -- x-pack/plugin/monitoring/build.gradle | 2 -- x-pack/plugin/upgrade/build.gradle | 2 -- 10 files changed, 13 insertions(+), 34 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 6a82f90bb0e..4d3fe8f19fc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -99,12 +99,14 @@ class BuildPlugin implements Plugin { configureSourcesJar(project) configurePomGeneration(project) + applyCommonTestConfig(project) configureTest(project) configurePrecommit(project) configureDependenciesInfo(project) } + /** Performs checks on the build environment and prints information about the build environment. */ static void globalBuildInfo(Project project) { if (project.rootProject.ext.has('buildChecksDone') == false) { @@ -776,9 +778,8 @@ class BuildPlugin implements Plugin { } } - /** Returns a closure of common configuration shared by unit and integration tests. */ - static Closure commonTestConfig(Project project) { - return { + static void applyCommonTestConfig(Project project) { + project.tasks.withType(RandomizedTestingTask) { jvm "${project.runtimeJavaHome}/bin/java" parallelism System.getProperty('tests.jvms', 'auto') ifNoTests System.getProperty('tests.ifNoTests', 'fail') @@ -873,6 +874,8 @@ class BuildPlugin implements Plugin { exclude '**/*$*.class' + dependsOn(project.tasks.testClasses) + project.plugins.withType(ShadowPlugin).whenPluginAdded { // Test against a shadow jar if we made one classpath -= project.tasks.compileJava.outputs.files @@ -884,23 +887,9 @@ class BuildPlugin implements Plugin { /** Configures the test task */ static Task configureTest(Project project) { - RandomizedTestingTask test = project.tasks.getByName('test') - test.configure(commonTestConfig(project)) - test.configure { + project.tasks.getByName('test') { include '**/*Tests.class' } - - // Add a method to create additional unit tests for a project, which will share the same - // randomized testing setup, but by default run no tests. 
- project.extensions.add('additionalTest', { String name, Closure config -> - RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class) - additionalTest.classpath = test.classpath - additionalTest.testClassesDirs = test.testClassesDirs - additionalTest.configure(commonTestConfig(project)) - additionalTest.configure(config) - additionalTest.dependsOn(project.tasks.testClasses) - project.check.dependsOn(additionalTest) - }); } private static configurePrecommit(Project project) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index 2838849981a..689cf5bf2ed 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -64,8 +64,6 @@ public class RestIntegTestTask extends DefaultTask { runner.testClassesDirs = project.sourceSets.test.output.classesDirs clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project) - // start with the common test configuration - runner.configure(BuildPlugin.commonTestConfig(project)) // override/add more for rest tests runner.parallelism = '1' runner.include('**/*IT.class') diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy index a5d3b41339d..9e41466ebdd 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy @@ -50,6 +50,7 @@ public class StandaloneRestTestPlugin implements Plugin { project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask) BuildPlugin.globalBuildInfo(project) BuildPlugin.configureRepositories(project) + BuildPlugin.applyCommonTestConfig(project) // only setup tests to build project.sourceSets.create('test') diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy index e38163d6166..95818240cda 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneTestPlugin.groovy @@ -24,7 +24,6 @@ import org.elasticsearch.gradle.BuildPlugin import org.gradle.api.Plugin import org.gradle.api.Project import org.gradle.api.plugins.JavaBasePlugin -import org.gradle.api.tasks.compile.JavaCompile /** * Configures the build to compile against Elasticsearch's test framework and @@ -44,7 +43,6 @@ public class StandaloneTestPlugin implements Plugin { description: 'Runs unit tests that are separate' ] RandomizedTestingTask test = project.tasks.create(testOptions) - test.configure(BuildPlugin.commonTestConfig(project)) BuildPlugin.configureCompile(project) test.classpath = project.sourceSets.test.runtimeClasspath test.testClassesDirs = project.sourceSets.test.output.classesDirs diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 3895500e55b..888a9842833 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -4,6 +4,7 @@ import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.test.AntFixture import 
org.elasticsearch.gradle.test.ClusterConfiguration import org.elasticsearch.gradle.test.RestIntegTestTask +import com.carrotsearch.gradle.junit4.RandomizedTestingTask import java.lang.reflect.Field @@ -68,11 +69,14 @@ bundlePlugin { } } -additionalTest('testRepositoryCreds'){ +task testRepositoryCreds(type: RandomizedTestingTask) { include '**/RepositoryCredentialsTests.class' include '**/S3BlobStoreRepositoryTests.class' systemProperty 'es.allow_insecure_settings', 'true' + classpath = tasks.test.classpath + testClassesDirs = tasks.test.testClassesDirs } +project.check.dependsOn(testRepositoryCreds) test { // these are tested explicitly in separate test tasks diff --git a/server/build.gradle b/server/build.gradle index 85c7f45cf7e..412e0677827 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -17,9 +17,7 @@ * under the License. */ - import com.carrotsearch.gradle.junit4.RandomizedTestingTask -import org.elasticsearch.gradle.BuildPlugin apply plugin: 'elasticsearch.build' apply plugin: 'nebula.optional-base' @@ -322,7 +320,6 @@ if (isEclipse == false || project.path == ":server-tests") { group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', dependsOn: test.dependsOn) { - configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs include '**/*IT.class' diff --git a/x-pack/plugin/ccr/build.gradle b/x-pack/plugin/ccr/build.gradle index ea8aa897777..fe5341ef993 100644 --- a/x-pack/plugin/ccr/build.gradle +++ b/x-pack/plugin/ccr/build.gradle @@ -1,5 +1,4 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask -import org.elasticsearch.gradle.BuildPlugin evaluationDependsOn(xpackModule('core')) @@ -25,7 +24,6 @@ task internalClusterTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Java fantasy integration tests', dependsOn: test.dependsOn) { - configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs include '**/*IT.class' diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 8dd5e61bbc4..f3a6dc8b7a4 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,5 +1,4 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask -import org.elasticsearch.gradle.BuildPlugin evaluationDependsOn(xpackModule('core')) @@ -99,7 +98,6 @@ task internalClusterTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', dependsOn: test.dependsOn) { - configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs include '**/*IT.class' diff --git a/x-pack/plugin/monitoring/build.gradle b/x-pack/plugin/monitoring/build.gradle index e551d577b7b..54df68e769c 100644 --- a/x-pack/plugin/monitoring/build.gradle +++ b/x-pack/plugin/monitoring/build.gradle @@ -1,5 +1,4 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask -import org.elasticsearch.gradle.BuildPlugin evaluationDependsOn(xpackModule('core')) @@ -61,7 +60,6 @@ task internalClusterTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', dependsOn: test.dependsOn) { - configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs include '**/*IT.class' diff --git a/x-pack/plugin/upgrade/build.gradle b/x-pack/plugin/upgrade/build.gradle index 
56ce274dd11..309962fa487 100644 --- a/x-pack/plugin/upgrade/build.gradle +++ b/x-pack/plugin/upgrade/build.gradle @@ -1,5 +1,4 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingTask -import org.elasticsearch.gradle.BuildPlugin evaluationDependsOn(xpackModule('core')) @@ -34,7 +33,6 @@ task internalClusterTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', dependsOn: test.dependsOn) { - configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs include '**/*IT.class' From 0f1a5ecf6118e939a472331537f96d55f6fddda3 Mon Sep 17 00:00:00 2001 From: Shaunak Kashyap Date: Wed, 24 Oct 2018 07:20:46 -0700 Subject: [PATCH 29/67] Adding stack_monitoring_agent role (#34369) * Adding stack_monitoring_agent role * Fixing checkstyle issues * Adding tests for new role * Tighten up privileges around index templates * s/stack_monitoring_user/remote_monitoring_collector/ + remote_monitoring_user * Fixing checkstyle violation * Fix test * Removing unused field * Adding missed code * Fixing data type * Update Integration Test for new builtin user --- .../authc/esnative/ClientReservedRealm.java | 1 + .../authz/store/ReservedRolesStore.java | 19 ++++- .../security/user/RemoteMonitoringUser.java | 25 ++++++ .../core/security/user/UsernamesField.java | 4 + .../authz/store/ReservedRolesStoreTests.java | 85 ++++++++++++++++--- .../authc/esnative/ReservedRealm.java | 8 ++ .../esnative/tool/SetupPasswordTool.java | 3 +- .../test/NativeRealmIntegTestCase.java | 6 +- .../authc/esnative/NativeUsersStoreTests.java | 5 +- .../esnative/ReservedRealmIntegTests.java | 7 +- .../authc/esnative/ReservedRealmTests.java | 15 +++- .../esnative/tool/SetupPasswordToolIT.java | 2 +- 12 files changed, 155 insertions(+), 25 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/RemoteMonitoringUser.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java index 5a228133073..81b98e34996 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/esnative/ClientReservedRealm.java @@ -20,6 +20,7 @@ public class ClientReservedRealm { case UsernamesField.LOGSTASH_NAME: case UsernamesField.BEATS_NAME: case UsernamesField.APM_NAME: + case UsernamesField.REMOTE_MONITORING_NAME: return XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings); default: return AnonymousUser.isAnonymousUsername(username, settings); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 3999e9ad3d0..24b17976f4c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -72,8 +72,25 @@ public class ReservedRolesStore implements BiConsumer, ActionListene "cluster:admin/xpack/watcher/watch/delete", }, new RoleDescriptor.IndicesPrivileges[] { - 
RoleDescriptor.IndicesPrivileges.builder().indices(".monitoring-*").privileges("all").build() }, + RoleDescriptor.IndicesPrivileges.builder().indices(".monitoring-*").privileges("all").build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("metricbeat-*").privileges("index", "create_index").build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("remote_monitoring_collector", new RoleDescriptor( + "remote_monitoring_collector", + new String[] { + "monitor" + }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("monitor").build(), + RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*").privileges("read").build() + }, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null + )) .put("ingest_admin", new RoleDescriptor("ingest_admin", new String[] { "manage_index_templates", "manage_pipeline" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/RemoteMonitoringUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/RemoteMonitoringUser.java new file mode 100644 index 00000000000..ad51c575d72 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/RemoteMonitoringUser.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.user; + +import org.elasticsearch.Version; +import org.elasticsearch.xpack.core.security.support.MetadataUtils; + +/** + * Built in user for remote monitoring: collection as well as indexing. 
+ */ +public class RemoteMonitoringUser extends User { + + public static final String NAME = UsernamesField.REMOTE_MONITORING_NAME; + public static final String COLLECTION_ROLE_NAME = UsernamesField.REMOTE_MONITORING_COLLECTION_ROLE; + public static final String INDEXING_ROLE_NAME = UsernamesField.REMOTE_MONITORING_INDEXING_ROLE; + + public static final Version DEFINED_SINCE = Version.V_6_5_0; + + public RemoteMonitoringUser(boolean enabled) { + super(NAME, new String[]{ COLLECTION_ROLE_NAME, INDEXING_ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java index bd886567ed1..0a593ad9928 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java @@ -23,5 +23,9 @@ public final class UsernamesField { public static final String APM_NAME = "apm_system"; public static final String APM_ROLE = "apm_system"; + public static final String REMOTE_MONITORING_NAME = "remote_monitoring_user"; + public static final String REMOTE_MONITORING_COLLECTION_ROLE = "remote_monitoring_collector"; + public static final String REMOTE_MONITORING_INDEXING_ROLE = "remote_monitoring_agent"; + private UsernamesField() {} } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 9972fc7b74b..bdb8c09d484 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; +import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; @@ -97,6 +98,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivileg import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; @@ -136,7 +138,6 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(ReservedRolesStore.isReserved("transport_client"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_user"), is(true)); assertThat(ReservedRolesStore.isReserved("ingest_admin"), is(true)); - 
assertThat(ReservedRolesStore.isReserved("remote_monitoring_agent"), is(true)); assertThat(ReservedRolesStore.isReserved("monitoring_user"), is(true)); assertThat(ReservedRolesStore.isReserved("reporting_user"), is(true)); assertThat(ReservedRolesStore.isReserved("machine_learning_user"), is(true)); @@ -149,6 +150,8 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(ReservedRolesStore.isReserved(LogstashSystemUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(BeatsSystemUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(APMSystemUser.ROLE_NAME), is(true)); + assertThat(ReservedRolesStore.isReserved(RemoteMonitoringUser.COLLECTION_ROLE_NAME), is(true)); + assertThat(ReservedRolesStore.isReserved(RemoteMonitoringUser.INDEXING_ROLE_NAME), is(true)); } public void testIngestAdminRole() { @@ -399,17 +402,75 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo") .test(randomAlphaOfLengthBetween(8, 24)), is(false)); - final String index = ".monitoring-" + randomAlphaOfLength(randomIntBetween(0, 13)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); - assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(true)); + final String monitoringIndex = ".monitoring-" + randomAlphaOfLength(randomIntBetween(0, 13)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo").test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:bar").test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(monitoringIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetAction.NAME).test(monitoringIndex), is(true)); + 
assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(monitoringIndex), is(true)); + + final String metricbeatIndex = "metricbeat-" + randomAlphaOfLength(randomIntBetween(0, 13)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo").test(metricbeatIndex), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:bar").test(metricbeatIndex), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(metricbeatIndex), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(metricbeatIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(metricbeatIndex), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(metricbeatIndex), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(metricbeatIndex), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(metricbeatIndex), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetAction.NAME).test(metricbeatIndex), is(false)); + + } + + public void testRemoteMonitoringCollectorRole() { + final TransportRequest request = mock(TransportRequest.class); + + RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("remote_monitoring_collector"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + Role remoteMonitoringAgentRole = Role.builder(roleDescriptor, null).build(); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterHealthAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStateAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterStatsAction.NAME, request), is(true)); + assertThat(remoteMonitoringAgentRole.cluster().check(GetIndexTemplatesAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(DeleteIndexTemplateAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterRerouteAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false)); + assertThat(remoteMonitoringAgentRole.cluster().check(MonitoringBulkAction.NAME, request), is(false)); + + assertThat(remoteMonitoringAgentRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); + + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(RecoveryAction.NAME).test("foo"), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test("foo"), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".reporting"), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(".kibana"), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetAction.NAME).test(".kibana"), is(true)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo") + .test(randomAlphaOfLengthBetween(8, 24)), is(false)); + + 
Arrays.asList( + ".monitoring-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "metricbeat-" + randomAlphaOfLength(randomIntBetween(0, 13)) + ).forEach((index) -> { + logger.info("index name [{}]", index); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(false)); + assertThat(remoteMonitoringAgentRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(index), is(false)); + }); } public void testReportingUserRole() { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 33726671911..2cf548eb4e1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; @@ -152,6 +153,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { return new BeatsSystemUser(userInfo.enabled); case APMSystemUser.NAME: return new APMSystemUser(userInfo.enabled); + case RemoteMonitoringUser.NAME: + return new RemoteMonitoringUser(userInfo.enabled); default: if (anonymousEnabled && anonymousUser.principal().equals(username)) { return anonymousUser; @@ -183,6 +186,9 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { userInfo = reservedUserInfos.get(APMSystemUser.NAME); users.add(new APMSystemUser(userInfo == null || userInfo.enabled)); + userInfo = reservedUserInfos.get(RemoteMonitoringUser.NAME); + users.add(new RemoteMonitoringUser(userInfo == null || userInfo.enabled)); + if (anonymousEnabled) { users.add(anonymousUser); } @@ -236,6 +242,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm { return BeatsSystemUser.DEFINED_SINCE; case APMSystemUser.NAME: return APMSystemUser.DEFINED_SINCE; + case RemoteMonitoringUser.NAME: + return RemoteMonitoringUser.DEFINED_SINCE; default: return 
Version.V_6_0_0; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java index fad10c821c8..691142a9405 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordTool.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authc.esnative.tool.HttpResponse.HttpResponseBuilder; @@ -65,7 +66,7 @@ public class SetupPasswordTool extends LoggingAwareMultiCommand { private static final char[] CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789").toCharArray(); public static final List USERS = asList(ElasticUser.NAME, APMSystemUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, - BeatsSystemUser.NAME); + BeatsSystemUser.NAME, RemoteMonitoringUser.NAME); private final BiFunction clientFunction; private final CheckedFunction keyStoreFunction; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java index 63a38b12a9e..bc235e0918e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/NativeRealmIntegTestCase.java @@ -17,12 +17,14 @@ import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.util.Arrays; +import java.util.List; import java.util.Set; /** @@ -89,7 +91,9 @@ public abstract class NativeRealmIntegTestCase extends SecurityIntegTestCase { RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword)); RequestOptions options = optionsBuilder.build(); - for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME)) { + final List usernames = Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME, + RemoteMonitoringUser.NAME); + for (String username : usernames) { Request request = new Request("PUT", "/_xpack/security/user/" + username + "/_password"); request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}"); request.setOptions(options); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index 014599dedae..41bd8bfc6e6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.Before; @@ -83,7 +84,7 @@ public class NativeUsersStoreTests extends ESTestCase { final NativeUsersStore nativeUsersStore = startNativeUsersStore(); final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, - BeatsSystemUser.NAME, APMSystemUser.NAME); + BeatsSystemUser.NAME, APMSystemUser.NAME, RemoteMonitoringUser.NAME); final PlainActionFuture future = new PlainActionFuture<>(); nativeUsersStore.setEnabled(user, true, WriteRequest.RefreshPolicy.IMMEDIATE, future); @@ -102,7 +103,7 @@ public class NativeUsersStoreTests extends ESTestCase { final NativeUsersStore nativeUsersStore = startNativeUsersStore(); final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, - BeatsSystemUser.NAME, APMSystemUser.NAME); + BeatsSystemUser.NAME, APMSystemUser.NAME, RemoteMonitoringUser.NAME); final Map values = new HashMap<>(); values.put(ENABLED_FIELD, Boolean.TRUE); values.put(PASSWORD_FIELD, BLANK_PASSWORD); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java index 8f7116dd971..59612d6227a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.junit.BeforeClass; import java.util.Arrays; @@ -52,7 +53,7 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase { public void testAuthenticate() { final List usernames = Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, - BeatsSystemUser.NAME, APMSystemUser.NAME); + BeatsSystemUser.NAME, APMSystemUser.NAME, RemoteMonitoringUser.NAME); for (String username : usernames) { ClusterHealthResponse response = client() .filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword()))) @@ -72,7 +73,7 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase { public void testAuthenticateAfterEnablingUser() { final SecurityClient c = securityClient(); final List usernames = Arrays.asList(ElasticUser.NAME, 
KibanaUser.NAME, LogstashSystemUser.NAME, - BeatsSystemUser.NAME, APMSystemUser.NAME); + BeatsSystemUser.NAME, APMSystemUser.NAME, RemoteMonitoringUser.NAME); for (String username : usernames) { c.prepareSetEnabled(username, true).get(); ClusterHealthResponse response = client() @@ -88,7 +89,7 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase { public void testChangingPassword() { String username = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, - BeatsSystemUser.NAME, APMSystemUser.NAME); + BeatsSystemUser.NAME, APMSystemUser.NAME, RemoteMonitoringUser.NAME); final char[] newPassword = "supersecretvalue".toCharArray(); if (randomBoolean()) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index 36d1690b8b2..53963f996da 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.LogstashSystemUser; +import org.elasticsearch.xpack.core.security.user.RemoteMonitoringUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.UsernamesField; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore.ReservedUserInfo; @@ -264,7 +265,7 @@ public class ReservedRealmTests extends ESTestCase { reservedRealm.users(userFuture); assertThat(userFuture.actionGet(), containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true), - new BeatsSystemUser(true), new APMSystemUser((true)))); + new BeatsSystemUser(true), new APMSystemUser(true), new RemoteMonitoringUser(true))); } public void testGetUsersDisabled() { @@ -396,7 +397,8 @@ public class ReservedRealmTests extends ESTestCase { new AnonymousUser(Settings.EMPTY), securityIndex, threadPool); PlainActionFuture listener = new PlainActionFuture<>(); - final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME); + final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME, + RemoteMonitoringUser.NAME); doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[1]; callback.onResponse(null); @@ -418,7 +420,8 @@ public class ReservedRealmTests extends ESTestCase { new AnonymousUser(Settings.EMPTY), securityIndex, threadPool); PlainActionFuture listener = new PlainActionFuture<>(); - final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME); + final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME, + RemoteMonitoringUser.NAME); reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, mockSecureSettings.getString("bootstrap.password")), listener); final AuthenticationResult result = listener.get(); assertThat(result.getStatus(), is(AuthenticationResult.Status.TERMINATE)); @@ -426,7 +429,7 @@ public class ReservedRealmTests extends ESTestCase 
{ private User randomReservedUser(boolean enabled) { return randomFrom(new ElasticUser(enabled), new KibanaUser(enabled), new LogstashSystemUser(enabled), - new BeatsSystemUser(enabled), new APMSystemUser(enabled)); + new BeatsSystemUser(enabled), new APMSystemUser(enabled), new RemoteMonitoringUser(enabled)); } /* @@ -459,6 +462,10 @@ public class ReservedRealmTests extends ESTestCase { assertThat(versionPredicate.test(Version.V_6_4_0), is(false)); assertThat(versionPredicate.test(Version.V_6_5_0), is(true)); break; + case RemoteMonitoringUser.NAME: + assertThat(versionPredicate.test(Version.V_6_4_0), is(false)); + assertThat(versionPredicate.test(Version.V_6_5_0), is(true)); + break; default: assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); break; diff --git a/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java b/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java index 860c30c0ddd..974f67825f7 100644 --- a/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java +++ b/x-pack/qa/security-setup-password-tests/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolIT.java @@ -98,7 +98,7 @@ public class SetupPasswordToolIT extends ESRestTestCase { } }); - assertEquals(5, userPasswordMap.size()); + assertEquals(6, userPasswordMap.size()); userPasswordMap.entrySet().forEach(entry -> { final String basicHeader = "Basic " + Base64.getEncoder().encodeToString((entry.getKey() + ":" + entry.getValue()).getBytes(StandardCharsets.UTF_8)); From 4370b44ef000c9038c3b97fe0d1d26e20c0ca55b Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Wed, 24 Oct 2018 18:15:25 +0300 Subject: [PATCH 30/67] Rename testcluster extension to align with plugin name (#34751) --- .../gradle/testclusters/TestClustersPlugin.java | 6 +++--- .../gradle/testclusters/TestClustersPluginIT.java | 4 ++-- buildSrc/src/testKit/testclusters/build.gradle | 14 +++++++------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 73aad33b8ea..fcd83a1f461 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -39,8 +39,8 @@ import java.util.Map; public class TestClustersPlugin implements Plugin { - public static final String LIST_TASK_NAME = "listElasticSearchClusters"; - public static final String EXTENSION_NAME = "elasticSearchClusters"; + private static final String LIST_TASK_NAME = "listTestClusters"; + private static final String NODE_EXTENSION_NAME = "testClusters"; private final Logger logger = Logging.getLogger(TestClustersPlugin.class); @@ -50,7 +50,7 @@ public class TestClustersPlugin implements Plugin { ElasticsearchNode.class, (name) -> new ElasticsearchNode(name, GradleServicesAdapter.getInstance(project)) ); - project.getExtensions().add(EXTENSION_NAME, container); + project.getExtensions().add(NODE_EXTENSION_NAME, container); Task listTask = project.getTasks().create(LIST_TASK_NAME); listTask.setGroup("ES cluster formation"); diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java 
b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java index 021bd9bb151..f1461dbbd3d 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/testclusters/TestClustersPluginIT.java @@ -31,11 +31,11 @@ public class TestClustersPluginIT extends GradleIntegrationTestCase { public void testListClusters() { BuildResult result = GradleRunner.create() .withProjectDir(getProjectDir("testclusters")) - .withArguments("listElasticSearchClusters", "-s") + .withArguments("listTestClusters", "-s") .withPluginClasspath() .build(); - assertEquals(TaskOutcome.SUCCESS, result.task(":listElasticSearchClusters").getOutcome()); + assertEquals(TaskOutcome.SUCCESS, result.task(":listTestClusters").getOutcome()); assertOutputContains( result.getOutput(), " * myTestCluster:" diff --git a/buildSrc/src/testKit/testclusters/build.gradle b/buildSrc/src/testKit/testclusters/build.gradle index 083ce97b963..470111f056e 100644 --- a/buildSrc/src/testKit/testclusters/build.gradle +++ b/buildSrc/src/testKit/testclusters/build.gradle @@ -2,40 +2,40 @@ plugins { id 'elasticsearch.testclusters' } -elasticSearchClusters { +testClusters { myTestCluster { distribution = 'ZIP' } } task user1 { - useCluster elasticSearchClusters.myTestCluster + useCluster testClusters.myTestCluster doLast { println "user1 executing" } } task user2 { - useCluster elasticSearchClusters.myTestCluster + useCluster testClusters.myTestCluster doLast { println "user2 executing" } } task upToDate1 { - useCluster elasticSearchClusters.myTestCluster + useCluster testClusters.myTestCluster } task upToDate2 { - useCluster elasticSearchClusters.myTestCluster + useCluster testClusters.myTestCluster } task skipped1 { enabled = false - useCluster elasticSearchClusters.myTestCluster + useCluster testClusters.myTestCluster } task skipped2 { enabled = false - useCluster elasticSearchClusters.myTestCluster + useCluster testClusters.myTestCluster } From e0d7808148c6904dfdc66622587052a92d6a4f0d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 24 Oct 2018 11:15:44 -0400 Subject: [PATCH 31/67] REST: No strict warning testing for head body test Our tests that HEAD requests don't have a body were hitting a warning. For now we'll run that test without "strict" warnings mode enabled.
--- .../org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java index 17b374ecb37..185916ffe3c 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -38,7 +38,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { - public void testHeadRoot() throws IOException { headTestCase("/", emptyMap(), greaterThan(0)); headTestCase("/", singletonMap("pretty", ""), greaterThan(0)); @@ -75,6 +74,12 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { headTestCase("/test", singletonMap("pretty", "true"), greaterThan(0)); } + @Override + protected boolean getStrictDeprecationMode() { + // Remove this override when we remove the reference to types below + return false; + } + public void testTypeExists() throws IOException { createTestDoc(); headTestCase("/test/_mapping/test", emptyMap(), greaterThan(0)); From 0a85997cb00708c8b0ab5da22e3742a4b4ee07d1 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 24 Oct 2018 17:18:52 +0200 Subject: [PATCH 32/67] TESTS: Use File Based Discovery in REST Tests (#34560) * For `6.5+` use file based discovery in REST tests * Relates #33675 --- .../gradle/test/ClusterConfiguration.groovy | 4 +- .../gradle/test/ClusterFormationTasks.groovy | 51 +++++++++++++++---- 2 files changed, 45 insertions(+), 10 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index b29bb7a8cd3..d2eb6cc60a5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -68,7 +68,9 @@ class ClusterConfiguration { * In case of more than one node, this defaults to the number of nodes */ @Input - Closure minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes() : -1 } + Closure minimumMasterNodes = { + return getNumNodes() > 1 ? getNumNodes() : -1 + } @Input String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') + diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index ecf3e342040..e08fd3f6b75 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -122,8 +122,31 @@ class ClusterFormationTasks { } NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir) nodes.add(node) - Object dependsOn = startTasks.empty ? 
startDependencies : startTasks.get(0) - startTasks.add(configureNode(project, prefix, runner, dependsOn, node, config, distro, nodes.get(0))) + Closure writeConfigSetup + Object dependsOn + if (node.nodeVersion.onOrAfter("6.5.0-SNAPSHOT")) { + writeConfigSetup = { Map esConfig -> + // Don't force discovery provider if one is set by the test cluster specs already + if (esConfig.containsKey('discovery.zen.hosts_provider') == false) { + esConfig['discovery.zen.hosts_provider'] = 'file' + } + esConfig['discovery.zen.ping.unicast.hosts'] = [] + esConfig + } + dependsOn = startDependencies + } else { + dependsOn = startTasks.empty ? startDependencies : startTasks.get(0) + writeConfigSetup = { Map esConfig -> + String unicastTransportUri = node.config.unicastTransportUri(nodes.get(0), node, project.ant) + if (unicastTransportUri == null) { + esConfig['discovery.zen.ping.unicast.hosts'] = [] + } else { + esConfig['discovery.zen.ping.unicast.hosts'] = "\"${unicastTransportUri}\"" + } + esConfig + } + } + startTasks.add(configureNode(project, prefix, runner, dependsOn, node, config, distro, writeConfigSetup)) } Task wait = configureWaitTask("${prefix}#wait", project, nodes, startTasks, config.nodeStartupWaitSeconds) @@ -182,7 +205,7 @@ class ClusterFormationTasks { * @return a task which starts the node. */ static Task configureNode(Project project, String prefix, Task runner, Object dependsOn, NodeInfo node, ClusterConfiguration config, - Configuration distribution, NodeInfo seedNode) { + Configuration distribution, Closure writeConfig) { // tasks are chained so their execution order is maintained Task setup = project.tasks.create(name: taskName(prefix, node, 'clean'), type: Delete, dependsOn: dependsOn) { @@ -198,7 +221,7 @@ class ClusterFormationTasks { setup = configureCheckPreviousTask(taskName(prefix, node, 'checkPrevious'), project, setup, node) setup = configureStopTask(taskName(prefix, node, 'stopPrevious'), project, setup, node) setup = configureExtractTask(taskName(prefix, node, 'extract'), project, setup, node, distribution) - setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, seedNode) + setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, writeConfig) setup = configureCreateKeystoreTask(taskName(prefix, node, 'createKeystore'), project, setup, node) setup = configureAddKeystoreSettingTasks(prefix, project, setup, node) setup = configureAddKeystoreFileTasks(prefix, project, setup, node) @@ -301,7 +324,7 @@ class ClusterFormationTasks { } /** Adds a task to write elasticsearch.yml for the given node configuration */ - static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node, NodeInfo seedNode) { + static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node, Closure configFilter) { Map esConfig = [ 'cluster.name' : node.clusterName, 'node.name' : "node-" + node.nodeNum, @@ -347,10 +370,7 @@ class ClusterFormationTasks { Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) writeConfig.doFirst { - String unicastTransportUri = node.config.unicastTransportUri(seedNode, node, project.ant) - if (unicastTransportUri != null) { - esConfig['discovery.zen.ping.unicast.hosts'] = "\"${unicastTransportUri}\"" - } + esConfig = configFilter.call(esConfig) File configFile = new File(node.pathConf, 'elasticsearch.yml') logger.info("Configuring ${configFile}") configFile.setText(esConfig.collect { key, value -> "${key}: 
${value}" }.join('\n'), 'UTF-8') @@ -681,6 +701,19 @@ class ClusterFormationTasks { static Task configureWaitTask(String name, Project project, List nodes, List startTasks, int waitSeconds) { Task wait = project.tasks.create(name: name, dependsOn: startTasks) wait.doLast { + + Collection unicastHosts = new HashSet<>() + nodes.forEach { otherNode -> + String unicastHost = otherNode.config.unicastTransportUri(otherNode, null, project.ant) + if (unicastHost != null) { + unicastHosts.addAll(Arrays.asList(unicastHost.split(","))) + } + } + String unicastHostsTxt = String.join("\n", unicastHosts) + nodes.forEach { node -> + node.pathConf.toPath().resolve("unicast_hosts.txt").setText(unicastHostsTxt) + } + ant.waitfor(maxwait: "${waitSeconds}", maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name}") { or { for (NodeInfo node : nodes) { From d73768f812a305fd7ce71163994452e2629bb184 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 24 Oct 2018 11:19:39 -0400 Subject: [PATCH 33/67] CCR: Do not follow if leader does not have soft-deletes (#34767) We should not create a follower index and abort a follow request if the leader does not have soft-deletes. Moreover, we also should not auto-follow an index if it does not have soft-deletes. --- .../ccr/action/AutoFollowCoordinator.java | 5 +++- .../ccr/action/TransportPutFollowAction.java | 4 ++++ .../xpack/ccr/LocalIndexFollowingIT.java | 15 ++++++++++++ .../action/AutoFollowCoordinatorTests.java | 24 +++++++++---------- 4 files changed, 34 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index cbd31e4bae0..a05dc0914e5 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; @@ -370,7 +371,9 @@ public class AutoFollowCoordinator implements ClusterStateApplier { // has a leader index uuid custom metadata entry that matches with uuid of leaderIndexMetaData variable // If so then handle it differently: not follow it, but just add an entry to // AutoFollowMetadata#followedLeaderIndexUUIDs - leaderIndicesToFollow.add(leaderIndexMetaData.getIndex()); + if (leaderIndexMetaData.getSettings().getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false)) { + leaderIndicesToFollow.add(leaderIndexMetaData.getIndex()); + } } } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index d05f17000d9..cdf496cc032 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -125,6 +125,10 @@ public final class TransportPutFollowAction listener.onFailure(new 
IllegalArgumentException("leader index [" + request.getLeaderIndex() + "] does not exist")); return; } + if (leaderIndexMetaData.getSettings().getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) == false) { + listener.onFailure( + new IllegalArgumentException("leader index [" + request.getLeaderIndex() + "] does not have soft deletes enabled")); + } ActionListener handler = ActionListener.wrap( result -> { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java index 51639f184ca..9edc1637dd1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/LocalIndexFollowingIT.java @@ -67,6 +67,21 @@ public class LocalIndexFollowingIT extends CcrSingleNodeTestCase { }); } + public void testDoNotCreateFollowerIfLeaderDoesNotHaveSoftDeletes() throws Exception { + final String leaderIndexSettings = getIndexSettings(2, 0, + singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "false")); + assertAcked(client().admin().indices().prepareCreate("leader-index").setSource(leaderIndexSettings, XContentType.JSON)); + ResumeFollowAction.Request followRequest = getResumeFollowRequest(); + followRequest.setFollowerIndex("follower-index"); + PutFollowAction.Request putFollowRequest = getPutFollowRequest(); + putFollowRequest.setLeaderIndex("leader-index"); + putFollowRequest.setFollowRequest(followRequest); + IllegalArgumentException error = expectThrows(IllegalArgumentException.class, + () -> client().execute(PutFollowAction.INSTANCE, putFollowRequest).actionGet()); + assertThat(error.getMessage(), equalTo("leader index [leader-index] does not have soft deletes enabled")); + assertThat(client().admin().indices().prepareExists("follower-index").get().isExists(), equalTo(false)); + } + private String getIndexSettings(final int numberOfShards, final int numberOfReplicas, final Map additionalIndexSettings) throws IOException { final String settings; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index c7aac7576b1..8a83be680ff 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower; @@ -50,7 +51,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { ClusterState leaderState = ClusterState.builder(new ClusterName("remote")) .metaData(MetaData.builder().put(IndexMetaData.builder("logs-20190101") - .settings(settings(Version.CURRENT)) + .settings(settings(Version.CURRENT).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) .numberOfShards(1) .numberOfReplicas(0))) .build(); @@ -172,7 +173,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { ClusterState leaderState = 
ClusterState.builder(new ClusterName("remote")) .metaData(MetaData.builder().put(IndexMetaData.builder("logs-20190101") - .settings(settings(Version.CURRENT)) + .settings(settings(Version.CURRENT).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) .numberOfShards(1) .numberOfReplicas(0))) .build(); @@ -235,7 +236,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { ClusterState leaderState = ClusterState.builder(new ClusterName("remote")) .metaData(MetaData.builder().put(IndexMetaData.builder("logs-20190101") - .settings(settings(Version.CURRENT)) + .settings(settings(Version.CURRENT).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)) .numberOfShards(1) .numberOfReplicas(0))) .build(); @@ -306,7 +307,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { for (int i = 0; i < 5; i++) { Settings.Builder builder = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_INDEX_UUID, "metrics-" + i); + .put(IndexMetaData.SETTING_INDEX_UUID, "metrics-" + i) + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), i % 2 == 0); imdBuilder.put(IndexMetaData.builder("metrics-" + i) .settings(builder) .numberOfShards(1) @@ -324,21 +326,17 @@ public class AutoFollowCoordinatorTests extends ESTestCase { List result = AutoFollower.getLeaderIndicesToFollow("remote", autoFollowPattern, leaderState, followerState, Collections.emptyList()); result.sort(Comparator.comparing(Index::getName)); - assertThat(result.size(), equalTo(5)); + assertThat(result.size(), equalTo(3)); assertThat(result.get(0).getName(), equalTo("metrics-0")); - assertThat(result.get(1).getName(), equalTo("metrics-1")); - assertThat(result.get(2).getName(), equalTo("metrics-2")); - assertThat(result.get(3).getName(), equalTo("metrics-3")); - assertThat(result.get(4).getName(), equalTo("metrics-4")); + assertThat(result.get(1).getName(), equalTo("metrics-2")); + assertThat(result.get(2).getName(), equalTo("metrics-4")); List followedIndexUUIDs = Collections.singletonList(leaderState.metaData().index("metrics-2").getIndexUUID()); result = AutoFollower.getLeaderIndicesToFollow("remote", autoFollowPattern, leaderState, followerState, followedIndexUUIDs); result.sort(Comparator.comparing(Index::getName)); - assertThat(result.size(), equalTo(4)); + assertThat(result.size(), equalTo(2)); assertThat(result.get(0).getName(), equalTo("metrics-0")); - assertThat(result.get(1).getName(), equalTo("metrics-1")); - assertThat(result.get(2).getName(), equalTo("metrics-3")); - assertThat(result.get(3).getName(), equalTo("metrics-4")); + assertThat(result.get(1).getName(), equalTo("metrics-4")); } public void testGetFollowerIndexName() { From f7a6fb288f4e71f6946c6fc0ce1c588c68951286 Mon Sep 17 00:00:00 2001 From: markharwood Date: Wed, 24 Oct 2018 16:58:31 +0100 Subject: [PATCH 34/67] Fix compilation error peculiar to eclipse2018-09 (#34798) Fix compilation error peculiar to eclipse2018-09. 
Added generics to make eclipse compiler happy --- .../authz/IndicesAndAliasesResolverTests.java | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 4dc0909552c..1b2317925fc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -1126,9 +1126,11 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testRemotableRequestsAllowRemoteIndices() { IndicesOptions options = IndicesOptions.fromOptions(true, false, false, false); Tuple tuple = randomFrom( - new Tuple<>(new SearchRequest("remote:foo").indicesOptions(options), SearchAction.NAME), - new Tuple<>(new FieldCapabilitiesRequest().indices("remote:foo").indicesOptions(options), FieldCapabilitiesAction.NAME), - new Tuple<>(new GraphExploreRequest().indices("remote:foo").indicesOptions(options), GraphExploreAction.NAME) + new Tuple(new SearchRequest("remote:foo").indicesOptions(options), SearchAction.NAME), + new Tuple(new FieldCapabilitiesRequest().indices("remote:foo").indicesOptions(options), + FieldCapabilitiesAction.NAME), + new Tuple(new GraphExploreRequest().indices("remote:foo").indicesOptions(options), + GraphExploreAction.NAME) ); final TransportRequest request = tuple.v1(); ResolvedIndices resolved = resolveIndices(request, buildAuthorizedIndices(user, tuple.v2())); @@ -1143,9 +1145,9 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testNonRemotableRequestDoesNotAllowRemoteIndices() { IndicesOptions options = IndicesOptions.fromOptions(true, false, false, false); Tuple tuple = randomFrom( - new Tuple<>(new CloseIndexRequest("remote:foo").indicesOptions(options), CloseIndexAction.NAME), - new Tuple<>(new DeleteIndexRequest("remote:foo").indicesOptions(options), DeleteIndexAction.NAME), - new Tuple<>(new PutMappingRequest("remote:foo").indicesOptions(options), PutMappingAction.NAME) + new Tuple(new CloseIndexRequest("remote:foo").indicesOptions(options), CloseIndexAction.NAME), + new Tuple(new DeleteIndexRequest("remote:foo").indicesOptions(options), DeleteIndexAction.NAME), + new Tuple(new PutMappingRequest("remote:foo").indicesOptions(options), PutMappingAction.NAME) ); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(tuple.v1(), buildAuthorizedIndices(user, tuple.v2())).getLocal()); @@ -1155,9 +1157,9 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { public void testNonRemotableRequestDoesNotAllowRemoteWildcardIndices() { IndicesOptions options = IndicesOptions.fromOptions(randomBoolean(), true, true, true); Tuple tuple = randomFrom( - new Tuple<>(new CloseIndexRequest("*:*").indicesOptions(options), CloseIndexAction.NAME), - new Tuple<>(new DeleteIndexRequest("*:*").indicesOptions(options), DeleteIndexAction.NAME), - new Tuple<>(new PutMappingRequest("*:*").indicesOptions(options), PutMappingAction.NAME) + new Tuple(new CloseIndexRequest("*:*").indicesOptions(options), CloseIndexAction.NAME), + new Tuple(new DeleteIndexRequest("*:*").indicesOptions(options), DeleteIndexAction.NAME), + new Tuple(new PutMappingRequest("*:*").indicesOptions(options), 
PutMappingAction.NAME) ); final ResolvedIndices resolved = resolveIndices(tuple.v1(), buildAuthorizedIndices(user, tuple.v2())); assertNoIndices((IndicesRequest.Replaceable) tuple.v1(), resolved); From f19565c3e035f6aab5a07f7c8b81bee86d72fca8 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 24 Oct 2018 19:31:26 +0300 Subject: [PATCH 35/67] SQL: Verifier allows aliases aggregates for sorting (#34773) Improve Verifier rule that prevented grouping with aliases inside aggregates to not be accepted for ordering. Close #34607 --- .../xpack/sql/analysis/analyzer/Verifier.java | 16 +++++++++++++++- .../analyzer/VerifierErrorMessagesTests.java | 13 +++++++++++++ x-pack/qa/sql/src/main/resources/agg.sql-spec | 2 ++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index e5ab3ce082b..32d57175114 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.xpack.sql.capabilities.Unresolvable; +import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.AttributeSet; import org.elasticsearch.xpack.sql.expression.Exists; @@ -249,8 +250,21 @@ final class Verifier { return; } + // take aliases declared inside the aggregates which point to the grouping (but are not included in there) + // to correlate them to the order + List groupingAndMatchingAggregatesAliases = new ArrayList<>(a.groupings()); + + a.aggregates().forEach(as -> { + if (as instanceof Alias) { + Alias al = (Alias) as; + if (Expressions.anyMatch(a.groupings(), g -> Expressions.equalsAsAttribute(al.child(), g))) { + groupingAndMatchingAggregatesAliases.add(al); + } + } + }); + // make sure to compare attributes directly - if (Expressions.anyMatch(a.groupings(), + if (Expressions.anyMatch(groupingAndMatchingAggregatesAliases, g -> e.semanticEquals(e instanceof Attribute ? 
Expressions.attribute(g) : g))) { return; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index c193dcfd546..b1097211075 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.parser.SqlParser; +import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.type.TypesTests; @@ -34,6 +35,13 @@ public class VerifierErrorMessagesTests extends ESTestCase { return e.getMessage().substring(header.length()); } + private LogicalPlan accepted(String sql) { + Map mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json"); + EsIndex test = new EsIndex("test", mapping); + Analyzer analyzer = new Analyzer(new FunctionRegistry(), IndexResolution.valid(test), TimeZone.getTimeZone("UTC")); + return analyzer.analyze(parser.createStatement(sql), true); + } + public void testMissingIndex() { assertEquals("1:17: Unknown index [missing]", verify(IndexResolution.notFound("missing"), "SELECT foo FROM missing")); } @@ -110,6 +118,11 @@ public class VerifierErrorMessagesTests extends ESTestCase { verify("SELECT MAX(int) FROM test GROUP BY text ORDER BY bool")); } + public void testGroupByOrderByAliasedInSelectAllowed() { + LogicalPlan lp = accepted("SELECT text t FROM test GROUP BY text ORDER BY t"); + assertNotNull(lp); + } + public void testGroupByOrderByScalarOverNonGrouped() { assertEquals("1:50: Cannot order by non-grouped column [YEAR(date [UTC])], expected [text]", verify("SELECT MAX(int) FROM test GROUP BY text ORDER BY YEAR(date)")); diff --git a/x-pack/qa/sql/src/main/resources/agg.sql-spec b/x-pack/qa/sql/src/main/resources/agg.sql-spec index 110882dc21e..c19f4d19cfc 100644 --- a/x-pack/qa/sql/src/main/resources/agg.sql-spec +++ b/x-pack/qa/sql/src/main/resources/agg.sql-spec @@ -35,6 +35,8 @@ groupByOnNumberWithWhereAndLimit SELECT emp_no e FROM "test_emp" WHERE emp_no < 10020 GROUP BY emp_no ORDER BY emp_no DESC LIMIT 1; groupByOnNumberOnAlias SELECT emp_no e FROM "test_emp" WHERE emp_no < 10020 GROUP BY e ORDER BY emp_no DESC; +groupByOnNumberWithAliasInSelect +SELECT emp_no e FROM "test_emp" WHERE emp_no < 10020 GROUP BY emp_no ORDER BY e DESC; // group by scalar groupByAddScalar From bf4d90a5dc2fe2ee51ec182a166e636d88bf5d05 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 24 Oct 2018 14:27:22 -0400 Subject: [PATCH 36/67] HLRC API for _termvectors (#33447) * HLRC API for _termvectors relates to #27205 --- .../client/RequestConverters.java | 14 + .../client/RestHighLevelClient.java | 32 ++ .../client/core/TermVectorsRequest.java | 228 ++++++++ .../client/core/TermVectorsResponse.java | 486 ++++++++++++++++++ .../java/org/elasticsearch/client/CrudIT.java | 81 +++ .../client/RequestConvertersTests.java | 41 ++ .../client/RestHighLevelClientTests.java | 3 +- .../client/core/TermVectorsResponseTests.java | 203 ++++++++ .../documentation/CRUDDocumentationIT.java | 123 +++++ 
.../high-level/document/term-vectors.asciidoc | 134 +++++ .../high-level/supported-apis.asciidoc | 4 +- .../index/reindex/DeleteByQueryRequest.java | 2 +- 12 files changed, 1347 insertions(+), 4 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/core/TermVectorsResponseTests.java create mode 100644 docs/java-rest/high-level/document/term-vectors.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index f9094c14a9d..106caea027e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -77,6 +77,7 @@ import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.client.core.TermVectorsRequest; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -578,6 +579,19 @@ final class RequestConverters { return req; } + static Request termVectors(TermVectorsRequest tvrequest) throws IOException { + String endpoint = new EndpointBuilder().addPathPart( + tvrequest.getIndex(), tvrequest.getType(), tvrequest.getId()).addPathPartAsIs("_termvectors").build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withRouting(tvrequest.getRouting()); + params.withPreference(tvrequest.getPreference()); + params.withFields(tvrequest.getFields()); + params.withRealtime(tvrequest.getRealtime()); + request.setEntity(createEntity(tvrequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getScript(GetStoredScriptRequest getStoredScriptRequest) { String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build(); Request request = new Request(HttpGet.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index eb041c77c54..342e3efbb6a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -56,6 +56,8 @@ import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.client.core.TermVectorsResponse; +import org.elasticsearch.client.core.TermVectorsRequest; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.ParseField; @@ -1029,6 +1031,36 @@ public class RestHighLevelClient implements Closeable { listener, singleton(404)); } + + /** + * Calls the Term Vectors API + * + * See Term Vectors API on + * elastic.co + * + * @param request the request + * @param options the request options 
(e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + */ + public final TermVectorsResponse termvectors(TermVectorsRequest request, RequestOptions options) throws IOException { + return performRequestAndParseEntity(request, RequestConverters::termVectors, options, TermVectorsResponse::fromXContent, + emptySet()); + } + + /** + * Asynchronously calls the Term Vectors API + * + * See Term Vectors API on + * elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void termvectorsAsync(TermVectorsRequest request, RequestOptions options, ActionListener listener) { + performRequestAsyncAndParseEntity(request, RequestConverters::termVectors, options, TermVectorsResponse::fromXContent, listener, + emptySet()); + } + + /** * Executes a request using the Ranking Evaluation API. * See Ranking Evaluation API diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsRequest.java new file mode 100644 index 00000000000..5c94dfd0a33 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsRequest.java @@ -0,0 +1,228 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.core; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; + +public class TermVectorsRequest implements ToXContentObject, Validatable { + + private final String index; + private final String type; + private String id = null; + private String routing = null; + private String preference = null; + private boolean realtime = true; + private String[] fields = null; + private boolean requestPositions = true; + private boolean requestPayloads = true; + private boolean requestOffsets = true; + private boolean requestFieldStatistics = true; + private boolean requestTermStatistics = false; + private Map perFieldAnalyzer = null; + private Map filterSettings = null; + private XContentBuilder docBuilder = null; + + + /** + * Constructs TermVectorRequest for the given document + * @param index - index of the document + * @param type - type of the document + * @param docId - id of the document + */ + public TermVectorsRequest(String index, String type, String docId) { + this(index, type); + this.id = docId; + } + + /** + * Constructs TermVectorRequest for an artificial document + * @param index - index of the document + * @param type - type of the document + */ + public TermVectorsRequest(String index, String type) { + this.index = index; + this.type = type; + } + + /** + * Returns the index of the request + */ + public String getIndex() { + return index; + } + + /** + * Returns the type of the request + */ + public String getType() { + return type; + } + + /** + * Returns the id of the request + * can be NULL if there is no document ID + */ + public String getId() { + return id; + } + + /** + * Sets the fields for which term vectors information should be retrieved + */ + public void setFields(String... 
fields) { + this.fields = fields; + } + + public String[] getFields() { + return fields; + } + + /** + * Sets whether to request term positions + */ + public void setPositions(boolean requestPositions) { + this.requestPositions = requestPositions; + } + + /** + * Sets whether to request term payloads + */ + public void setPayloads(boolean requestPayloads) { + this.requestPayloads = requestPayloads; + } + + /** + * Sets whether to request term offsets + */ + public void setOffsets(boolean requestOffsets) { + this.requestOffsets = requestOffsets; + } + + /** + * Sets whether to request field statistics + */ + public void setFieldStatistics(boolean requestFieldStatistics) { + this.requestFieldStatistics = requestFieldStatistics; + } + + /** + * Sets whether to request term statistics + */ + public void setTermStatistics(boolean requestTermStatistics) { + this.requestTermStatistics = requestTermStatistics; + } + + /** + * Sets different analyzers than the one at the fields + */ + public void setPerFieldAnalyzer(Map perFieldAnalyzer) { + this.perFieldAnalyzer = perFieldAnalyzer; + } + + /** + * Sets an artificial document for which to request _termvectors + */ + public void setDoc(XContentBuilder docBuilder) { + this.docBuilder = docBuilder; + } + + /** + * Sets conditions for terms filtering + */ + public void setFilterSettings(Map filterSettings) { + this.filterSettings = filterSettings; + } + + /** + * Sets a routing to route a request to a particular shard + */ + public void setRouting(String routing) { + this.routing = routing; + } + + public String getRouting() { + return routing; + } + + /** + * Sets a preference of which shard copies to execute the request + */ + public void setPreference(String preference) { + this.preference = preference; + } + + public String getPreference() { + return preference; + } + + /** + * Sets if the request should be realtime or near-realtime + */ + public void setRealtime(boolean realtime) { + this.realtime = realtime; + } + + /** + * Returns if the request is realtime(true) or near-realtime(false) + */ + public boolean getRealtime() { + return realtime; + } + + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + // set values only when different from defaults + if (requestPositions == false) builder.field("positions", false); + if (requestPayloads == false) builder.field("payloads", false); + if (requestOffsets == false) builder.field("offsets", false); + if (requestFieldStatistics == false) builder.field("field_statistics", false); + if (requestTermStatistics) builder.field("term_statistics", true); + if (perFieldAnalyzer != null) builder.field("per_field_analyzer", perFieldAnalyzer); + + if (docBuilder != null) { + BytesReference doc = BytesReference.bytes(docBuilder); + try (InputStream stream = doc.streamInput()) { + builder.rawField("doc", stream, docBuilder.contentType()); + } + } + + if (filterSettings != null) { + builder.startObject("filter"); + String[] filterSettingNames = + {"max_num_terms", "min_term_freq", "max_term_freq", "min_doc_freq", "max_doc_freq", "min_word_length", "max_word_length"}; + for (String settingName : filterSettingNames) { + if (filterSettings.containsKey(settingName)) builder.field(settingName, filterSettings.get(settingName)); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsResponse.java
b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsResponse.java new file mode 100644 index 00000000000..5c57fc11b6f --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/core/TermVectorsResponse.java @@ -0,0 +1,486 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.core; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +import java.util.Collections; +import java.util.List; +import java.util.Comparator; +import java.util.Objects; + +public class TermVectorsResponse { + private final String index; + private final String type; + private final String id; + private final long docVersion; + private final boolean found; + private final long tookInMillis; + private final List termVectorList; + + public TermVectorsResponse( + String index, String type, String id, long version, boolean found, long tookInMillis, List termVectorList) { + this.index = index; + this.type = type; + this.id = id; + this.docVersion = version; + this.found = found; + this.tookInMillis = tookInMillis; + this.termVectorList = termVectorList; + } + + private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("term_vectors", true, + args -> { + // as the response comes from server, we are sure that args[6] will be a list of TermVector + @SuppressWarnings("unchecked") List termVectorList = (List) args[6]; + if (termVectorList != null) { + Collections.sort(termVectorList, Comparator.comparing(TermVector::getFieldName)); + } + return new TermVectorsResponse( + (String) args[0], + (String) args[1], + (String) args[2], + (long) args[3], + (boolean) args[4], + (long) args[5], + termVectorList + ); + } + ); + + static { + PARSER.declareString(constructorArg(), new ParseField("_index")); + PARSER.declareString(constructorArg(), new ParseField("_type")); + PARSER.declareString(optionalConstructorArg(), new ParseField("_id")); + PARSER.declareLong(constructorArg(), new ParseField("_version")); + PARSER.declareBoolean(constructorArg(), new ParseField("found")); + PARSER.declareLong(constructorArg(), new ParseField("took")); + PARSER.declareNamedObjects(optionalConstructorArg(), + (p, c, fieldName) -> TermVector.fromXContent(p, fieldName), new ParseField("term_vectors")); + } + + public static TermVectorsResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + /** + * Returns the index for the response 
+ */ + public String getIndex() { + return index; + } + + /** + * Returns the type for the response + */ + public String getType() { + return type; + } + + /** + * Returns the id of the request + * can be NULL if there is no document ID + */ + public String getId() { + return id; + } + + /** + * Returns if the document is found + * always true for artificial documents + */ + public boolean getFound() { + return found; + } + + /** + * Returns the document version + */ + public long getDocVersion() { + return docVersion; + } + + /** + * Returns the time that a request took in milliseconds + */ + public long getTookInMillis() { + return tookInMillis; + } + + /** + * Returns the list of term vectors + */ + public List getTermVectorsList(){ + return termVectorList; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!(obj instanceof TermVectorsResponse)) return false; + TermVectorsResponse other = (TermVectorsResponse) obj; + return index.equals(other.index) + && type.equals(other.type) + && Objects.equals(id, other.id) + && docVersion == other.docVersion + && found == other.found + && tookInMillis == other.tookInMillis + && Objects.equals(termVectorList, other.termVectorList); + } + + @Override + public int hashCode() { + return Objects.hash(index, type, id, docVersion, found, tookInMillis, termVectorList); + } + + + public static final class TermVector { + + private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("term_vector", true, + (args, ctxFieldName) -> { + // as the response comes from server, we are sure that args[1] will be a list of Term + @SuppressWarnings("unchecked") List terms = (List) args[1]; + if (terms != null) { + Collections.sort(terms, Comparator.comparing(Term::getTerm)); + } + return new TermVector(ctxFieldName, (FieldStatistics) args[0], terms); + } + ); + + static { + PARSER.declareObject(optionalConstructorArg(), + (p,c) -> FieldStatistics.fromXContent(p), new ParseField("field_statistics")); + PARSER.declareNamedObjects(optionalConstructorArg(), (p, c, term) -> Term.fromXContent(p, term), new ParseField("terms")); + } + + private final String fieldName; + @Nullable + private final FieldStatistics fieldStatistics; + @Nullable + private final List terms; + + public TermVector(String fieldName, FieldStatistics fieldStatistics, List terms) { + this.fieldName = fieldName; + this.fieldStatistics = fieldStatistics; + this.terms = terms; + } + + public static TermVector fromXContent(XContentParser parser, String fieldName) { + return PARSER.apply(parser, fieldName); + } + + /** + * Returns the field name of the current term vector + */ + public String getFieldName() { + return fieldName; + } + + /** + * Returns the list of terms for the current term vector + */ + public List getTerms() { + return terms; + } + + /** + * Returns the field statistics for the current field + */ + public FieldStatistics getFieldStatistics() { + return fieldStatistics; + } + + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!(obj instanceof TermVector)) return false; + TermVector other = (TermVector) obj; + return fieldName.equals(other.fieldName) + && Objects.equals(fieldStatistics, other.fieldStatistics) + && Objects.equals(terms, other.terms); + } + + @Override + public int hashCode() { + return Objects.hash(fieldName, fieldStatistics, terms); + } + + // Class containing a general field statistics for the field + public static final class FieldStatistics { + + private static
ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "field_statistics", true, + args -> { + return new FieldStatistics((long) args[0], (int) args[1], (long) args[2]); + } + ); + + static { + PARSER.declareLong(constructorArg(), new ParseField("sum_doc_freq")); + PARSER.declareInt(constructorArg(), new ParseField("doc_count")); + PARSER.declareLong(constructorArg(), new ParseField("sum_ttf")); + } + private final long sumDocFreq; + private final int docCount; + private final long sumTotalTermFreq; + + public FieldStatistics(long sumDocFreq, int docCount, long sumTotalTermFreq) { + this.sumDocFreq = sumDocFreq; + this.docCount = docCount; + this.sumTotalTermFreq = sumTotalTermFreq; + } + + public static FieldStatistics fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + /* + * Returns how many documents this field contains + */ + public int getDocCount() { + return docCount; + } + + /** + * Returns the sum of document frequencies for all terms in this field + */ + public long getSumDocFreq() { + return sumDocFreq; + } + + /** + * Returns the sum of total term frequencies of all terms in this field + */ + public long getSumTotalTermFreq() { + return sumTotalTermFreq; + } + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!(obj instanceof FieldStatistics)) return false; + FieldStatistics other = (FieldStatistics) obj; + return docCount == other.docCount + && sumDocFreq == other.sumDocFreq + && sumTotalTermFreq == other.sumTotalTermFreq; + } + + @Override + public int hashCode() { + return Objects.hash(docCount, sumDocFreq, sumTotalTermFreq); + } + } + + + public static final class Term { + private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token", true, + (args, ctxTerm) -> { + // as the response comes from server, we are sure that args[4] will be a list of Token + @SuppressWarnings("unchecked") List tokens = (List) args[4]; + if (tokens != null) { + Collections.sort( + tokens, + Comparator.comparing(Token::getPosition, Comparator.nullsFirst(Integer::compareTo)) + .thenComparing(Token::getStartOffset, Comparator.nullsFirst(Integer::compareTo)) + .thenComparing(Token::getEndOffset, Comparator.nullsFirst(Integer::compareTo)) + ); + } + return new Term(ctxTerm, (int) args[0], (Integer) args[1], (Long) args[2], (Float) args[3], tokens); + } + ); + static { + PARSER.declareInt(constructorArg(), new ParseField("term_freq")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("doc_freq")); + PARSER.declareLong(optionalConstructorArg(), new ParseField("ttf")); + PARSER.declareFloat(optionalConstructorArg(), new ParseField("score")); + PARSER.declareObjectArray(optionalConstructorArg(), (p,c) -> Token.fromXContent(p), new ParseField("tokens")); + } + + private final String term; + private final int termFreq; + @Nullable + private final Integer docFreq; + @Nullable + private final Long totalTermFreq; + @Nullable + private final Float score; + @Nullable + private final List tokens; + + public Term(String term, int termFreq, Integer docFreq, Long totalTermFreq, Float score, List tokens) { + this.term = term; + this.termFreq = termFreq; + this.docFreq = docFreq; + this.totalTermFreq = totalTermFreq; + this.score = score; + this.tokens = tokens; + } + + public static Term fromXContent(XContentParser parser, String term) { + return PARSER.apply(parser, term); + } + + /** + * Returns the string representation of the term + */ + public String getTerm() { + return term; + } + + /** + * 
Returns term frequency - the number of times this term occurs in the current document + */ + public int getTermFreq() { + return termFreq; + } + + /** + * Returns document frequency - the number of documents in the index that contain this term + */ + public Integer getDocFreq() { + return docFreq; + } + + /** + * Returns total term frequency - the number of times this term occurs across all documents + */ + public Long getTotalTermFreq( ){ + return totalTermFreq; + } + + /** + * Returns tf-idf score, if the request used some form of terms filtering + */ + public Float getScore(){ + return score; + } + + /** + * Returns a list of tokens for the term + */ + public List getTokens() { + return tokens; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!(obj instanceof Term)) return false; + Term other = (Term) obj; + return term.equals(other.term) + && termFreq == other.termFreq + && Objects.equals(docFreq, other.docFreq) + && Objects.equals(totalTermFreq, other.totalTermFreq) + && Objects.equals(score, other.score) + && Objects.equals(tokens, other.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(term, termFreq, docFreq, totalTermFreq, score, tokens); + } + } + + + public static final class Token { + + private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token", true, + args -> { + return new Token((Integer) args[0], (Integer) args[1], (Integer) args[2], (String) args[3]); + }); + static { + PARSER.declareInt(optionalConstructorArg(), new ParseField("start_offset")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("end_offset")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("position")); + PARSER.declareString(optionalConstructorArg(), new ParseField("payload")); + } + + @Nullable + private final Integer startOffset; + @Nullable + private final Integer endOffset; + @Nullable + private final Integer position; + @Nullable + private final String payload; + + + public Token(Integer startOffset, Integer endOffset, Integer position, String payload) { + this.startOffset = startOffset; + this.endOffset = endOffset; + this.position = position; + this.payload = payload; + } + + public static Token fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + /** + * Returns the start offset of the token in the document's field + */ + public Integer getStartOffset() { + return startOffset; + } + + /** + * Returns the end offset of the token in the document's field + */ + public Integer getEndOffset() { + return endOffset; + } + + /** + * Returns the position of the token in the document's field + */ + public Integer getPosition() { + return position; + } + + /** + * Returns the payload of the token or null if the payload doesn't exist + */ + public String getPayload() { + return payload; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!(obj instanceof Token)) return false; + Token other = (Token) obj; + return Objects.equals(startOffset, other.startOffset) + && Objects.equals(endOffset,other.endOffset) + && Objects.equals(position, other.position) + && Objects.equals(payload, other.payload); + } + + @Override + public int hashCode() { + return Objects.hash(startOffset, endOffset, position, payload); + } + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 3f90552fe9b..b303d7df904 100644 
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -44,12 +44,15 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.client.core.TermVectorsRequest; +import org.elasticsearch.client.core.TermVectorsResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.get.GetResult; @@ -73,6 +76,7 @@ import org.joda.time.format.DateTimeFormat; import java.io.IOException; import java.util.Collections; +import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -80,6 +84,7 @@ import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; @@ -1154,4 +1159,80 @@ public class CrudIT extends ESRestHighLevelClientTestCase { assertEquals(routing, getResponse.getField("_routing").getValue()); } } + + // Not entirely sure if _termvectors belongs to CRUD, and in the absence of a better place, will have it here + public void testTermvectors() throws IOException { + final String sourceIndex = "index1"; + { + // prepare : index docs + Settings settings = Settings.builder() + .put("number_of_shards", 1) + .put("number_of_replicas", 0) + .build(); + String mappings = "\"_doc\":{\"properties\":{\"field\":{\"type\":\"text\"}}}"; + createIndex(sourceIndex, settings, mappings); + assertEquals( + RestStatus.OK, + highLevelClient().bulk( + new BulkRequest() + .add(new IndexRequest(sourceIndex, "_doc", "1") + .source(Collections.singletonMap("field", "value1"), XContentType.JSON)) + .add(new IndexRequest(sourceIndex, "_doc", "2") + .source(Collections.singletonMap("field", "value2"), XContentType.JSON)) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE), + RequestOptions.DEFAULT + ).status() + ); + } + { + // test _termvectors on real documents + TermVectorsRequest tvRequest = new TermVectorsRequest(sourceIndex, "_doc", "1"); + tvRequest.setFields("field"); + TermVectorsResponse tvResponse = execute(tvRequest, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync); + + TermVectorsResponse.TermVector.Token expectedToken = new TermVectorsResponse.TermVector.Token(0, 6, 0, null); + TermVectorsResponse.TermVector.Term expectedTerm = new TermVectorsResponse.TermVector.Term( + "value1", 1, null, null, null, Collections.singletonList(expectedToken)); + TermVectorsResponse.TermVector.FieldStatistics expectedFieldStats = + new TermVectorsResponse.TermVector.FieldStatistics(2, 2, 2); + TermVectorsResponse.TermVector expectedTV = + new TermVectorsResponse.TermVector("field", expectedFieldStats, Collections.singletonList(expectedTerm)); + List 
expectedTVlist = Collections.singletonList(expectedTV); + + assertThat(tvResponse.getIndex(), equalTo(sourceIndex)); + assertThat(Integer.valueOf(tvResponse.getId()), equalTo(1)); + assertTrue(tvResponse.getFound()); + assertEquals(expectedTVlist, tvResponse.getTermVectorsList()); + } + { + // test _termvectors on artificial documents + TermVectorsRequest tvRequest = new TermVectorsRequest(sourceIndex, "_doc"); + XContentBuilder docBuilder = XContentFactory.jsonBuilder(); + docBuilder.startObject().field("field", "valuex").endObject(); + tvRequest.setDoc(docBuilder); + TermVectorsResponse tvResponse = execute(tvRequest, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync); + + TermVectorsResponse.TermVector.Token expectedToken = new TermVectorsResponse.TermVector.Token(0, 6, 0, null); + TermVectorsResponse.TermVector.Term expectedTerm = new TermVectorsResponse.TermVector.Term( + "valuex", 1, null, null, null, Collections.singletonList(expectedToken)); + TermVectorsResponse.TermVector.FieldStatistics expectedFieldStats = + new TermVectorsResponse.TermVector.FieldStatistics(2, 2, 2); + TermVectorsResponse.TermVector expectedTV = + new TermVectorsResponse.TermVector("field", expectedFieldStats, Collections.singletonList(expectedTerm)); + List expectedTVlist = Collections.singletonList(expectedTV); + + assertThat(tvResponse.getIndex(), equalTo(sourceIndex)); + assertTrue(tvResponse.getFound()); + assertEquals(expectedTVlist, tvResponse.getTermVectorsList()); + } + } + + // Not entirely sure if _termvectors belongs to CRUD, and in the absence of a better place, will have it here + public void testTermvectorsWithNonExistentIndex() { + TermVectorsRequest request = new TermVectorsRequest("non-existent", "non-existent", "non-existent"); + + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(request, highLevelClient()::termvectors, highLevelClient()::termvectorsAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 8887bed226c..0dc0a67cf7e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; +import org.elasticsearch.client.core.TermVectorsRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.RequestConverters.EndpointBuilder; import org.elasticsearch.common.CheckedBiConsumer; @@ -1177,6 +1178,46 @@ public class RequestConvertersTests extends ESTestCase { assertToXContentBody(explainRequest, request.getEntity()); } + public void testTermVectors() throws IOException { + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + TermVectorsRequest tvRequest = new TermVectorsRequest(index, type, id); + Map expectedParams = new HashMap<>(); + String[] fields; + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + 
tvRequest.setRouting(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + tvRequest.setRealtime(false); + expectedParams.put("realtime", "false"); + } + + boolean hasFields = randomBoolean(); + if (hasFields) { + fields = generateRandomStringArray(10, 5, false, false); + tvRequest.setFields(fields); + } + + Request request = RequestConverters.termVectors(tvRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add(index).add(type).add(id).add("_termvectors"); + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(endpoint.toString(), request.getEndpoint()); + if (hasFields) { + assertThat(request.getParameters(), hasKey("fields")); + String[] requestFields = Strings.splitStringByCommaToArray(request.getParameters().get("fields")); + assertArrayEquals(tvRequest.getFields(), requestFields); + } + for (Map.Entry param : expectedParams.entrySet()) { + assertThat(request.getParameters(), hasEntry(param.getKey(), param.getValue())); + } + assertToXContentBody(tvRequest, request.getEntity()); + } + public void testFieldCaps() { // Create a random request. String[] indices = randomIndicesNames(0, 5); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index fda7ecdd6d6..9535043e395 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -661,8 +661,7 @@ public class RestHighLevelClientTests extends ESTestCase { "mtermvectors", "render_search_template", "scripts_painless_execute", - "tasks.get", - "termvectors" + "tasks.get" }; //These API are not required for high-level client feature completeness String[] notRequiredApi = new String[] { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/TermVectorsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/TermVectorsResponseTests.java new file mode 100644 index 00000000000..67b2704c58d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/TermVectorsResponseTests.java @@ -0,0 +1,203 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.core; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.io.IOException; +import java.util.Collections; +import java.util.Comparator; + +import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester; + +public class TermVectorsResponseTests extends ESTestCase { + + public void testFromXContent() throws IOException { + xContentTester( + this::createParser, + this::createTestInstance, + this::toXContent, + TermVectorsResponse::fromXContent) + .supportsUnknownFields(true) + .randomFieldsExcludeFilter(field -> + field.endsWith("term_vectors") || field.endsWith("terms") || field.endsWith("tokens")) + .test(); + } + + private void toXContent(TermVectorsResponse response, XContentBuilder builder) throws IOException { + builder.startObject(); + builder.field("_index", response.getIndex()); + builder.field("_type", response.getType()); + if (response.getId() != null) { + builder.field("_id", response.getId()); + } + builder.field("_version", response.getDocVersion()); + builder.field("found", response.getFound()); + builder.field("took", response.getTookInMillis()); + List termVectorList = response.getTermVectorsList(); + if (termVectorList != null) { + Collections.sort(termVectorList, Comparator.comparing(TermVectorsResponse.TermVector::getFieldName)); + builder.startObject("term_vectors"); + for (TermVectorsResponse.TermVector tv : termVectorList) { + toXContent(tv, builder); + } + builder.endObject(); + } + builder.endObject(); + } + + private void toXContent(TermVectorsResponse.TermVector tv, XContentBuilder builder) throws IOException { + builder.startObject(tv.getFieldName()); + // build fields_statistics + if (tv.getFieldStatistics() != null) { + builder.startObject("field_statistics"); + builder.field("sum_doc_freq", tv.getFieldStatistics().getSumDocFreq()); + builder.field("doc_count", tv.getFieldStatistics().getDocCount()); + builder.field("sum_ttf", tv.getFieldStatistics().getSumTotalTermFreq()); + builder.endObject(); + } + // build terms + List terms = tv.getTerms(); + if (terms != null) { + Collections.sort(terms, Comparator.comparing(TermVectorsResponse.TermVector.Term::getTerm)); + builder.startObject("terms"); + for (TermVectorsResponse.TermVector.Term term : terms) { + builder.startObject(term.getTerm()); + // build term_statistics + if (term.getDocFreq() != null) builder.field("doc_freq", term.getDocFreq()); + if (term.getTotalTermFreq() != null) builder.field("ttf", term.getTotalTermFreq()); + builder.field("term_freq", term.getTermFreq()); + + // build tokens + List tokens = term.getTokens(); + if (tokens != null) { + Collections.sort( + tokens, + Comparator.comparing(TermVectorsResponse.TermVector.Token::getPosition, Comparator.nullsFirst(Integer::compareTo)) + .thenComparing(TermVectorsResponse.TermVector.Token::getStartOffset, Comparator.nullsFirst(Integer::compareTo)) + .thenComparing(TermVectorsResponse.TermVector.Token::getEndOffset, Comparator.nullsFirst(Integer::compareTo)) + ); + builder.startArray("tokens"); + for (TermVectorsResponse.TermVector.Token token : tokens) { + builder.startObject(); + if (token.getPosition() != null) builder.field("position", token.getPosition()); + if (token.getStartOffset()!= null) builder.field("start_offset", token.getStartOffset()); + if (token.getEndOffset() != null) builder.field("end_offset", token.getEndOffset()); + if (token.getPayload() != null) 
builder.field("payload", token.getPayload()); + builder.endObject(); + } + builder.endArray(); + } + if (term.getScore() != null) builder.field("score", term.getScore()); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + + + protected TermVectorsResponse createTestInstance() { + String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = String.valueOf(randomIntBetween(1,100)); + long version = randomNonNegativeLong(); + long tookInMillis = randomNonNegativeLong(); + boolean found = randomBoolean(); + List tvList = null; + if (found == true){ + boolean hasFieldStatistics = randomBoolean(); + boolean hasTermStatistics = randomBoolean(); + boolean hasScores = randomBoolean(); + boolean hasOffsets = randomBoolean(); + boolean hasPositions = randomBoolean(); + boolean hasPayloads = randomBoolean(); + int fieldsCount = randomIntBetween(1, 3); + tvList = new ArrayList<>(fieldsCount); + for (int i = 0; i < fieldsCount; i++) { + tvList.add(randomTermVector(hasFieldStatistics, hasTermStatistics, hasScores, hasOffsets, hasPositions, hasPayloads)); + } + } + TermVectorsResponse tvresponse = new TermVectorsResponse(index, type, id, version, found, tookInMillis, tvList); + return tvresponse; + } + + private TermVectorsResponse.TermVector randomTermVector(boolean hasFieldStatistics, boolean hasTermStatistics, boolean hasScores, + boolean hasOffsets, boolean hasPositions, boolean hasPayloads) { + TermVectorsResponse.TermVector.FieldStatistics fs = null; + if (hasFieldStatistics) { + long sumDocFreq = randomNonNegativeLong(); + int docCount = randomInt(1000); + long sumTotalTermFreq = randomNonNegativeLong(); + fs = new TermVectorsResponse.TermVector.FieldStatistics(sumDocFreq, docCount, sumTotalTermFreq); + } + + int termsCount = randomIntBetween(1, 5); + List terms = new ArrayList<>(termsCount); + for (int i = 0; i < termsCount; i++) { + terms.add(randomTerm(hasTermStatistics, hasScores, hasOffsets, hasPositions, hasPayloads)); + } + + TermVectorsResponse.TermVector tv = new TermVectorsResponse.TermVector("field" + randomAlphaOfLength(2), fs, terms); + return tv; + } + + private TermVectorsResponse.TermVector.Term randomTerm(boolean hasTermStatistics, boolean hasScores, + boolean hasOffsets, boolean hasPositions, boolean hasPayloads) { + + String termTxt = "term" + randomAlphaOfLength(2); + int termFreq = randomInt(10000); + Integer docFreq = null; + Long totalTermFreq = null; + Float score = null; + List tokens = null; + if (hasTermStatistics) { + docFreq = randomInt(1000); + totalTermFreq = randomNonNegativeLong(); + } + if (hasScores) score = randomFloat(); + if (hasOffsets || hasPositions || hasPayloads ){ + int tokensCount = randomIntBetween(1, 5); + tokens = new ArrayList<>(tokensCount); + for (int i = 0; i < tokensCount; i++) { + Integer startOffset = null; + Integer endOffset = null; + Integer position = null; + String payload = null; + if (hasOffsets) { + startOffset = randomInt(1000); + endOffset = randomInt(2000); + } + if (hasPositions) position = randomInt(100); + if (hasPayloads) payload = "payload" + randomAlphaOfLength(2); + TermVectorsResponse.TermVector.Token token = + new TermVectorsResponse.TermVector.Token(startOffset, endOffset, position, payload); + tokens.add(token); + } + } + TermVectorsResponse.TermVector.Term term = + new TermVectorsResponse.TermVector.Term(termTxt, termFreq, docFreq, totalTermFreq, score, tokens); + return term; + } + +} diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 4e3f778cd15..f80b532f5a4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor; @@ -52,6 +54,8 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.RethrottleRequest; +import org.elasticsearch.client.core.TermVectorsRequest; +import org.elasticsearch.client.core.TermVectorsResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -1503,6 +1507,125 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + // Not entirely sure if _termvectors belongs to CRUD, and in the absence of a better place, will have it here + public void testTermVectors() throws Exception { + RestHighLevelClient client = highLevelClient(); + CreateIndexRequest authorsRequest = new CreateIndexRequest("authors").mapping("doc", "user", "type=keyword"); + CreateIndexResponse authorsResponse = client.indices().create(authorsRequest, RequestOptions.DEFAULT); + assertTrue(authorsResponse.isAcknowledged()); + client.index(new IndexRequest("index", "doc", "1").source("user", "kimchy"), RequestOptions.DEFAULT); + Response refreshResponse = client().performRequest(new Request("POST", "/authors/_refresh")); + assertEquals(200, refreshResponse.getStatusLine().getStatusCode()); + + { + // tag::term-vectors-request + TermVectorsRequest request = new TermVectorsRequest("authors", "doc", "1"); + request.setFields("user"); + // end::term-vectors-request + } + + { + // tag::term-vectors-request-artificial + TermVectorsRequest request = new TermVectorsRequest("authors", "doc"); + XContentBuilder docBuilder = XContentFactory.jsonBuilder(); + docBuilder.startObject().field("user", "guest-user").endObject(); + request.setDoc(docBuilder); // <1> + // end::term-vectors-request-artificial + + // tag::term-vectors-request-optional-arguments + request.setFieldStatistics(false); // <1> + request.setTermStatistics(true); // <2> + request.setPositions(false); // <3> + request.setOffsets(false); // <4> + request.setPayloads(false); // <5> + + Map filterSettings = new HashMap<>(); + filterSettings.put("max_num_terms", 3); + filterSettings.put("min_term_freq", 1); + filterSettings.put("max_term_freq", 10); + filterSettings.put("min_doc_freq", 1); + filterSettings.put("max_doc_freq", 100); + filterSettings.put("min_word_length", 1); + filterSettings.put("max_word_length", 10); + + request.setFilterSettings(filterSettings); // <6> + + Map perFieldAnalyzer = new HashMap<>(); + 
perFieldAnalyzer.put("user", "keyword"); + request.setPerFieldAnalyzer(perFieldAnalyzer); // <7> + + request.setRealtime(false); // <8> + request.setRouting("routing"); // <9> + // end::term-vectors-request-optional-arguments + } + + TermVectorsRequest request = new TermVectorsRequest("authors", "doc", "1"); + request.setFields("user"); + + // tag::term-vectors-execute + TermVectorsResponse response = client.termvectors(request, RequestOptions.DEFAULT); + // end:::term-vectors-execute + + + // tag::term-vectors-response + String index = response.getIndex(); // <1> + String type = response.getType(); // <2> + String id = response.getId(); // <3> + boolean found = response.getFound(); // <4> + // end:::term-vectors-response + + // tag::term-vectors-term-vectors + if (response.getTermVectorsList() != null) { + List tvList = response.getTermVectorsList(); // <1> + for (TermVectorsResponse.TermVector tv : tvList) { + String fieldname = tv.getFieldName(); // <2> + int docCount = tv.getFieldStatistics().getDocCount(); // <3> + long sumTotalTermFreq = tv.getFieldStatistics().getSumTotalTermFreq(); // <4> + long sumDocFreq = tv.getFieldStatistics().getSumDocFreq(); // <5> + if (tv.getTerms() != null) { + List terms = tv.getTerms(); // <6> + for (TermVectorsResponse.TermVector.Term term : terms) { + String termStr = term.getTerm(); // <7> + int termFreq = term.getTermFreq(); // <8> + int docFreq = term.getDocFreq(); // <9> + long totalTermFreq = term.getTotalTermFreq(); // <10> + float score = term.getScore(); // <11> + if (term.getTokens() != null) { + List tokens = term.getTokens(); // <12> + for (TermVectorsResponse.TermVector.Token token : tokens) { + int position = token.getPosition(); // <13> + int startOffset = token.getStartOffset(); // <14> + int endOffset = token.getEndOffset(); // <15> + String payload = token.getPayload(); // <16> + } + } + } + } + } + } + // end:::term-vectors-term-vectors + + // tag::term-vectors-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(TermVectorsResponse termVectorsResponse) { + // <1> + } + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::term-vectors-execute-listener + CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + // tag::term-vectors-execute-async + client.termvectorsAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::term-vectors-execute-async + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + + } + @SuppressWarnings("unused") public void testMultiGet() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/document/term-vectors.asciidoc b/docs/java-rest/high-level/document/term-vectors.asciidoc new file mode 100644 index 00000000000..ec24a0ecef4 --- /dev/null +++ b/docs/java-rest/high-level/document/term-vectors.asciidoc @@ -0,0 +1,134 @@ +-- +:api: term-vectors +:request: TermVectorsRequest +:response: TermVectorsResponse +-- + +[id="{upid}-{api}"] +=== Term Vectors API + +Term Vectors API returns information and statistics on terms in the fields +of a particular document. The document could be stored in the index or +artificially provided by the user. + + +[id="{upid}-{api}-request"] +==== Term Vectors Request + +A +{request}+ expects an `index`, a `type` and an `id` to specify +a certain document, and fields for which the information is retrieved. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request] +-------------------------------------------------- + +Term vectors can also be generated for artificial documents, that is for +documents not present in the index: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request-artificial] +-------------------------------------------------- +<1> An artificial document is provided as an `XContentBuilder` object, +the Elasticsearch built-in helper to generate JSON content. + +===== Optional arguments + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-request-optional-arguments] +-------------------------------------------------- +<1> Set `fieldStatistics` to `false` (default is `true`) to omit document count, +sum of document frequencies, sum of total term frequencies. +<2> Set `termStatistics` to `true` (default is `false`) to display +total term frequency and document frequency. +<3> Set `positions` to `false` (default is `true`) to omit the output of +positions. +<4> Set `offsets` to `false` (default is `true`) to omit the output of +offsets. +<5> Set `payloads` to `false` (default is `true`) to omit the output of +payloads. +<6> Set `filterSettings` to filter the terms that can be returned based +on their tf-idf scores. +<7> Set `perFieldAnalyzer` to specify a different analyzer than +the one that the field has. +<8> Set `realtime` to `false` (default is `true`) to retrieve term vectors +near realtime. +<9> Set a routing parameter + + +include::../execution.asciidoc[] + + +[id="{upid}-{api}-response"] +==== TermVectorsResponse + +The `TermVectorsResponse` contains the following information: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> The index name of the document. +<2> The type name of the document. +<3> The id of the document. +<4> Indicates whether or not the document found. 
+ + +===== Inspecting Term Vectors +If `TermVectorsResponse` contains a non-null list of term vectors, +more information about them can be obtained using the following: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-term-vectors] +-------------------------------------------------- +<1> The list of `TermVector` for the document +<2> The name of the current field +<3> Fields statistics for the current field - document count +<4> Fields statistics for the current field - sum of total term frequencies +<5> Fields statistics for the current field - sum of document frequencies +<6> Terms for the current field +<7> The name of the term +<8> Term frequency of the term +<9> Document frequency of the term +<10> Total term frequency of the term +<11> Score of the term +<12> Tokens of the term +<13> Position of the token +<14> Start offset of the token +<15> End offset of the token +<16> Payload of the token + + +[id="{upid}-{api}-response"] +==== TermVectorsResponse + +The `TermVectorsResponse` contains the following information: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests-file}[{api}-response] +-------------------------------------------------- +<1> The index name of the document. +<2> The type name of the document. +<3> The id of the document. +<4> Indicates whether or not the document is found. +<5> Indicates whether or not there are term vectors for this document. +<6> The list of `TermVector` for the document +<7> The name of the current field +<8> Fields statistics for the current field - document count +<9> Fields statistics for the current field - sum of total term frequencies +<10> Fields statistics for the current field - sum of document frequencies +<11> Terms for the current field +<12> The name of the term +<13> Term frequency of the term +<14> Document frequency of the term +<15> Total term frequency of the term +<16> Score of the term +<17> Tokens of the term +<18> Position of the token +<19> Start offset of the token +<20> End offset of the token +<21> Payload of the token \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 6cde79a22e5..5801d79a5b7 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -14,6 +14,7 @@ Single document APIs:: * <<{upid}-exists>> * <<{upid}-delete>> * <<{upid}-update>> +* <<{upid}-term-vectors>> [[multi-doc]] Multi-document APIs:: @@ -29,6 +30,7 @@ include::document/get.asciidoc[] include::document/exists.asciidoc[] include::document/delete.asciidoc[] include::document/update.asciidoc[] +include::document/term-vectors.asciidoc[] include::document/bulk.asciidoc[] include::document/multi-get.asciidoc[] include::document/reindex.asciidoc[] @@ -372,4 +374,4 @@ don't leak into the rest of the documentation.
:response!: :doc-tests-file!: :upid!: --- +-- \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java index 2713e5e2661..18307e0a568 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java @@ -51,7 +51,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; * */ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest - implements IndicesRequest.Replaceable, ToXContentObject { + implements IndicesRequest.Replaceable, ToXContentObject { public DeleteByQueryRequest() { this(new SearchRequest()); From 98cd7ca861e4397f9bb715987bd928d74ec18c85 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Wed, 24 Oct 2018 20:36:30 +0200 Subject: [PATCH 37/67] SQL: Fix queries with filter resulting in NO_MATCH (#34812) Previously, `Mapper` was returning an incorrect plan which resulted in an ``` SQLFeatureNotSupportedException: Found 1 problem(s) line 1:8: Unexecutable item ``` Queries with a `WHERE` and/or `HAVING` clause which results in NO_MATCH are now handled correctly and return 0 rows. Fixes: #34613 Co-authored-by: Costin Leau --- .../xpack/sql/planner/Mapper.java | 2 +- .../xpack/sql/planner/QueryFolder.java | 9 +- .../xpack/sql/planner/QueryFolderTests.java | 98 +++++++++++++++++++ x-pack/qa/sql/src/main/resources/agg.sql-spec | 4 + .../qa/sql/src/main/resources/filter.sql-spec | 3 + 5 files changed, 113 insertions(+), 3 deletions(-) create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java index 6a0b96f444a..f27cec67809 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/Mapper.java @@ -64,7 +64,7 @@ class Mapper extends RuleExecutor { } if (p instanceof LocalRelation) { - return new LocalExec(p.location(), (LocalRelation) p); + return new LocalExec(p.location(), ((LocalRelation) p).executable()); } if (p instanceof Project) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 1d61cb1be46..3605898210f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.planner; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.search.AggRef; import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Attribute; @@ -525,8 +526,12 @@ class QueryFolder extends RuleExecutor { protected PhysicalPlan rule(PhysicalPlan plan) { if (plan.children().size() == 1) { PhysicalPlan p = plan.children().get(0); - if (p instanceof LocalExec && ((LocalExec) p).isEmpty()) { - return new LocalExec(plan.location(), new EmptyExecutable(plan.output())); + if (p instanceof LocalExec) { + if (((LocalExec) p).isEmpty()) { + return new 
LocalExec(plan.location(), new EmptyExecutable(plan.output())); + } else { + throw new SqlIllegalArgumentException("Encountered a bug; {} is a LocalExec but is not empty", p); + } } } return plan; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java new file mode 100644 index 00000000000..b6643fb7d47 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.planner; + +import org.elasticsearch.test.AbstractBuilderTestCase; +import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; +import org.elasticsearch.xpack.sql.analysis.index.EsIndex; +import org.elasticsearch.xpack.sql.analysis.index.IndexResolution; +import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; +import org.elasticsearch.xpack.sql.optimizer.Optimizer; +import org.elasticsearch.xpack.sql.parser.SqlParser; +import org.elasticsearch.xpack.sql.plan.physical.LocalExec; +import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.sql.session.EmptyExecutable; +import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.type.TypesTests; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.util.Map; +import java.util.TimeZone; + +import static org.hamcrest.Matchers.startsWith; + +public class QueryFolderTests extends AbstractBuilderTestCase { + + private static SqlParser parser; + private static Analyzer analyzer; + private static Optimizer optimizer; + private static Planner planner; + + @BeforeClass + public static void init() { + parser = new SqlParser(); + + Map mapping = TypesTests.loadMapping("mapping-multi-field-variation.json"); + EsIndex test = new EsIndex("test", mapping); + IndexResolution getIndexResult = IndexResolution.valid(test); + analyzer = new Analyzer(new FunctionRegistry(), getIndexResult, TimeZone.getTimeZone("UTC")); + optimizer = new Optimizer(); + planner = new Planner(); + } + + @AfterClass + public static void destroy() { + parser = null; + analyzer = null; + } + + private PhysicalPlan plan(String sql) { + return planner.plan(optimizer.optimize(analyzer.analyze(parser.createStatement(sql), true)), true); + } + + public void testFoldingToLocalExecWithProject() { + PhysicalPlan p = plan("SELECT keyword FROM test WHERE 1 = 2"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(1, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + } + + public void testFoldingToLocalExecWithProject_WithOrderAndLimit() { + PhysicalPlan p = plan("SELECT keyword FROM test WHERE 1 = 2 ORDER BY int LIMIT 10"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(1, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + } + + public 
void testFoldingToLocalExecWithProjectWithGroupBy_WithOrderAndLimit() { + PhysicalPlan p = plan("SELECT keyword, max(int) FROM test WHERE 1 = 2 GROUP BY keyword ORDER BY 1 LIMIT 10"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(2, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + assertThat(ee.output().get(1).toString(), startsWith("MAX(int){a->")); + } + + public void testFoldingToLocalExecWithProjectWithGroupBy_WithHaving_WithOrderAndLimit() { + PhysicalPlan p = plan("SELECT keyword, max(int) FROM test GROUP BY keyword HAVING 1 = 2 ORDER BY 1 LIMIT 10"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(2, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + assertThat(ee.output().get(1).toString(), startsWith("MAX(int){a->")); + } +} diff --git a/x-pack/qa/sql/src/main/resources/agg.sql-spec b/x-pack/qa/sql/src/main/resources/agg.sql-spec index c19f4d19cfc..dab4c386a55 100644 --- a/x-pack/qa/sql/src/main/resources/agg.sql-spec +++ b/x-pack/qa/sql/src/main/resources/agg.sql-spec @@ -435,6 +435,10 @@ aggMultiGroupByMultiWithHavingUsingIn SELECT MIN(salary) min, MAX(salary) max, gender g, languages l, COUNT(*) c FROM "test_emp" WHERE languages > 0 GROUP BY g, languages HAVING max IN (74500, 74600) ORDER BY gender, languages; +// HAVING filter resulting in NoMatch +aggWithNoMatchHaving +SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g HAVING 1 > 2 ORDER BY gender; + // // NULL tests // diff --git a/x-pack/qa/sql/src/main/resources/filter.sql-spec b/x-pack/qa/sql/src/main/resources/filter.sql-spec index 79b3836b959..c4ddbf66e0d 100644 --- a/x-pack/qa/sql/src/main/resources/filter.sql-spec +++ b/x-pack/qa/sql/src/main/resources/filter.sql-spec @@ -79,6 +79,9 @@ SELECT last_name l FROM "test_emp" WHERE emp_no BETWEEN 9990 AND 10003 ORDER BY whereNotBetween SELECT last_name l FROM "test_emp" WHERE emp_no NOT BETWEEN 10010 AND 10020 ORDER BY emp_no LIMIT 5; +whereNoMatch +SELECT last_name l FROM "test_emp" WHERE 1 = 2 ORDER BY 1 LIMIT 10; + // // IN expression // From 7570d692544ea262ead23455c9b3640d5ddcc820 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 24 Oct 2018 22:03:11 +0300 Subject: [PATCH 38/67] SQL: Introduce support for IP fields (#34758) IP fields are recognized and can be used through-out the query Close #32499 --- .../xpack/sql/jdbc/jdbc/TypeConverter.java | 9 +- .../xpack/sql/type/DataType.java | 18 +- .../xpack/sql/expression/Expressions.java | 14 +- .../xpack/sql/expression/predicate/In.java | 2 +- .../plan/logical/command/sys/SysTypes.java | 4 +- .../analyzer/VerifierErrorMessagesTests.java | 2 +- .../logical/command/sys/SysParserTests.java | 4 +- .../xpack/sql/type/TypesTests.java | 7 + .../sql/src/test/resources/mapping-ip.json | 7 + .../xpack/qa/sql/jdbc/DataLoader.java | 62 +++++- .../sql/src/main/resources/command.csv-spec | 1 + x-pack/qa/sql/src/main/resources/ip.csv-spec | 198 ++++++++++++++++++ x-pack/qa/sql/src/main/resources/logs.csv | 101 +++++++++ 13 files changed, 403 insertions(+), 26 deletions(-) create mode 100644 x-pack/plugin/sql/src/test/resources/mapping-ip.json create mode 100644 
x-pack/qa/sql/src/main/resources/ip.csv-spec create mode 100644 x-pack/qa/sql/src/main/resources/logs.csv diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java index 2decfe5d3c5..940e2c757df 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java @@ -30,7 +30,6 @@ import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.function.Function; -import java.util.stream.Collectors; import static java.lang.String.format; import static java.util.Calendar.DAY_OF_MONTH; @@ -41,6 +40,7 @@ import static java.util.Calendar.MINUTE; import static java.util.Calendar.MONTH; import static java.util.Calendar.SECOND; import static java.util.Calendar.YEAR; +import static java.util.stream.Collectors.toMap; /** * Conversion utilities for conversion of JDBC types to Java type and back @@ -52,9 +52,7 @@ import static java.util.Calendar.YEAR; */ final class TypeConverter { - private TypeConverter() { - - } + private TypeConverter() {} private static final long DAY_IN_MILLIS = 60 * 60 * 24 * 1000; private static final Map, SQLType> javaToJDBC; @@ -64,9 +62,10 @@ final class TypeConverter { Map, SQLType> aMap = Arrays.stream(DataType.values()) .filter(dataType -> dataType.javaClass() != null && dataType != DataType.HALF_FLOAT + && dataType != DataType.IP && dataType != DataType.SCALED_FLOAT && dataType != DataType.TEXT) - .collect(Collectors.toMap(dataType -> dataType.javaClass(), dataType -> dataType.jdbcType)); + .collect(toMap(dataType -> dataType.javaClass(), dataType -> dataType.jdbcType)); // apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP} // according to B-4 table from the jdbc4.2 spec aMap.put(Calendar.class, JDBCType.TIMESTAMP); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 88b952b87ac..3ad3b9090a5 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -12,7 +12,8 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Locale; import java.util.Map; -import java.util.stream.Collectors; + +import static java.util.stream.Collectors.toMap; /** * Elasticsearch data types that supported by SQL interface @@ -42,7 +43,13 @@ public enum DataType { // since ODBC and JDBC interpret precision for Date as display size, // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone) // see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288 - DATE( JDBCType.TIMESTAMP, Timestamp.class, Long.BYTES, 24, 24); + DATE( JDBCType.TIMESTAMP, Timestamp.class, Long.BYTES, 24, 24), + // + // specialized types + // + // IP can be v4 or v6. 
The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456 + // aka 39 chars + IP( JDBCType.VARCHAR, String.class, 39, 39, 0,false, false, true); // @formatter:on public static final String ODBC_DATATYPE_PREFIX = "SQL_"; @@ -52,8 +59,9 @@ public enum DataType { static { jdbcToEs = Arrays.stream(DataType.values()) - .filter(dataType -> dataType != TEXT && dataType != NESTED && dataType != SCALED_FLOAT) // Remove duplicates - .collect(Collectors.toMap(dataType -> dataType.jdbcType, dataType -> dataType)); + .filter(type -> type != TEXT && type != NESTED + && type != SCALED_FLOAT && type != IP) // Remove duplicates + .collect(toMap(dataType -> dataType.jdbcType, dataType -> dataType)); odbcToEs = new HashMap<>(36); @@ -238,4 +246,4 @@ public enum DataType { (isString() && other.isString()) || (isNumeric() && other.isNumeric()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index e9a37240be0..5b25ac3df92 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.expression; +import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution; import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe; @@ -131,16 +132,17 @@ public final class Expressions { } public static TypeResolution typeMustBeNumeric(Expression e) { - return e.dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED : new TypeResolution(numericErrorMessage(e)); + return e.dataType().isNumeric() ? TypeResolution.TYPE_RESOLVED : new TypeResolution(incorrectTypeErrorMessage(e, "numeric")); } public static TypeResolution typeMustBeNumericOrDate(Expression e) { return e.dataType().isNumeric() || e.dataType() == DataType.DATE ? 
TypeResolution.TYPE_RESOLVED : - new TypeResolution(numericErrorMessage(e)); + new TypeResolution(incorrectTypeErrorMessage(e, "numeric", "date")); } - - private static String numericErrorMessage(Expression e) { - return "Argument required to be numeric ('" + Expressions.name(e) + "' of type '" + e.dataType().esType + "')"; + + private static String incorrectTypeErrorMessage(Expression e, String...acceptedTypes) { + return "Argument required to be " + Strings.arrayToDelimitedString(acceptedTypes, " or ") + + " ('" + Expressions.name(e) + "' type is '" + e.dataType().esType + "')"; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java index 1574e406a1e..4ce1088f806 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java @@ -42,7 +42,7 @@ public class In extends NamedExpression implements ScriptWeaver { public In(Location location, Expression value, List list) { super(location, null, CollectionUtils.combine(list, value), null); this.value = value; - this.list = list.stream().distinct().collect(Collectors.toList()); + this.list = new ArrayList<>(new LinkedHashSet<>(list)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java index ab40b076fac..96e64f3b39c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTypes.java @@ -67,8 +67,8 @@ public class SysTypes extends Command { @Override public final void execute(SqlSession session, ActionListener listener) { List> rows = Stream.of(DataType.values()) - // sort by SQL int type (that's what the JDBC/ODBC specs want) - .sorted(Comparator.comparing(t -> t.jdbcType.getVendorTypeNumber())) + // sort by SQL int type (that's what the JDBC/ODBC specs want) followed by name + .sorted(Comparator.comparing((DataType t) -> t.jdbcType.getVendorTypeNumber()).thenComparing(DataType::sqlName)) .map(t -> asList(t.esType.toUpperCase(Locale.ROOT), t.jdbcType.getVendorTypeNumber(), //https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size?view=sql-server-2017 diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index b1097211075..bc4f6a9f95c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -139,7 +139,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } public void testNotSupportedAggregateOnDate() { - assertEquals("1:8: Argument required to be numeric ('date' of type 'date')", + assertEquals("1:8: Argument required to be numeric ('date' type is 'date')", verify("SELECT AVG(date) FROM test")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java index 27ed2741311..7e73374aab0 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysParserTests.java @@ -58,11 +58,11 @@ public class SysParserTests extends ESTestCase { Command cmd = sql("SYS TYPES").v1(); List names = asList("BYTE", "LONG", "BINARY", "NULL", "INTEGER", "SHORT", "HALF_FLOAT", "SCALED_FLOAT", "FLOAT", "DOUBLE", - "KEYWORD", "TEXT", "BOOLEAN", "DATE", "UNSUPPORTED", "OBJECT", "NESTED"); + "KEYWORD", "TEXT", "IP", "BOOLEAN", "DATE", "UNSUPPORTED", "OBJECT", "NESTED"); cmd.execute(null, ActionListener.wrap(r -> { assertEquals(19, r.columnCount()); - assertEquals(17, r.size()); + assertEquals(DataType.values().length, r.size()); assertFalse(r.schema().types().contains(DataType.NULL)); // test numeric as signed assertFalse(r.column(9, Boolean.class)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index 30f9d82ff77..5a612fdbe61 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -183,6 +183,13 @@ public class TypesTests extends ESTestCase { assertThat(dt.getDataType().esType, is("unsupported")); } + public void testIpField() { + Map mapping = loadMapping("mapping-ip.json"); + assertThat(mapping.size(), is(1)); + EsField dt = mapping.get("ip_addr"); + assertThat(dt.getDataType().esType, is("ip")); + } + public void testUnsupportedTypes() { Map mapping = loadMapping("mapping-unsupported.json"); EsField dt = mapping.get("range"); diff --git a/x-pack/plugin/sql/src/test/resources/mapping-ip.json b/x-pack/plugin/sql/src/test/resources/mapping-ip.json new file mode 100644 index 00000000000..19211b82b0a --- /dev/null +++ b/x-pack/plugin/sql/src/test/resources/mapping-ip.json @@ -0,0 +1,7 @@ +{ + "properties" : { + "ip_addr" : { + "type" : "ip" + } + } +} diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java index 354c44a60ee..8105cbf9a56 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.qa.sql.jdbc; import org.apache.http.HttpHost; import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; @@ -44,6 +45,7 @@ public class DataLoader { protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception { loadEmpDatasetIntoEs(client, "test_emp", "employees"); loadEmpDatasetWithExtraIntoEs(client, "test_emp_copy", "employees"); + loadLogsDatasetIntoEs(client, "logs", "logs"); makeAlias(client, "test_alias", "test_emp", "test_emp_copy"); makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy"); } @@ -150,7 +152,7 @@ public class DataLoader { list.add(dep); }); - request = new Request("POST", "/" + index + "/emp/_bulk"); + request = new Request("POST", "/" + 
index + "/emp/_bulk?refresh=wait_for"); request.addParameter("refresh", "true"); StringBuilder bulk = new StringBuilder(); csvToLines(fileName, (titles, fields) -> { @@ -193,6 +195,58 @@ public class DataLoader { client.performRequest(request); } + protected static void loadLogsDatasetIntoEs(RestClient client, String index, String filename) throws Exception { + Request request = new Request("PUT", "/" + index); + XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); + createIndex.startObject("settings"); + { + createIndex.field("number_of_shards", 1); + createIndex.field("number_of_replicas", 1); + } + createIndex.endObject(); + createIndex.startObject("mappings"); + { + createIndex.startObject("_doc"); + { + createIndex.startObject("properties"); + { + createIndex.startObject("id").field("type", "integer").endObject(); + createIndex.startObject("@timestamp").field("type", "date").endObject(); + createIndex.startObject("bytes_in").field("type", "integer").endObject(); + createIndex.startObject("bytes_out").field("type", "integer").endObject(); + createIndex.startObject("client_ip").field("type", "ip").endObject(); + createIndex.startObject("client_port").field("type", "integer").endObject(); + createIndex.startObject("dest_ip").field("type", "ip").endObject(); + createIndex.startObject("status").field("type", "keyword").endObject(); + } + createIndex.endObject(); + } + createIndex.endObject(); + } + createIndex.endObject().endObject(); + request.setJsonEntity(Strings.toString(createIndex)); + client.performRequest(request); + + request = new Request("POST", "/" + index + "/_doc/_bulk?refresh=wait_for"); + request.addParameter("refresh", "true"); + StringBuilder bulk = new StringBuilder(); + csvToLines(filename, (titles, fields) -> { + bulk.append("{\"index\":{\"_id\":\"" + fields.get(0) + "\"}}\n"); + bulk.append("{"); + for (int f = 0; f < titles.size(); f++) { + if (Strings.hasText(fields.get(f))) { + if (f > 0) { + bulk.append(","); + } + bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"'); + } + } + bulk.append("}\n"); + }); + request.setJsonEntity(bulk.toString()); + Response response = client.performRequest(request); + } + protected static void loadLibDatasetIntoEs(RestClient client, String index) throws Exception { Request request = new Request("PUT", "/" + index); XContentBuilder createIndex = JsonXContent.contentBuilder().startObject(); @@ -221,7 +275,7 @@ public class DataLoader { request.setJsonEntity(Strings.toString(createIndex)); client.performRequest(request); - request = new Request("POST", "/" + index + "/book/_bulk"); + request = new Request("POST", "/" + index + "/book/_bulk?refresh=wait_for"); request.addParameter("refresh", "true"); StringBuilder bulk = new StringBuilder(); csvToLines("library", (titles, fields) -> { @@ -236,7 +290,7 @@ public class DataLoader { bulk.append("}\n"); }); request.setJsonEntity(bulk.toString()); - client.performRequest(request); + Response response = client.performRequest(request); } protected static void makeAlias(RestClient client, String aliasName, String... 
indices) throws Exception { @@ -270,4 +324,4 @@ public class DataLoader { public static InputStream readFromJarUrl(URL source) throws IOException { return source.openStream(); } -} +} \ No newline at end of file diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index 06f38f0a07e..cc71dd94712 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -152,6 +152,7 @@ showTables SHOW TABLES; name | type +logs |BASE TABLE test_alias |ALIAS test_alias_emp |ALIAS test_emp |BASE TABLE diff --git a/x-pack/qa/sql/src/main/resources/ip.csv-spec b/x-pack/qa/sql/src/main/resources/ip.csv-spec new file mode 100644 index 00000000000..e8075d57c05 --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/ip.csv-spec @@ -0,0 +1,198 @@ +// +// Tests for IP fields +// + +selectAll +SELECT * FROM logs ORDER BY id LIMIT 10; + + @timestamp | bytes_in | bytes_out | client_ip | client_port | dest_ip | id | status +------------------------+---------------+---------------+---------------+---------------+---------------+---------------+--------------- +2017-11-10T21:15:54Z|47 |388 |10.0.1.1 |9152 |172.27.1.129 |1 |OK +2017-11-10T21:15:39Z|29 |374 |10.0.1.1 |31693 |172.27.1.123 |2 |OK +2017-11-10T21:15:39Z|35 |303 |10.0.1.1 |23625 |172.27.1.113 |3 |OK +2017-11-10T21:15:39Z|36 |312 |10.0.1.1 |9932 |172.27.1.116 |4 |OK +2017-11-10T21:15:40Z|35 |344 |10.0.1.1 |22695 |172.27.1.149 |5 |OK +2017-11-10T21:15:40Z|31 |503 |10.0.1.1 |59811 |172.27.1.122 |6 |OK +2017-11-10T21:15:40Z|35 |458 |10.0.1.7 |57372 |172.27.1.140 |7 |OK +2017-11-10T21:15:41Z|35 |281 |10.0.1.8 |17370 |null |8 |OK +2017-11-10T21:15:41Z|46 |231 |10.0.1.9 |65004 |null |9 |OK +2017-11-10T20:36:07Z|40 |506 |10.0.1.10 |22661 |null |10 |OK +; + +selectIpField +SELECT client_ip, dest_ip FROM logs ORDER BY id LIMIT 10; + + client_ip | dest_ip +---------------+--------------- +10.0.1.1 |172.27.1.129 +10.0.1.1 |172.27.1.123 +10.0.1.1 |172.27.1.113 +10.0.1.1 |172.27.1.116 +10.0.1.1 |172.27.1.149 +10.0.1.1 |172.27.1.122 +10.0.1.7 |172.27.1.140 +10.0.1.8 |null +10.0.1.9 |null +10.0.1.10 |null +; + +orderByIpv4Field +SELECT client_ip, dest_ip FROM logs ORDER BY client_ip LIMIT 5; + + client_ip | dest_ip +---------------+------------------------------ +10.0.0.105 |172.27.1.1 +10.0.0.107 |172.20.10.8 +10.0.0.109 |2001:cafe::470f:60b7:f84a:25b6 +10.0.0.113 |90.128.199.24 +10.0.0.118 |172.27.1.1 +; + +orderByIpv6Field +SELECT client_ip, dest_ip FROM logs ORDER BY dest_ip ASC LIMIT 5; + + client_ip:s | dest_ip:s +---------------+------------------------------ +null |27.58.6.220 +10.0.0.147 |90.128.199.24 +10.0.0.113 |90.128.199.24 +10.0.0.129 |172.16.1.1 +10.0.1.177 |172.20.10.1 +; + +filterExactMatchIpv4 +SELECT id, client_ip, dest_ip FROM logs WHERE client_ip = '10.0.1.166' ORDER BY id LIMIT 5; + + id | client_ip | dest_ip +---------------+---------------+------------------------------ +22 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +24 |10.0.1.166 |2001:cafe::13e1:16fc:8726:1bf8 +29 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +33 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +34 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9e +; + +filterExactMatchIpv6 +SELECT id, client_ip, dest_ip FROM logs WHERE dest_ip = 'fe80::86ba:3bff:fe05:c3f3' ORDER BY id LIMIT 10; + + id | client_ip | dest_ip +---------------+---------------+------------------------- +19 |10.0.1.13 |fe80::86ba:3bff:fe05:c3f3 +; + + +filterRangeIpv4 +SELECT id, client_ip, dest_ip FROM logs WHERE 
client_ip BETWEEN '10.0.1.1' AND '10.0.1.200' ORDER BY id LIMIT 10; + + id | client_ip | dest_ip +---------------+---------------+--------------- +1 |10.0.1.1 |172.27.1.129 +2 |10.0.1.1 |172.27.1.123 +3 |10.0.1.1 |172.27.1.113 +4 |10.0.1.1 |172.27.1.116 +5 |10.0.1.1 |172.27.1.149 +6 |10.0.1.1 |172.27.1.122 +7 |10.0.1.7 |172.27.1.140 +8 |10.0.1.8 |null +9 |10.0.1.9 |null +10 |10.0.1.10 |null +; + +filterRangeCIDRIpv4 +SELECT id, client_ip, dest_ip FROM logs WHERE client_ip = '10.0.0.0/16' ORDER BY id LIMIT 5; + + id | client_ip | dest_ip +---------------+---------------+--------------- +1 |10.0.1.1 |172.27.1.129 +2 |10.0.1.1 |172.27.1.123 +3 |10.0.1.1 |172.27.1.113 +4 |10.0.1.1 |172.27.1.116 +5 |10.0.1.1 |172.27.1.149 +; + +filterRangeCIDRIpv6 +SELECT id, client_ip, dest_ip FROM logs WHERE dest_ip = '2001:cafe::/48' ORDER BY id LIMIT 5; + + id | client_ip | dest_ip +---------------+---------------+------------------------------ +20 |10.0.1.199 |2001:cafe::ff07:bdcc:bc59:ff9f +22 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +23 |null |2001:cafe::d46a:9bdc:8126:b00b +24 |10.0.1.166 |2001:cafe::13e1:16fc:8726:1bf8 +25 |10.0.1.199 |2001:cafe::ff07:bdcc:bc59:ff9f +; + +// +// waiting on https://github.com/elastic/elasticsearch/issues/34799 +// +filterInCIDRIpv4-Ignore +SELECT id, client_ip, dest_ip FROM logs WHERE dest_ip IN ('10.0.1.1', '10.0.1.200', '10.0.0.0/16') ORDER BY id LIMIT 10; + + id | client_ip | dest_ip +---------------+---------------+------------------------------ +20 |10.0.1.199 |2001:cafe::ff07:bdcc:bc59:ff9f +22 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +23 |10.0.1.199 |2001:cafe::d46a:9bdc:8126:b00b +24 |10.0.1.166 |2001:cafe::13e1:16fc:8726:1bf8 +; + + +filterInCIDRIpv6-Ignore +SELECT id, client_ip, dest_ip FROM logs WHERE dest_ip IN ('127.0.0.1', '2001:cafe::13e1:16fc:8726:1bf8', '2001:cafe::/48') ORDER BY id LIMIT 10; + + id | client_ip | dest_ip +---------------+---------------+------------------------------ +20 |10.0.1.199 |2001:cafe::ff07:bdcc:bc59:ff9f +22 |10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +23 |10.0.1.199 |2001:cafe::d46a:9bdc:8126:b00b +24 |10.0.1.166 |2001:cafe::13e1:16fc:8726:1bf8 +; + +groupByIpv4 +SELECT client_ip FROM logs GROUP BY client_ip LIMIT 5; + + client_ip:s +--------------- +null +10.0.0.105 +10.0.0.107 +10.0.0.109 +10.0.0.113 +; + +groupByIpv6 +SELECT dest_ip FROM logs GROUP BY dest_ip ORDER BY dest_ip DESC LIMIT 5; + + dest_ip +------------------------------ +fe80::a65e:60ff:fee8:fee9 +fe80::86ba:3bff:fe05:c3f3 +2001:cafe::ff07:bdcc:bc59:ff9f +2001:cafe::ff07:bdcc:bc59:ff9e +2001:cafe::ff07:bdcc:bc59:ff9d +; + +groupByIpv4AndIpv6 +SELECT client_ip, dest_ip FROM logs GROUP BY client_ip, dest_ip ORDER BY dest_ip DESC LIMIT 5; + + client_ip | dest_ip +---------------+------------------------------ +10.0.1.222 |fe80::a65e:60ff:fee8:fee9 +10.0.1.13 |fe80::86ba:3bff:fe05:c3f3 +null |2001:cafe::ff07:bdcc:bc59:ff9f +10.0.1.166 |2001:cafe::ff07:bdcc:bc59:ff9f +10.0.1.199 |2001:cafe::ff07:bdcc:bc59:ff9f +; + + +groupByIpv4AndPort +SELECT client_ip, client_port FROM logs GROUP BY client_ip, client_port ORDER BY client_port DESC LIMIT 5; + + client_ip | client_port +---------------+--------------- +10.0.1.9 |65004 +10.0.0.129 |63982 +null |63238 +null |61337 +null |61220 +; diff --git a/x-pack/qa/sql/src/main/resources/logs.csv b/x-pack/qa/sql/src/main/resources/logs.csv new file mode 100644 index 00000000000..240fb3752ab --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/logs.csv @@ -0,0 +1,101 @@ 
+id,@timestamp,bytes_in,bytes_out,client_ip,client_port,dest_ip,status +1,2017-11-10T21:15:54Z,47,388,10.0.1.1,9152,172.27.1.129,OK +2,2017-11-10T21:15:39Z,29,374,10.0.1.1,31693,172.27.1.123,OK +3,2017-11-10T21:15:39Z,35,303,10.0.1.1,23625,172.27.1.113,OK +4,2017-11-10T21:15:39Z,36,312,10.0.1.1,9932,172.27.1.116,OK +5,2017-11-10T21:15:40Z,35,344,10.0.1.1,22695,172.27.1.149,OK +6,2017-11-10T21:15:40Z,31,503,10.0.1.1,59811,172.27.1.122,OK +7,2017-11-10T21:15:40Z,35,458,10.0.1.7,57372,172.27.1.140,OK +8,2017-11-10T21:15:41Z,35,281,10.0.1.8,17370,,OK +9,2017-11-10T21:15:41Z,46,231,10.0.1.9,65004,,OK +10,2017-11-10T20:36:07Z,40,506,10.0.1.10,22661,,OK +11,2017-11-10T20:36:08Z,34,471,10.0.1.11,16752,172.27.1.131,OK +12,2017-11-10T20:36:07Z,39,503,10.0.1.12,19479,172.27.1.103,OK +13,2017-11-10T20:36:07Z,29,502,10.0.1.13,2326,172.27.1.139,OK +14,2017-11-10T20:36:15Z,35,280,10.0.1.13,51758,172.27.1.129,OK +15,2017-11-10T20:36:15Z,38,225,,22994,172.27.1.139,OK +16,2017-11-10T20:35:54Z,35,326,,5505,172.27.1.120,OK +17,2017-11-10T20:35:54Z,46,466,10.0.1.13,3666,172.27.1.103,OK +18,2017-11-10T20:35:55Z,42,238,10.0.1.13,23791,172.27.1.111,OK +19,2017-11-10T17:54:43Z,16,,10.0.1.13,,fe80::86ba:3bff:fe05:c3f3,OK +20,2017-11-10T23:23:24Z,40,,10.0.1.199,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +21,2017-11-10T17:54:59Z,24,,10.0.1.222,,fe80::a65e:60ff:fee8:fee9,OK +22,2017-11-10T21:13:27Z,20,,10.0.1.166,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +23,2017-11-10T22:37:41Z,24,,,,2001:cafe::d46a:9bdc:8126:b00b,OK +24,2017-11-10T20:34:43Z,8,,10.0.1.166,,2001:cafe::13e1:16fc:8726:1bf8,OK +25,2017-11-10T23:30:46Z,40,,10.0.1.199,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +26,2017-11-10T21:13:16Z,20,,,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +27,2017-11-10T23:36:32Z,0,,10.0.1.199,,2001:cafe::13e1:16fc:8726:1bf8,OK +28,2017-11-10T23:36:33Z,40,,10.0.1.199,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +29,2017-11-10T20:35:26Z,20,,10.0.1.166,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +30,2017-11-10T23:36:41Z,8,,,,2001:cafe::13e1:16fc:8726:1bf8,OK +31,2017-11-10T23:56:36Z,8,,10.0.1.199,,2001:cafe::13e1:16fc:8726:1bf8,OK +32,2017-11-10T20:29:25Z,32,,10.0.1.177,59769,172.20.10.1,Error +33,2017-11-10T21:35:01Z,20,,10.0.1.166,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +34,2017-11-10T21:12:17Z,20,,10.0.1.166,,2001:cafe::ff07:bdcc:bc59:ff9e,OK +35,2017-11-10T23:17:14Z,40,,10.0.1.199,,2001:cafe::ff07:bdcc:bc59:ff9d,OK +36,2017-11-10T23:28:11Z,8,,10.0.1.199,,2001:cafe::13e1:16fc:8726:1bf8,OK +37,2017-11-10T22:36:27Z,8,,10.0.1.199,,2001:cafe::13e1:16fc:8726:1bf8,OK +38,2017-11-10T20:35:55Z,36,281,,58533,172.27.1.1,OK +39,2017-11-10T20:35:55Z,25,273,,39211,,OK +40,2017-11-10T20:35:55Z,34,253,,37971,172.27.1.1,OK +41,2017-11-10T20:35:55Z,41,503,,47831,172.27.1.1,OK +42,2017-11-10T21:34:49Z,28,,,,27.58.6.220,Error +43,2017-11-10T20:35:55Z,28,206,10.0.1.200,31000,172.27.1.1,OK +44,2017-11-10T20:14:04Z,8,,10.0.1.201,,2001:cafe::13e1:16fc:8726:1bf8,OK +45,2017-11-10T19:38:06Z,37,239,10.0.1.202,3577,172.27.1.1,OK +46,2017-11-10T21:14:18Z,8,,10.0.1.203,,2001:cafe::13e1:16fc:8726:1bf8,OK +47,2017-11-10T20:35:56Z,34,202,10.0.1.204,49112,172.27.1.1,OK +48,2017-11-10T20:53:05Z,8,,10.0.1.205,,2001:cafe::13e1:16fc:8726:1bf8,OK +49,2017-11-10T21:25:42Z,8,,10.0.1.206,,2001:cafe::13e1:16fc:8726:1bf9,OK +50,2017-11-10T21:14:44Z,8,,10.0.1.207,,2001:cafe::13e1:16fc:8726:1bf4,OK +51,2017-11-10T21:28:34Z,8,,10.0.1.208,,2001:cafe::13e1:16fc:8726:1bf3,OK +52,2017-11-10T20:35:55Z,34,227,,63238,172.27.1.1,OK +53,2017-11-10T20:15:24Z,8,,,,2001:cafe::13e1:16fc:8726:1bf8,OK 
+54,2017-11-10T20:35:57Z,37,239,,61337,172.27.1.1,OK +55,2017-11-10T17:14:10Z,16,,10.0.1.222,,2001:cafe::a98d:374:79e4:4865,OK +56,2017-11-10T20:35:57Z,38,476,10.0.1.200,53720,172.27.1.1,OK +57,2017-11-10T23:22:13Z,8,,10.0.1.201,,2001:cafe::13e1:16fc:8726:1bf8,OK +58,2017-11-10T20:32:57Z,8,,10.0.1.202,,2001:cafe::13e1:16fc:8726:1bf7,OK +59,2017-11-10T21:24:00Z,8,,10.0.1.203,,2001:cafe::13e1:16fc:8726:1bf6,OK +60,2017-11-10T20:35:56Z,32,503,10.0.1.204,19382,172.27.1.1,OK +61,2017-11-10T23:43:10Z,0,,10.0.1.205,,2001:cafe::13e1:16fc:8726:1bf8,OK +62,2017-11-10T20:35:57Z,30,169,10.0.1.206,47532,172.27.1.1,OK +63,2017-11-10T20:21:58Z,20,,10.0.1.207,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +64,2017-11-10T20:35:57Z,41,271,10.0.1.208,16227,,OK +65,2017-11-10T20:33:06Z,28,,10.0.1.166,,172.27.1.1,Error +66,2017-11-10T20:35:57Z,33,185,,28928,172.27.1.1,OK +67,2017-11-10T20:26:21Z,20,,,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +68,2017-11-10T21:23:25Z,20,,,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +69,2017-11-10T21:23:54Z,8,,10.0.1.166,,2001:cafe::13e1:16fc:8726:1bf8,OK +70,2017-11-10T20:35:57Z,35,234,10.0.1.166,54994,172.27.1.1,OK +71,2017-11-10T00:27:03Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b6,OK +72,2017-11-10T00:27:46Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b6,OK +73,2017-11-10T20:35:58Z,35,223,,20163,172.27.1.1,OK +74,2017-11-10T20:35:57Z,32,501,10.0.1.166,51275,172.27.1.1,OK +75,2017-11-10T22:27:09Z,20,,10.0.1.199,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +76,2017-11-10T20:35:58Z,45,493,10.0.1.166,1193,172.27.1.1,OK +77,2017-11-10T22:26:44Z,20,,10.0.1.199,,2001:cafe::ff07:bdcc:bc59:ff9f,OK +78,2017-11-10T22:27:31Z,8,,10.0.1.199,,2001:cafe::13e1:16fc:8726:1bf8,OK +79,2017-11-10T20:35:52Z,47,246,,24564,172.27.1.1,OK +80,2017-11-10T00:00:22Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b6,OK +81,2017-11-10T20:35:52Z,37,420,10.0.1.166,40542,172.27.1.1,OK +82,2017-11-10T00:01:20Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b6,OK +83,2017-11-10T00:01:04Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b6,OK +84,2017-11-10T00:32:48Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b5,OK +85,2017-11-10T00:01:45Z,48,,10.0.1.122,,2001:cafe::470f:60b7:f84a:25b4,OK +86,2017-11-10T20:36:08Z,38,509,,61220,172.27.1.1,OK +87,2017-11-10T21:17:37Z,38,226,10.0.0.144,26602,,OK +88,2017-11-10T20:06:49Z,30,,10.0.0.147,53240,90.128.199.24,Error +89,2017-11-10T21:17:37Z,44,284,10.0.0.118,49479,172.27.1.1,OK +90,2017-11-10T19:51:38Z,28,,10.0.0.130,,203.131.98.151,Error +91,2017-11-10T19:51:38Z,28,,10.0.0.107,,172.20.10.8,Error +92,2017-11-10T20:06:50Z,34,215,10.0.0.113,25162,90.128.199.24,OK +93,2017-11-10T21:17:46Z,33,185,10.0.0.129,63982,172.27.1.1,OK +94,2017-11-10T19:51:38Z,28,,10.0.0.130,,203.131.98.151,Error +95,2017-11-10T21:17:46Z,28,321,10.0.0.105,4292,172.27.1.1,OK +96,2017-11-10T00:04:50Z,48,,10.0.0.109,,2001:cafe::470f:60b7:f84a:25b6,OK +97,2017-11-10T21:17:48Z,30,280,10.0.0.145,57783,172.27.1.1,OK +98,2017-11-10T21:12:24Z,74,90,10.0.0.134,57203,172.20.10.1,OK +99,2017-11-10T21:17:37Z,39,512,10.0.0.128,29333,,OK +100,2017-11-10T03:21:36Z,64,183,10.0.0.129,4541,172.16.1.1,OK From cd27b0b996d5cc81d16f57623f1e7cc5eba35c6f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 24 Oct 2018 14:25:10 -0500 Subject: [PATCH 39/67] Revert "Rollup add default metrics to histo groups (#34534)" (#34815) This reverts commit 4236358f5d53ab4f0d8457faacb19983b6b1ab82. 
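
With this revert, a rollup job keeps only the metrics that were explicitly
configured; no implicit "min"/"max" metrics are added for the date_histogram
and histogram group fields. A minimal sketch of the restored behaviour, based
on the constructors exercised in the tests below (the job id, index pattern,
cron expression and field names are illustrative, not taken from the change):

    // Only the supplied metrics are stored; nothing is added for "timestamp".
    GroupConfig groups = new GroupConfig(
        new DateHistogramGroupConfig("timestamp", DateHistogramInterval.DAY));
    List<MetricConfig> metrics =
        Collections.singletonList(new MetricConfig("value", Arrays.asList("max")));
    RollupJobConfig config = new RollupJobConfig("sensor_job", "sensor-*",
        "sensor_rollup", "*/30 * * * * ?", 1000, groups, metrics, null);
    // config.getMetricsConfig() now contains exactly one entry, for "value".
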
--- .../rollup/job/config/RollupJobConfig.java | 5 -- .../org/elasticsearch/client/RollupIT.java | 40 ++++----- .../high-level/rollup/put_job.asciidoc | 62 -------------- docs/reference/rollup/apis/get-job.asciidoc | 21 ----- docs/reference/rollup/apis/put-job.asciidoc | 84 +------------------ .../rollup/apis/rollup-caps.asciidoc | 6 -- .../rollup/apis/rollup-index-caps.asciidoc | 6 -- .../core/rollup/job/RollupJobConfig.java | 27 +----- .../core/rollup/job/RollupJobConfigTests.java | 78 ----------------- .../job/RollupIndexerIndexingTests.java | 26 ------ .../rest-api-spec/test/rollup/delete_job.yml | 12 --- .../rest-api-spec/test/rollup/get_jobs.yml | 12 --- .../test/rollup/get_rollup_caps.yml | 12 --- .../test/rollup/get_rollup_index_caps.yml | 22 ----- .../rest-api-spec/test/rollup/put_job.yml | 4 - .../test/rollup/security_tests.yml | 4 - 16 files changed, 18 insertions(+), 403 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/RollupJobConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/RollupJobConfig.java index 9bf639f548d..d8e87eeb3d5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/RollupJobConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/rollup/job/config/RollupJobConfig.java @@ -42,11 +42,6 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona /** * This class holds the configuration details of a rollup job, such as the groupings, metrics, what * index to rollup and where to roll them to. - * - * When the configuration is stored server side, if there is no {@link MetricConfig} for the fields referenced in the - * {@link HistogramGroupConfig} and {@link DateHistogramGroupConfig} in the passed {@link GroupConfig}, - * then default metrics of {@code ["min", "max"]} are provided - * */ public class RollupJobConfig implements Validatable, ToXContentObject { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java index e30c1b383a2..7a5f873d45c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RollupIT.java @@ -46,7 +46,6 @@ import org.elasticsearch.client.rollup.job.config.GroupConfig; import org.elasticsearch.client.rollup.job.config.MetricConfig; import org.elasticsearch.client.rollup.job.config.RollupJobConfig; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; @@ -169,9 +168,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase { public void testPutAndGetRollupJob() throws Exception { // TODO expand this to also test with histogram and terms? 
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY)); - final List metrics = Arrays.asList( - new MetricConfig("value", SUPPORTED_METRICS), - new MetricConfig("date", Arrays.asList(MaxAggregationBuilder.NAME))); + final List metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS)); final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600)); PutRollupJobRequest putRollupJobRequest = @@ -199,28 +196,21 @@ public class RollupIT extends ESRestHighLevelClientTestCase { assertEquals(groups.getDateHistogram().getTimeZone(), source.get("date.date_histogram.time_zone")); for (MetricConfig metric : metrics) { - if (metric.getField().equals("value")) { - for (String name : metric.getMetrics()) { - Number value = (Number) source.get(metric.getField() + "." + name + ".value"); - if ("min".equals(name)) { - assertEquals(min, value.intValue()); - } else if ("max".equals(name)) { - assertEquals(max, value.intValue()); - } else if ("sum".equals(name)) { - assertEquals(sum, value.doubleValue(), 0.0d); - } else if ("avg".equals(name)) { - assertEquals(sum, value.doubleValue(), 0.0d); - Number avgCount = (Number) source.get(metric.getField() + "." + name + "._count"); - assertEquals(numDocs, avgCount.intValue()); - } else if ("value_count".equals(name)) { - assertEquals(numDocs, value.intValue()); - } + for (String name : metric.getMetrics()) { + Number value = (Number) source.get(metric.getField() + "." + name + ".value"); + if ("min".equals(name)) { + assertEquals(min, value.intValue()); + } else if ("max".equals(name)) { + assertEquals(max, value.intValue()); + } else if ("sum".equals(name)) { + assertEquals(sum, value.doubleValue(), 0.0d); + } else if ("avg".equals(name)) { + assertEquals(sum, value.doubleValue(), 0.0d); + Number avgCount = (Number) source.get(metric.getField() + "." + name + "._count"); + assertEquals(numDocs, avgCount.intValue()); + } else if ("value_count".equals(name)) { + assertEquals(numDocs, value.intValue()); } - } else { - Number value = (Number) source.get(metric.getField() + ".max.value"); - assertEquals( - DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime("2018-01-01T00:59:50").getMillis(), - value.longValue()); } } }); diff --git a/docs/java-rest/high-level/rollup/put_job.asciidoc b/docs/java-rest/high-level/rollup/put_job.asciidoc index 50b05189b13..0b7ece05ca8 100644 --- a/docs/java-rest/high-level/rollup/put_job.asciidoc +++ b/docs/java-rest/high-level/rollup/put_job.asciidoc @@ -119,68 +119,6 @@ include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup- <2> Adds the metrics to compute on the `temperature` field <3> Adds the metrics to compute on the `voltage` field -By default, metrics `min`/`max` for the fields in `DateHistogramGroupConfig` and -`HistogramGroupConfig` are added to the configuration unless the user already provided -metrics for those fields. 
- -So, for the following configuration: - -[source,js] --------------------------------------------------- -"groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d", - "time_zone": "UTC" - }, - "terms": { - "fields": ["hostname", "datacenter"] - }, - "histogram": { - "fields": ["load", "net_in", "net_out"], - "interval": 5 - }, -}, -"metrics": [ - { - "field": "load", - "metrics": ["max"] - }, - { - "field": "net_in", - "metrics": ["max"] - } -] --------------------------------------------------- -// NOTCONSOLE - -The following will be the metrics in the configuration after -the defaults are added server side. Note the default metrics -provided for the fields `timestamp` and `net_out` - -[source,js] --------------------------------------------------- -"metrics": [ - { - "field": "load", - "metrics": ["max"] - }, - { - "field": "net_in", - "metrics": ["max"] - }, - { - "field": "timestamp", - "metrics": ["min", "max"] - }, - { - "field": "net_out", - "metrics": ["min", "max"] - } -] --------------------------------------------------- -// NOTCONSOLE [[java-rest-high-x-pack-rollup-put-rollup-job-execution]] ==== Execution diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index 8ba126a8846..794d7248012 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -88,13 +88,6 @@ Which will yield the following response: "metrics" : [ "avg" ] - }, - { - "field": "timestamp", - "metrics": [ - "max", - "min" - ] } ], "timeout" : "20s", @@ -215,13 +208,6 @@ Which will yield the following response: "metrics" : [ "avg" ] - }, - { - "field": "timestamp", - "metrics": [ - "min", - "max" - ] } ], "timeout" : "20s", @@ -271,13 +257,6 @@ Which will yield the following response: "metrics" : [ "avg" ] - }, - { - "field": "timestamp", - "metrics": [ - "min", - "max" - ] } ], "timeout" : "20s", diff --git a/docs/reference/rollup/apis/put-job.asciidoc b/docs/reference/rollup/apis/put-job.asciidoc index 55568933d89..79e30ae8dc9 100644 --- a/docs/reference/rollup/apis/put-job.asciidoc +++ b/docs/reference/rollup/apis/put-job.asciidoc @@ -68,7 +68,7 @@ PUT _xpack/rollup/job/sensor "groups" : { "date_histogram": { "field": "timestamp", - "interval": "60m", + "interval": "1h", "delay": "7d" }, "terms": { @@ -98,84 +98,4 @@ When the job is created, you receive the following results: "acknowledged": true } ---- -// TESTRESPONSE - -By default the metrics `min`/`max` are added -for the fields in the `date_histogram` and `histogram` configurations. -If this behavior is not desired, explicitly configure metrics -for those fields. This will override the defaults. - -If the following is provided - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor2 -{ - "index_pattern": "sensor-*", - "rollup_index": "sensor_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "60m", - "delay": "7d" - }, - "histogram": { - "fields": ["voltage", "temperature"], - "interval": 5 - } - }, - "metrics": [ - { - "field": "temperature", - "metrics": ["min", "max", "sum"] - } - ] -} --------------------------------------------------- -// NOTCONSOLE -// TEST[setup:sensor_index] - -The actual config when created in the cluster will look as follows. 
- -[source,js] --------------------------------------------------- -{ - "index_pattern": "sensor-*", - "rollup_index": "sensor_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "60m", - "delay": "7d" - }, - "histogram": { - "fields": ["voltage", "temperature"], - "interval": 5 - } - }, - "metrics": [ - { - "field": "temperature", - "metrics": ["min", "max", "sum"] - }, - { - "field": "voltage", <1> - "metrics": ["min", "max"] - }, - { - "field": "timestamp", <1> - "metrics": ["min", "max"] - } - ] -} --------------------------------------------------- -// NOTCONSOLE -<1> Notice the new default metrics gathered for `voltage` and `timestamp`. - Since these fields were referenced in `groups.histogram` and - `groups.date_histogram` configurations - respectively and no metrics were requested for them, - they both got the default metrics of `["min", "max"]`. +// TESTRESPONSE \ No newline at end of file diff --git a/docs/reference/rollup/apis/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc index 6679c4c5f06..274037cae8f 100644 --- a/docs/reference/rollup/apis/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -124,12 +124,6 @@ Which will yield the following response: "time_zone" : "UTC", "interval" : "1h", "delay": "7d" - }, - { - "agg": "max" - }, - { - "agg": "min" } ], "voltage" : [ diff --git a/docs/reference/rollup/apis/rollup-index-caps.asciidoc b/docs/reference/rollup/apis/rollup-index-caps.asciidoc index 64c5c5ac784..df314fb458b 100644 --- a/docs/reference/rollup/apis/rollup-index-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-index-caps.asciidoc @@ -120,12 +120,6 @@ This will yield the following response: "time_zone" : "UTC", "interval" : "1h", "delay": "7d" - }, - { - "agg" : "max" - }, - { - "agg" : "min" } ], "voltage" : [ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java index 172a6fe617b..51a4736e3ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java @@ -20,19 +20,14 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -51,7 +46,6 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject { private static final String PAGE_SIZE = "page_size"; private static final String INDEX_PATTERN = "index_pattern"; private static final String ROLLUP_INDEX = "rollup_index"; - private static final List DEFAULT_HISTO_METRICS = 
Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME); private final String id; private final String indexPattern; @@ -129,7 +123,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject { this.indexPattern = indexPattern; this.rollupIndex = rollupIndex; this.groupConfig = groupConfig; - this.metricsConfig = addDefaultMetricsIfNeeded(metricsConfig, groupConfig); + this.metricsConfig = metricsConfig != null ? metricsConfig : Collections.emptyList(); this.timeout = timeout != null ? timeout : DEFAULT_TIMEOUT; this.cron = cron; this.pageSize = pageSize; @@ -283,23 +277,4 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject { public static RollupJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException { return PARSER.parse(parser, optionalJobId); } - - private static List addDefaultMetricsIfNeeded(List metrics, GroupConfig groupConfig) { - List inputMetrics = metrics != null ? new ArrayList<>(metrics) : new ArrayList<>(); - if (groupConfig != null) { - String timeField = groupConfig.getDateHistogram().getField(); - Set currentFields = inputMetrics.stream().map(MetricConfig::getField).collect(Collectors.toSet()); - if (currentFields.contains(timeField) == false) { - inputMetrics.add(new MetricConfig(timeField, DEFAULT_HISTO_METRICS)); - } - if (groupConfig.getHistogram() != null) { - for (String histoField : groupConfig.getHistogram().getFields()) { - if (currentFields.contains(histoField) == false) { - inputMetrics.add(new MetricConfig(histoField, DEFAULT_HISTO_METRICS)); - } - } - } - } - return Collections.unmodifiableList(inputMetrics); - } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java index fa9009af018..09d00e11fef 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java @@ -11,23 +11,10 @@ import org.elasticsearch.test.AbstractSerializingTestCase; import org.junit.Before; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Random; -import java.util.Set; -import java.util.stream.Collectors; -import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween; import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomCron; -import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig; -import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig; -import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomMetricsConfigs; import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupJobConfig; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.isIn; public class RollupJobConfigTests extends AbstractSerializingTestCase { @@ -176,69 +163,4 @@ public class RollupJobConfigTests extends AbstractSerializingTestCase metrics = new ArrayList<>(randomMetricsConfigs(random)); - for (String histoField : histogramGroupConfig1.getFields()) { - metrics.add(new MetricConfig(histoField, Arrays.asList("max"))); - 
} - GroupConfig groupConfig = new GroupConfig(dateHistogramGroupConfig, histogramGroupConfig1, null); - RollupJobConfig rollupJobConfig = new RollupJobConfig( - randomAsciiAlphanumOfLengthBetween(random, 1, 20), - "indexes_*", - "rollup_" + randomAsciiAlphanumOfLengthBetween(random, 1, 20), - randomCron(), - randomIntBetween(1, 10), - groupConfig, - metrics, - null); - Set metricFields = rollupJobConfig.getMetricsConfig().stream().map(MetricConfig::getField).collect(Collectors.toSet()); - assertThat(dateHistogramGroupConfig.getField(), isIn(metricFields)); - List histoFields = Arrays.asList(histogramGroupConfig1.getFields()); - rollupJobConfig.getMetricsConfig().forEach(metricConfig -> { - if (histoFields.contains(metricConfig.getField())) { - // Since it is explicitly included, the defaults should not be added - assertThat(metricConfig.getMetrics(), containsInAnyOrder("max")); - } - if (metricConfig.getField().equals(dateHistogramGroupConfig.getField())) { - assertThat(metricConfig.getMetrics(), containsInAnyOrder("max", "min")); - } - }); - } - - public void testDefaultFieldsForHistograms() { - final Random random = random(); - DateHistogramGroupConfig dateHistogramGroupConfig = randomDateHistogramGroupConfig(random); - HistogramGroupConfig histogramGroupConfig1 = randomHistogramGroupConfig(random); - List metrics = new ArrayList<>(randomMetricsConfigs(random)); - metrics.add(new MetricConfig(dateHistogramGroupConfig.getField(), Arrays.asList("max"))); - GroupConfig groupConfig = new GroupConfig(dateHistogramGroupConfig, histogramGroupConfig1, null); - RollupJobConfig rollupJobConfig = new RollupJobConfig( - randomAsciiAlphanumOfLengthBetween(random, 1, 20), - "indexes_*", - "rollup_" + randomAsciiAlphanumOfLengthBetween(random, 1, 20), - randomCron(), - randomIntBetween(1, 10), - groupConfig, - metrics, - null); - Set metricFields = rollupJobConfig.getMetricsConfig().stream().map(MetricConfig::getField).collect(Collectors.toSet()); - for (String histoField : histogramGroupConfig1.getFields()) { - assertThat(histoField, isIn(metricFields)); - } - assertThat(dateHistogramGroupConfig.getField(), isIn(metricFields)); - List histoFields = Arrays.asList(histogramGroupConfig1.getFields()); - rollupJobConfig.getMetricsConfig().forEach(metricConfig -> { - if (histoFields.contains(metricConfig.getField())) { - assertThat(metricConfig.getMetrics(), containsInAnyOrder("max", "min")); - } - if (metricConfig.getField().equals(dateHistogramGroupConfig.getField())) { - // Since it is explicitly included, the defaults should not be added - assertThat(metricConfig.getMetrics(), containsInAnyOrder("max")); - } - }); - } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index c3add626d67..f33c1d4e008 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -116,8 +116,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", "1ms", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", 3.0, - "the_histo.max.value", 3.0, "_rollup.id", job.getId() ) )); @@ -131,8 +129,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { 
"the_histo.date_histogram.interval", "1ms", "the_histo.date_histogram._count", 1, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", 7.0, - "the_histo.max.value", 7.0, "_rollup.id", job.getId() ) )); @@ -183,8 +179,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "counter.max.value", 20.0, "counter.sum.value", 50.0, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) asLong("2015-03-31T03:00:00"), - "the_histo.max.value", (double) asLong("2015-03-31T03:40:00"), "_rollup.id", job.getId() ) )); @@ -203,8 +197,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "counter.max.value", 55.0, "counter.sum.value", 141.0, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) asLong("2015-03-31T04:00:00"), - "the_histo.max.value", (double) asLong("2015-03-31T04:40:00"), "_rollup.id", job.getId() ) )); @@ -223,8 +215,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "counter.max.value", 80.0, "counter.sum.value", 275.0, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) asLong("2015-03-31T05:00:00"), - "the_histo.max.value", (double) asLong("2015-03-31T05:40:00"), "_rollup.id", job.getId() ) )); @@ -243,8 +233,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "counter.max.value", 100.0, "counter.sum.value", 270.0, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) asLong("2015-03-31T06:00:00"), - "the_histo.max.value", (double) asLong("2015-03-31T06:40:00"), "_rollup.id", job.getId() ) )); @@ -263,8 +251,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "counter.max.value", 200.0, "counter.sum.value", 440.0, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) asLong("2015-03-31T07:00:00"), - "the_histo.max.value", (double) asLong("2015-03-31T07:40:00"), "_rollup.id", job.getId() ) )); @@ -306,8 +292,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", "1m", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) (now - TimeValue.timeValueHours(5).getMillis()), - "the_histo.max.value", (double) (now - TimeValue.timeValueHours(5).getMillis()), "_rollup.id", job.getId() ) )); @@ -321,8 +305,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", "1m", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) (now - TimeValue.timeValueMinutes(75).getMillis()), - "the_histo.max.value", (double) (now - TimeValue.timeValueMinutes(75).getMillis()), "_rollup.id", job.getId() ) )); @@ -336,8 +318,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", "1m", "the_histo.date_histogram._count", 1, "the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(), - "the_histo.min.value", (double) (now - TimeValue.timeValueMinutes(61).getMillis()), - "the_histo.max.value", (double) (now - TimeValue.timeValueMinutes(61).getMillis()), "_rollup.id", job.getId() ) )); @@ -377,8 +357,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", 
"1d", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", timeZone.toString(), - "the_histo.min.value", (double) (now - TimeValue.timeValueHours(10).getMillis()), - "the_histo.max.value", (double) (now - TimeValue.timeValueHours(8).getMillis()), "_rollup.id", job.getId() ) )); @@ -398,8 +376,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", "1d", "the_histo.date_histogram._count", 2, "the_histo.date_histogram.time_zone", timeZone.toString(), - "the_histo.min.value", (double) (now - TimeValue.timeValueHours(10).getMillis()), - "the_histo.max.value", (double) (now - TimeValue.timeValueHours(8).getMillis()), "_rollup.id", job.getId() ) )); @@ -413,8 +389,6 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { "the_histo.date_histogram.interval", "1d", "the_histo.date_histogram._count", 5, "the_histo.date_histogram.time_zone", timeZone.toString(), - "the_histo.min.value", (double) (now - TimeValue.timeValueHours(6).getMillis()), - "the_histo.max.value", (double) now, "_rollup.id", job.getId() ) )); diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index ebf953c9352..861be094fa6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -62,10 +62,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 @@ -113,10 +109,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 @@ -164,10 +156,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml index 7af7f858f4f..759ddbad2b4 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -63,10 +63,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 @@ -178,10 +174,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 @@ -208,10 +200,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml index 145953e2490..f8bb401a772 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml @@ -77,8 +77,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -126,8 +124,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -140,8 +136,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -215,8 
+209,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -229,8 +221,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -246,8 +236,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml index 751b0b2b89a..bd49f2c3389 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -77,8 +77,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -126,8 +124,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -140,8 +136,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -190,8 +184,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -265,8 +257,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -279,8 +269,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -295,8 +283,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -374,8 +360,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -388,8 +372,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -404,8 +386,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" @@ -479,8 +459,6 @@ setup: - agg: "date_histogram" interval: "1h" time_zone: "UTC" - - agg: "max" - - agg: "min" value_field: - agg: "min" - agg: "max" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index 483be951e8a..cbb6f8956b1 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -63,10 +63,6 @@ setup: - "min" - "max" - "sum" - - field: "the_field" - metrics: - - "max" - - "min" timeout: "20s" stats: pages_processed: 0 diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml index 650983b5cff..57bfd821ea2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/security_tests.yml @@ -173,8 +173,6 @@ teardown: hits.hits.0._source: timestamp.date_histogram.time_zone: "UTC" timestamp.date_histogram.timestamp: 0 - timestamp.max.value: 123.0 - 
timestamp.min.value: 123.0 value_field.max.value: 1232.0 _rollup.version: 2 timestamp.date_histogram.interval: "1s" @@ -336,8 +334,6 @@ teardown: hits.hits.0._source: timestamp.date_histogram.time_zone: "UTC" timestamp.date_histogram.timestamp: 0 - timestamp.max.value: 123.0 - timestamp.min.value: 123.0 value_field.max.value: 1232.0 _rollup.version: 2 timestamp.date_histogram.interval: "1s" From d51bc05dce6432a071b7c63de8efaeb9ace94115 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 24 Oct 2018 21:27:07 +0200 Subject: [PATCH 40/67] [TEST] Improve validation of do sections (#34734) We throw parsing exception when an unknown array is found, but we don't when an unknown top-level field is found. This commit makes sure that unsupported top-level fields are not ignored in a do section. Closes #34651 --- .../rest-api-spec/test/painless/30_search.yml | 30 +++++++++---------- .../test/painless_whitelist/20_whitelist.yml | 2 +- .../test/painless_whitelist/30_static.yml | 2 +- .../test/rest/yaml/section/DoSection.java | 2 ++ .../rest/yaml/section/DoSectionTests.java | 14 ++++++++- 5 files changed, 32 insertions(+), 18 deletions(-) diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml index 9a43e1f9aa4..0c0e980d95a 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml @@ -23,8 +23,8 @@ indices.refresh: {} - do: - index: test search: + index: test body: query: script: @@ -45,8 +45,8 @@ - match: { hits.hits.1.fields.sNum1.0: 3.0 } - do: - index: test search: + index: test body: query: script: @@ -70,8 +70,8 @@ - match: { hits.hits.1.fields.sNum1.0: 3.0 } - do: - index: test search: + index: test body: query: script: @@ -96,8 +96,8 @@ - match: { hits.hits.2.fields.sNum1.0: 3.0 } - do: - index: test search: + index: test body: query: script: @@ -127,8 +127,8 @@ indices.refresh: {} - do: - index: test search: + index: test body: query: function_score: @@ -149,8 +149,8 @@ - match: { hits.hits.1._id: "1" } - do: - index: test search: + index: test body: query: function_score: @@ -171,8 +171,8 @@ - match: { hits.hits.1._id: "2" } - do: - index: test search: + index: test body: query: function_score: @@ -193,8 +193,8 @@ - match: { hits.hits.1._id: "1" } - do: - index: test search: + index: test body: query: function_score: @@ -215,8 +215,8 @@ - match: { hits.hits.1._id: "1" } - do: - index: test search: + index: test body: query: function_score: @@ -237,8 +237,8 @@ - match: { hits.hits.1._id: "1" } - do: - index: test search: + index: test body: query: function_score: @@ -274,8 +274,8 @@ indices.refresh: {} - do: - index: test search: + index: test body: query: function_score: @@ -325,8 +325,8 @@ - do: - index: test search: + index: test body: query: function_score: @@ -364,8 +364,8 @@ - do: - index: test search: + index: test body: script_fields: foobar: @@ -391,8 +391,8 @@ - do: - index: test search: + index: test body: aggs: value_agg: @@ -428,8 +428,8 @@ - do: catch: bad_request - index: test search: + index: test body: aggs: genre: diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml index bbb0b44ef1d..b864edaa2a9 100644 --- 
a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml @@ -11,8 +11,8 @@ indices.refresh: {} - do: - index: test search: + index: test body: query: match_all: {} diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml index b6592637296..1dbaf655bf5 100644 --- a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/30_static.yml @@ -11,8 +11,8 @@ indices.refresh: {} - do: - index: test search: + index: test body: query: match_all: {} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index e1346d3f696..892c5874dce 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -106,6 +106,8 @@ public class DoSection implements ExecutableSection { } else if (token.isValue()) { if ("catch".equals(currentFieldName)) { doSection.setCatch(parser.text()); + } else { + throw new ParsingException(parser.getTokenLocation(), "unsupported field [" + currentFieldName + "]"); } } else if (token == XContentParser.Token.START_ARRAY) { if ("warnings".equals(currentFieldName)) { diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index 777ff746118..0af7fe68450 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -23,6 +23,7 @@ import org.apache.http.HttpHost; import org.elasticsearch.Version; import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -52,7 +53,7 @@ import static org.mockito.Mockito.when; public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase { - public void testWarningHeaders() throws IOException { + public void testWarningHeaders() { { final DoSection section = new DoSection(new XContentLocation(1, 1)); @@ -358,6 +359,17 @@ public class DoSectionTests extends AbstractClientYamlTestFragmentParserTestCase assertThat(doSection.getApiCallSection().hasBody(), equalTo(false)); } + public void testUnsupportedTopLevelField() throws Exception { + parser = createParser(YamlXContent.yamlXContent, + "max_concurrent_shard_requests: 1" + ); + + ParsingException e = expectThrows(ParsingException.class, () -> DoSection.parse(parser)); + assertThat(e.getMessage(), is("unsupported field [max_concurrent_shard_requests]")); + parser.nextToken(); + parser.nextToken(); + } + public void testParseDoSectionWithHeaders() throws Exception { parser = createParser(YamlXContent.yamlXContent, "headers:\n" + From 
04f3e67c7792d4ecd46ee42e3428f4941143ca72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Campinas?= Date: Wed, 24 Oct 2018 21:31:29 +0200 Subject: [PATCH 41/67] Remove redundant method from RestClearScrollAction (#34268) The check for null argument is already done in `splitStringByCommaToArray`, hence it can be removed, which allows us to remove the whole splitScrollIds private method. --- .../rest/action/search/RestClearScrollAction.java | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index dc2474c6533..d9f746a029c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -50,7 +50,7 @@ public class RestClearScrollAction extends BaseRestHandler { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String scrollIds = request.param("scroll_id"); ClearScrollRequest clearRequest = new ClearScrollRequest(); - clearRequest.setScrollIds(Arrays.asList(splitScrollIds(scrollIds))); + clearRequest.setScrollIds(Arrays.asList(Strings.splitStringByCommaToArray(scrollIds))); request.withContentOrSourceParamParserOrNull((xContentParser -> { if (xContentParser != null) { // NOTE: if rest request with xcontent body has request parameters, values parsed from request body have the precedence @@ -65,10 +65,4 @@ public class RestClearScrollAction extends BaseRestHandler { return channel -> client.clearScroll(clearRequest, new RestStatusToXContentListener<>(channel)); } - private static String[] splitScrollIds(String scrollIds) { - if (scrollIds == null) { - return Strings.EMPTY_ARRAY; - } - return Strings.splitStringByCommaToArray(scrollIds); - } } From 5f588180f942b07bcf462e4416d3ee5f04c8b81f Mon Sep 17 00:00:00 2001 From: Andrey Atapin Date: Thu, 25 Oct 2018 00:53:31 +0500 Subject: [PATCH 42/67] Improve IndexNotFoundException's default error message (#34649) This commit adds the index name to the error message when an index is not found. 
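
A minimal sketch of the effect (the index name "my_index" is illustrative):
before this change the reason was the fixed string "no such index", afterwards
the missing index is spelled out, which is what the updated assertions below
expect.

    IndexNotFoundException e = new IndexNotFoundException("my_index");
    // the reason now reads: no such index [my_index]
    assert e.getMessage().contains("no such index [my_index]");
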
--- .../java/org/elasticsearch/client/CrudIT.java | 6 ++-- .../client/GetAliasesResponseTests.java | 6 ++-- .../elasticsearch/client/IndicesClientIT.java | 18 +++++++----- .../org/elasticsearch/client/SearchIT.java | 2 +- .../documentation/CRUDDocumentationIT.java | 2 +- .../mustache/MultiSearchTemplateIT.java | 2 +- .../index/reindex/ReindexFailureTests.java | 2 +- .../test/reindex/25_no_auto_create.yml | 6 ++-- .../action/support/AutoCreateIndex.java | 8 ++--- .../index/IndexNotFoundException.java | 6 ++-- .../ElasticsearchExceptionTests.java | 2 +- .../action/admin/indices/get/GetIndexIT.java | 2 +- .../bulk/BulkProcessorClusterSettingsIT.java | 2 +- .../action/support/AutoCreateIndexTests.java | 12 ++++---- .../termvectors/MultiTermVectorsIT.java | 2 +- .../cluster/SimpleClusterStateIT.java | 4 +-- .../IndexNameExpressionResolverTests.java | 6 ++-- .../MetaDataCreateIndexServiceTests.java | 8 ++--- .../allocation/AllocationCommandsTests.java | 2 +- .../flush/SyncedFlushSingleNodeTests.java | 2 +- .../indices/state/OpenCloseIndexIT.java | 8 ++--- .../org/elasticsearch/mget/SimpleMgetIT.java | 4 +-- .../validate/SimpleValidateQueryIT.java | 2 +- .../xpack/ccr/FollowIndexIT.java | 4 +-- .../xpack/ccr/IndexFollowingIT.java | 4 +-- .../RollupResponseTranslationTests.java | 4 +-- .../security/authz/IndexAliasesTests.java | 2 +- .../authz/IndicesAndAliasesResolverTests.java | 29 ++++++++++++------- .../security/authz/ReadActionsTests.java | 12 ++++---- .../xpack/security/ReindexWithSecurityIT.java | 6 ++-- 30 files changed, 94 insertions(+), 81 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index b303d7df904..e679a85f67f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -201,7 +201,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); - assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", exception.getMessage()); + assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index [index]]", exception.getMessage()); assertEquals("index", exception.getMetadata("es.index").get(0)); } IndexRequest index = new IndexRequest("index", "type", "id"); @@ -288,7 +288,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { assertEquals("id1", response.getResponses()[0].getFailure().getId()); assertEquals("type", response.getResponses()[0].getFailure().getType()); assertEquals("index", response.getResponses()[0].getFailure().getIndex()); - assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", + assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index [index]]", response.getResponses()[0].getFailure().getFailure().getMessage()); assertTrue(response.getResponses()[1].isFailed()); @@ -296,7 +296,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { assertEquals("id2", response.getResponses()[1].getId()); assertEquals("type", response.getResponses()[1].getType()); assertEquals("index", response.getResponses()[1].getIndex()); - assertEquals("Elasticsearch exception 
[type=index_not_found_exception, reason=no such index]", + assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index [index]]", response.getResponses()[1].getFailure().getFailure().getMessage()); } BulkRequest bulk = new BulkRequest(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java index c5bc74e7517..92210521b3d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GetAliasesResponseTests.java @@ -109,7 +109,7 @@ public class GetAliasesResponseTests extends AbstractXContentTestCase execute(nonExistentIndexRequest, highLevelClient().indices()::updateAliases, highLevelClient().indices()::updateAliasesAsync)); assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); - assertThat(exception.getMessage(), equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + assertThat(exception.getMessage(), + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]")); assertThat(exception.getMetadata("es.index"), hasItem(nonExistentIndex)); createIndex(index, Settings.EMPTY); @@ -583,7 +584,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { exception = expectThrows(ElasticsearchStatusException.class, () -> execute(mixedRequest, highLevelClient().indices()::updateAliases, highLevelClient().indices()::updateAliasesAsync)); assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); - assertThat(exception.getMessage(), equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + assertThat(exception.getMessage(), + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]")); assertThat(exception.getMetadata("es.index"), hasItem(nonExistentIndex)); assertThat(exception.getMetadata("es.index"), not(hasItem(index))); assertThat(aliasExists(index, alias), equalTo(false)); @@ -595,7 +597,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { exception = expectThrows(ElasticsearchException.class, () -> execute(removeIndexRequest, highLevelClient().indices()::updateAliases, highLevelClient().indices()::updateAliasesAsync)); assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); - assertThat(exception.getMessage(), equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + assertThat(exception.getMessage(), + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]")); assertThat(exception.getMetadata("es.index"), hasItem(nonExistentIndex)); assertThat(exception.getMetadata("es.index"), not(hasItem(index))); assertThat(aliasExists(index, alias), equalTo(false)); @@ -1060,7 +1063,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { highLevelClient().indices()::getAliasAsync); assertThat(getAliasesResponse.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(getAliasesResponse.getException().getMessage(), - equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [index]]")); } { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(alias); @@ -1077,7 +1080,7 
@@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { highLevelClient().indices()::getAliasAsync); assertThat(getAliasesResponse.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(getAliasesResponse.getException().getMessage(), - equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]")); } { GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices(index, "non_existent_index").aliases(alias); @@ -1085,7 +1088,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { highLevelClient().indices()::getAliasAsync); assertThat(getAliasesResponse.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(getAliasesResponse.getException().getMessage(), - equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]")); } { GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices("non_existent_index*"); @@ -1199,7 +1202,8 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(indexUpdateSettingsRequest, highLevelClient().indices()::putSettings, highLevelClient().indices()::putSettingsAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); - assertThat(exception.getMessage(), equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + assertThat(exception.getMessage(), + equalTo("Elasticsearch exception [type=index_not_found_exception, reason=no such index [index]]")); createIndex(index, Settings.EMPTY); exception = expectThrows(ElasticsearchException.class, () -> execute(indexUpdateSettingsRequest, diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 063fce9bcac..f6aa97def28 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -1079,7 +1079,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(exception.getIndex().getName(), equalTo("non_existent_index")); assertThat(exception.getDetailedMessage(), - containsString("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + containsString("Elasticsearch exception [type=index_not_found_exception, reason=no such index [non_existent_index]]")); } { ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "999"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index f80b532f5a4..1f1b11a0ff5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1715,7 +1715,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // TODO status is broken! 
fix in a followup // assertEquals(RestStatus.NOT_FOUND, ee.status()); // <4> assertThat(e.getMessage(), - containsString("reason=no such index")); // <5> + containsString("reason=no such index [missing_index]")); // <5> // end::multi-get-indexnotfound // tag::multi-get-execute-listener diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index c3ec5ae1784..0463069609d 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -169,7 +169,7 @@ public class MultiSearchTemplateIT extends ESIntegTestCase { MultiSearchTemplateResponse.Item response4 = response.getResponses()[3]; assertThat(response4.isFailure(), is(true)); assertThat(response4.getFailure(), instanceOf(IndexNotFoundException.class)); - assertThat(response4.getFailure().getMessage(), equalTo("no such index")); + assertThat(response4.getFailure().getMessage(), equalTo("no such index [unknown]")); MultiSearchTemplateResponse.Item response5 = response.getResponses()[4]; assertThat(response5.isFailure(), is(false)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java index f5e234f66ca..87c7c07007e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java @@ -120,7 +120,7 @@ public class ReindexFailureTests extends ReindexTestCase { assertThat(e.getMessage(), either(containsString("all shards failed")) .or(containsString("No search context found")) - .or(containsString("no such index")) + .or(containsString("no such index [source]")) ); return; } diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/25_no_auto_create.yml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/25_no_auto_create.yml index 874174fda4c..961084a5c04 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/25_no_auto_create.yml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/25_no_auto_create.yml @@ -19,7 +19,7 @@ teardown: transient: action.auto_create_index: false - do: - catch: /no such index and \[action.auto_create_index\] is \[false\]/ + catch: /no such index \[dest\] and \[action.auto_create_index\] is \[false\]/ reindex: body: source: @@ -41,7 +41,7 @@ teardown: id: 1 body: { "text": "test" } - do: - catch: /no such index and \[action.auto_create_index\] \(\[test\]\) doesn't match/ + catch: /no such index \[dest\] and \[action.auto_create_index\] \(\[test\]\) doesn't match/ reindex: body: source: @@ -63,7 +63,7 @@ teardown: id: 1 body: { "text": "test" } - do: - catch: /no such index and \[action.auto_create_index\] contains \[-dest\] which forbids automatic creation of the index/ + catch: /no such index \[dest\] and \[action.auto_create_index\] contains \[-dest\] which forbids automatic creation of the index/ reindex: body: source: diff --git a/server/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java b/server/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java index d834d803384..f530a81e51d 100644 --- 
a/server/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java +++ b/server/src/main/java/org/elasticsearch/action/support/AutoCreateIndex.java @@ -73,10 +73,10 @@ public final class AutoCreateIndex { // One volatile read, so that all checks are done against the same instance: final AutoCreate autoCreate = this.autoCreate; if (autoCreate.autoCreateIndex == false) { - throw new IndexNotFoundException("no such index and [" + AUTO_CREATE_INDEX_SETTING.getKey() + "] is [false]", index); + throw new IndexNotFoundException("[" + AUTO_CREATE_INDEX_SETTING.getKey() + "] is [false]", index); } if (dynamicMappingDisabled) { - throw new IndexNotFoundException("no such index and [" + MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey() + "] is [false]", + throw new IndexNotFoundException("[" + MapperService.INDEX_MAPPER_DYNAMIC_SETTING.getKey() + "] is [false]", index); } // matches not set, default value of "true" @@ -90,11 +90,11 @@ public final class AutoCreateIndex { if (include) { return true; } - throw new IndexNotFoundException("no such index and [" + AUTO_CREATE_INDEX_SETTING.getKey() + "] contains [-" + throw new IndexNotFoundException("[" + AUTO_CREATE_INDEX_SETTING.getKey() + "] contains [-" + indexExpression + "] which forbids automatic creation of the index", index); } } - throw new IndexNotFoundException("no such index and [" + AUTO_CREATE_INDEX_SETTING.getKey() + "] ([" + autoCreate + throw new IndexNotFoundException("[" + AUTO_CREATE_INDEX_SETTING.getKey() + "] ([" + autoCreate + "]) doesn't match", index); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexNotFoundException.java b/server/src/main/java/org/elasticsearch/index/IndexNotFoundException.java index 4442ee276c9..2a4a89e3f90 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexNotFoundException.java +++ b/server/src/main/java/org/elasticsearch/index/IndexNotFoundException.java @@ -28,7 +28,7 @@ public final class IndexNotFoundException extends ResourceNotFoundException { * Construct with a custom message. 
*/ public IndexNotFoundException(String message, String index) { - super(message); + super("no such index [" + index + "] and " + message); setIndex(index); } @@ -37,7 +37,7 @@ public final class IndexNotFoundException extends ResourceNotFoundException { } public IndexNotFoundException(String index, Throwable cause) { - super("no such index", cause); + super("no such index [" + index + "]", cause); setIndex(index); } @@ -46,7 +46,7 @@ public final class IndexNotFoundException extends ResourceNotFoundException { } public IndexNotFoundException(Index index, Throwable cause) { - super("no such index", cause); + super("no such index [" + index.getName() + "]", cause); setIndex(index); } diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 6e4c97fd3da..c014845ce09 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -114,7 +114,7 @@ public class ElasticsearchExceptionTests extends ESTestCase { ElasticsearchException[] rootCauses = exception.guessRootCauses(); assertEquals(rootCauses.length, 1); assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "index_not_found_exception"); - assertEquals(rootCauses[0].getMessage(), "no such index"); + assertEquals("no such index [foo]", rootCauses[0].getMessage()); ShardSearchFailure failure = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), new SearchShardTarget("node_1", new Index("foo", "_na_"), 1, null)); ShardSearchFailure failure1 = new ShardSearchFailure(new ParsingException(1, 2, "foobar", null), diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java index ba4aa3015f3..48914fca131 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java @@ -73,7 +73,7 @@ public class GetIndexIT extends ESIntegTestCase { client().admin().indices().prepareGetIndex().addIndices("missing_idx").get(); fail("Expected IndexNotFoundException"); } catch (IndexNotFoundException e) { - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [missing_idx]")); } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorClusterSettingsIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorClusterSettingsIT.java index ca1630b0064..e82be77fc14 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorClusterSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorClusterSettingsIT.java @@ -45,7 +45,7 @@ public class BulkProcessorClusterSettingsIT extends ESIntegTestCase { assertEquals(3, responses.length); assertFalse("Operation on existing index should succeed", responses[0].isFailed()); assertTrue("Missing index should have been flagged", responses[1].isFailed()); - assertEquals("[wontwork] IndexNotFoundException[no such index]", responses[1].getFailureMessage()); + assertEquals("[wontwork] IndexNotFoundException[no such index [wontwork]]", responses[1].getFailureMessage()); assertFalse("Operation on existing index should succeed", responses[2].isFailed()); } } diff --git a/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java 
b/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java index 84dfe5ec93e..4918939c90b 100644 --- a/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java @@ -83,9 +83,10 @@ public class AutoCreateIndexTests extends ESTestCase { public void testAutoCreationDisabled() { Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build(); AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings); + String randomIndex = randomAlphaOfLengthBetween(1, 10); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> - autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildClusterState())); - assertEquals("no such index and [action.auto_create_index] is [false]", e.getMessage()); + autoCreateIndex.shouldAutoCreate(randomIndex, buildClusterState())); + assertEquals("no such index [" + randomIndex + "] and [action.auto_create_index] is [false]", e.getMessage()); } public void testAutoCreationEnabled() { @@ -207,14 +208,15 @@ public class AutoCreateIndexTests extends ESTestCase { private void expectNotMatch(ClusterState clusterState, AutoCreateIndex autoCreateIndex, String index) { IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> autoCreateIndex.shouldAutoCreate(index, clusterState)); - assertEquals("no such index and [action.auto_create_index] ([" + autoCreateIndex.getAutoCreate() + "]) doesn't match", - e.getMessage()); + assertEquals( + "no such index [" + index + "] and [action.auto_create_index] ([" + autoCreateIndex.getAutoCreate() + "]) doesn't match", + e.getMessage()); } private void expectForbidden(ClusterState clusterState, AutoCreateIndex autoCreateIndex, String index, String forbiddingPattern) { IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> autoCreateIndex.shouldAutoCreate(index, clusterState)); - assertEquals("no such index and [action.auto_create_index] contains [" + forbiddingPattern + assertEquals("no such index [" + index + "] and [action.auto_create_index] contains [" + forbiddingPattern + "] which forbids automatic creation of the index", e.getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java b/server/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java index 10a4c9f3e1d..2f75f6df1a8 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java @@ -76,7 +76,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase { MultiTermVectorsResponse response = mtvBuilder.execute().actionGet(); assertThat(response.getResponses().length, equalTo(1)); assertThat(response.getResponses()[0].getFailure().getCause(), instanceOf(IndexNotFoundException.class)); - assertThat(response.getResponses()[0].getFailure().getCause().getMessage(), equalTo("no such index")); + assertThat(response.getResponses()[0].getFailure().getCause().getMessage(), equalTo("no such index [testX]")); } public void testMultiTermVectorsWithVersion() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/server/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java index 606e716d210..3e27b784e0a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java 
+++ b/server/src/test/java/org/elasticsearch/cluster/SimpleClusterStateIT.java @@ -271,7 +271,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase { client().admin().cluster().prepareState().clear().setMetaData(true).setIndices("a*").setIndicesOptions(allowNoIndices).get(); fail("Expected IndexNotFoundException"); } catch (IndexNotFoundException e) { - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [a*]")); } } @@ -282,7 +282,7 @@ public class SimpleClusterStateIT extends ESIntegTestCase { client().admin().cluster().prepareState().clear().setMetaData(true).setIndices("fzzbzz").setIndicesOptions(allowNoIndices).get(); fail("Expected IndexNotFoundException"); } catch (IndexNotFoundException e) { - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [fzzbzz]")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 9ad9603b148..0832df7c896 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -530,7 +530,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndexNotFoundException infe = expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context, "testZZZ")); - assertThat(infe.getMessage(), is("no such index")); + assertThat(infe.getMessage(), is("no such index [testZZZ]")); } public void testConcreteIndicesIgnoreIndicesOneMissingIndexOtherFound() { @@ -552,7 +552,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndexNotFoundException infe = expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(context, "testMo", "testMahdy")); - assertThat(infe.getMessage(), is("no such index")); + assertThat(infe.getMessage(), is("no such index [testMo]")); } public void testConcreteIndicesIgnoreIndicesEmptyRequest() { @@ -1161,7 +1161,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { IndexNotFoundException infe = expectThrows(IndexNotFoundException.class, () -> indexNameExpressionResolver.concreteIndexNames(state, new DeleteIndexRequest("does_not_exist"))); assertEquals("does_not_exist", infe.getIndex().getName()); - assertEquals("no such index", infe.getMessage()); + assertEquals("no such index [does_not_exist]", infe.getMessage()); } { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index efef803be84..5ccacee395a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -124,9 +124,9 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { MetaDataCreateIndexService.validateShrinkIndex(state, "target", Collections.emptySet(), "source", Settings.EMPTY) ).getMessage()); - assertEquals("no such index", + assertEquals("no such index [no_such_index]", expectThrows(IndexNotFoundException.class, () -> - MetaDataCreateIndexService.validateShrinkIndex(state, "no such 
index", Collections.emptySet(), "target", Settings.EMPTY) + MetaDataCreateIndexService.validateShrinkIndex(state, "no_such_index", Collections.emptySet(), "target", Settings.EMPTY) ).getMessage()); Settings targetSettings = Settings.builder().put("index.number_of_shards", 1).build(); @@ -200,9 +200,9 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { MetaDataCreateIndexService.validateSplitIndex(state, "target", Collections.emptySet(), "source", targetSettings) ).getMessage()); - assertEquals("no such index", + assertEquals("no such index [no_such_index]", expectThrows(IndexNotFoundException.class, () -> - MetaDataCreateIndexService.validateSplitIndex(state, "no such index", Collections.emptySet(), "target", targetSettings) + MetaDataCreateIndexService.validateSplitIndex(state, "no_such_index", Collections.emptySet(), "target", targetSettings) ).getMessage()); assertEquals("the number of source shards [10] must be less that the number of target shards [5]", diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index da0920e6937..1a50ac03e4c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -180,7 +180,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { allocation.reroute(clusterState, new AllocationCommands(randomAllocateCommand("test2", 0, "node2")), false, false); fail("expected ShardNotFoundException when allocating non-existing index"); } catch (IndexNotFoundException e) { - assertThat(e.getMessage(), containsString("no such index")); + assertThat(e.getMessage(), containsString("no such index [test2]")); } logger.info("--> allocating empty primary with acceptDataLoss flag set to false"); diff --git a/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 3bfcfdd3ab1..83411ad2bc2 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -156,7 +156,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { listener.latch.await(); assertNotNull(listener.error); assertNull(listener.result); - assertEquals("no such index", listener.error.getMessage()); + assertEquals("no such index [index not found]", listener.error.getMessage()); } public void testFailAfterIntermediateCommit() throws InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java index 96bb9ab8a1c..f37dde7ec09 100644 --- a/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java @@ -76,14 +76,14 @@ public class OpenCloseIndexIT extends ESIntegTestCase { Client client = client(); Exception e = expectThrows(IndexNotFoundException.class, () -> client.admin().indices().prepareClose("test1").execute().actionGet()); - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [test1]")); } public void testSimpleOpenMissingIndex() { Client client = client(); 
Exception e = expectThrows(IndexNotFoundException.class, () -> client.admin().indices().prepareOpen("test1").execute().actionGet()); - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [test1]")); } public void testCloseOneMissingIndex() { @@ -93,7 +93,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase { assertThat(healthResponse.isTimedOut(), equalTo(false)); Exception e = expectThrows(IndexNotFoundException.class, () -> client.admin().indices().prepareClose("test1", "test2").execute().actionGet()); - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [test2]")); } public void testCloseOneMissingIndexIgnoreMissing() { @@ -114,7 +114,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase { assertThat(healthResponse.isTimedOut(), equalTo(false)); Exception e = expectThrows(IndexNotFoundException.class, () -> client.admin().indices().prepareOpen("test1", "test2").execute().actionGet()); - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [test2]")); } public void testOpenOneMissingIndexIgnoreMissing() { diff --git a/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java index 670f9cdfa08..c7f27dc81a7 100644 --- a/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java +++ b/server/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java @@ -62,7 +62,7 @@ public class SimpleMgetIT extends ESIntegTestCase { assertThat(mgetResponse.getResponses()[1].getIndex(), is("nonExistingIndex")); assertThat(mgetResponse.getResponses()[1].isFailed(), is(true)); - assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is("no such index")); + assertThat(mgetResponse.getResponses()[1].getFailure().getMessage(), is("no such index [nonExistingIndex]")); assertThat(((ElasticsearchException) mgetResponse.getResponses()[1].getFailure().getFailure()).getIndex().getName(), is("nonExistingIndex")); @@ -72,7 +72,7 @@ public class SimpleMgetIT extends ESIntegTestCase { assertThat(mgetResponse.getResponses().length, is(1)); assertThat(mgetResponse.getResponses()[0].getIndex(), is("nonExistingIndex")); assertThat(mgetResponse.getResponses()[0].isFailed(), is(true)); - assertThat(mgetResponse.getResponses()[0].getFailure().getMessage(), is("no such index")); + assertThat(mgetResponse.getResponses()[0].getFailure().getMessage(), is("no such index [nonExistingIndex]")); assertThat(((ElasticsearchException) mgetResponse.getResponses()[0].getFailure().getFailure()).getIndex().getName(), is("nonExistingIndex")); } diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 9ef47af29cd..2ef817e98ba 100644 --- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -151,7 +151,7 @@ public class SimpleValidateQueryIT extends ESIntegTestCase { client().admin().indices().prepareValidateQuery().get(); fail("Expected IndexNotFoundException"); } catch (IndexNotFoundException e) { - assertThat(e.getMessage(), is("no such index")); + assertThat(e.getMessage(), is("no such index [null]")); } } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java 
b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index b038fbdaa03..ed3a03f0b17 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -71,11 +71,11 @@ public class FollowIndexIT extends ESCCRRestTestCase { public void testFollowNonExistingLeaderIndex() throws Exception { assumeFalse("Test should only run when both clusters are running", "leader".equals(targetCluster)); ResponseException e = expectThrows(ResponseException.class, () -> resumeFollow("non-existing-index")); - assertThat(e.getMessage(), containsString("no such index")); + assertThat(e.getMessage(), containsString("no such index [non-existing-index]")); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); e = expectThrows(ResponseException.class, () -> followIndex("non-existing-index", "non-existing-index")); - assertThat(e.getMessage(), containsString("no such index")); + assertThat(e.getMessage(), containsString("no such index [non-existing-index]")); assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java index d4afd4617ac..8bed8ffd0e0 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java @@ -522,7 +522,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(response.getStatsResponses().get(0).status().numberOfFailedFetches(), greaterThanOrEqualTo(1L)); ElasticsearchException fatalException = response.getStatsResponses().get(0).status().getFatalException(); assertThat(fatalException, notNullValue()); - assertThat(fatalException.getRootCause().getMessage(), equalTo("no such index")); + assertThat(fatalException.getRootCause().getMessage(), equalTo("no such index [index1]")); }); pauseFollow("index2"); ensureNoCcrTasks(); @@ -552,7 +552,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(response.getStatsResponses().get(0).status().numberOfFailedBulkOperations(), greaterThanOrEqualTo(1L)); ElasticsearchException fatalException = response.getStatsResponses().get(0).status().getFatalException(); assertThat(fatalException, notNullValue()); - assertThat(fatalException.getMessage(), equalTo("no such index")); + assertThat(fatalException.getMessage(), equalTo("no such index [index2]")); }); pauseFollow("index2"); ensureNoCcrTasks(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 576f37d7844..849461f1b62 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -231,7 +231,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase { MultiSearchResponse.Item missing = new MultiSearchResponse.Item(null, new IndexNotFoundException("foo")); Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.verifyResponse(missing)); - assertThat(e.getMessage(), equalTo("no such 
index")); + assertThat(e.getMessage(), equalTo("no such index [foo]")); } public void testTranslateRollup() { @@ -287,7 +287,7 @@ public class RollupResponseTranslationTests extends AggregatorTestCase { Exception e = expectThrows(RuntimeException.class, () -> RollupResponseTranslator.translateResponse(new MultiSearchResponse.Item[]{missing}, context)); - assertThat(e.getMessage(), equalTo("no such index")); + assertThat(e.getMessage(), equalTo("no such index [foo]")); } public void testMissingFilter() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index dca113b6e42..711ca517d98 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -509,7 +509,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { //security plugin lets it through, but es core intercepts it due to strict indices options and throws index not found IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, client.admin().indices() .prepareGetAliases("alias_1").addIndices("test_1").setIndicesOptions(IndicesOptions.strictExpandOpen())::get); - assertEquals("no such index", indexNotFoundException.getMessage()); + assertEquals("no such index [test_1]", indexNotFoundException.getMessage()); //fails: no manage_aliases privilege on non_authorized alias assertThrowsAuthorizationException(client.admin().indices().prepareGetAliases("non_authorized").addIndices("test_1") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index 1b2317925fc..3e7543ffd99 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -68,6 +68,9 @@ import org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolver.Resolved import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.test.SecurityTestUtils; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; import org.junit.Before; import java.util.Arrays; @@ -469,7 +472,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME))); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [missing*]", e.getMessage()); } public void testResolveExplicitIndicesStrict() { @@ -506,7 +509,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(userNoIndices, SearchAction.NAME))); - 
assertEquals("no such index", e.getMessage()); + assertEquals("no such index [[]]", e.getMessage()); } public void testResolveMissingIndexStrict() { @@ -848,7 +851,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.aliases("alias2"); IndexNotFoundException exception = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME)).getLocal()); - assertEquals("no such index", exception.getMessage()); + assertEquals("no such index [[missing]]", exception.getMessage()); } public void testGetAliasesRequestMissingIndexIgnoreUnavailableAllowNoIndices() { @@ -928,7 +931,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.indices("non_matching_*"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, GetAliasesAction.NAME)).getLocal()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [non_matching_*]", e.getMessage()); } public void testWildcardsGetAliasesRequestNoMatchingIndicesAllowNoIndices() { @@ -995,7 +998,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.indices("_all"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME))); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [[_all]]", e.getMessage()); } public void testWildcardsGetAliasesRequestNoAuthorizedIndicesAllowNoIndices() { @@ -1015,7 +1018,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { //current user is not authorized for any index, foo* resolves to no indices, the request fails IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(userNoIndices, GetAliasesAction.NAME))); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [foo*]", e.getMessage()); } public void testResolveAllAliasesGetAliasesRequest() { @@ -1151,7 +1154,7 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { ); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(tuple.v1(), buildAuthorizedIndices(user, tuple.v2())).getLocal()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [[remote:foo]]", e.getMessage()); } public void testNonRemotableRequestDoesNotAllowRemoteWildcardIndices() { @@ -1269,15 +1272,17 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.indicesOptions(IndicesOptions.fromOptions(true, false, randomBoolean(), randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME))); - assertEquals("no such index" , e.getMessage()); + assertEquals("no such index [[]]" , e.getMessage()); } public void testUnauthorizedDateMathExpressionStrict() { + String expectedIndex = "datetime-" + DateTimeFormat.forPattern("YYYY.MM.dd").print( + new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy()); SearchRequest request = new SearchRequest(""); request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME))); - assertEquals("no such index" , e.getMessage()); + 
assertEquals("no such index [" + expectedIndex + "]" , e.getMessage()); } public void testResolveDateMathExpression() { @@ -1309,15 +1314,17 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { request.indicesOptions(IndicesOptions.fromOptions(true, false, randomBoolean(), randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME))); - assertEquals("no such index" , e.getMessage()); + assertEquals("no such index [[]]" , e.getMessage()); } public void testMissingDateMathExpressionStrict() { + String expectedIndex = "foobar-" + DateTimeFormat.forPattern("YYYY.MM.dd").print( + new DateTime(DateTimeZone.UTC).monthOfYear().roundFloorCopy()); SearchRequest request = new SearchRequest(""); request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean())); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> resolveIndices(request, buildAuthorizedIndices(user, SearchAction.NAME))); - assertEquals("no such index" , e.getMessage()); + assertEquals("no such index [" + expectedIndex + "]" , e.getMessage()); } public void testAliasDateMathExpressionNotSupported() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index 76568d3d48b..e9ed559ab8e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -73,7 +73,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { createIndicesWithRandomAliases("test1", "test2", "index1", "index2"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch("index*") .setIndicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())).get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [index*]", e.getMessage()); } public void testEmptyClusterSearchForAll() { @@ -83,7 +83,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { public void testEmptyClusterSearchForAllDisallowNoIndices() { IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch() .setIndicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())).get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [[]]", e.getMessage()); } public void testEmptyClusterSearchForWildcard() { @@ -94,7 +94,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { public void testEmptyClusterSearchForWildcardDisallowNoIndices() { IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch("*") .setIndicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())).get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [*]", e.getMessage()); } public void testEmptyAuthorizedIndicesSearchForAll() { @@ -106,7 +106,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { createIndicesWithRandomAliases("index1", "index2"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch() .setIndicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, 
randomBoolean())).get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [[]]", e.getMessage()); } public void testEmptyAuthorizedIndicesSearchForWildcard() { @@ -118,7 +118,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { createIndicesWithRandomAliases("index1", "index2"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch("*") .setIndicesOptions(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())).get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [*]", e.getMessage()); } public void testExplicitNonAuthorizedIndex() { @@ -277,7 +277,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertReturnedIndices(multiSearchResponse.getResponses()[0].getResponse(), "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); assertThat(multiSearchResponse.getResponses()[1].getFailure().toString(), - equalTo("[test4] IndexNotFoundException[no such index]")); + equalTo("[test4] IndexNotFoundException[no such index [test4]]")); } { //we set ignore_unavailable and allow_no_indices to true, no errors returned, second item doesn't have hits. diff --git a/x-pack/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityIT.java b/x-pack/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityIT.java index 48e1a46c3ab..0a75565fbc0 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityIT.java +++ b/x-pack/qa/reindex-tests-with-security/src/test/java/org/elasticsearch/xpack/security/ReindexWithSecurityIT.java @@ -60,7 +60,7 @@ public class ReindexWithSecurityIT extends SecurityIntegTestCase { .source("test1", "index1") .filter(QueryBuilders.matchAllQuery()) .get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [index1]", e.getMessage()); } public void testUpdateByQuery() { @@ -75,7 +75,7 @@ public class ReindexWithSecurityIT extends SecurityIntegTestCase { IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> new UpdateByQueryRequestBuilder(client(), UpdateByQueryAction.INSTANCE).source("test1", "index1").get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [index1]", e.getMessage()); } public void testReindex() { @@ -90,6 +90,6 @@ public class ReindexWithSecurityIT extends SecurityIntegTestCase { IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> new ReindexRequestBuilder(client(), ReindexAction.INSTANCE).source("test1", "index1").destination("dest").get()); - assertEquals("no such index", e.getMessage()); + assertEquals("no such index [index1]", e.getMessage()); } } From 1b085252c31a09b24759773ba8069d93de5753ba Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 24 Oct 2018 12:57:28 -0700 Subject: [PATCH 43/67] [Painless] Add instance bindings (#34410) This change adds instance bindings to Painless. This binding allows a whitelisted method to be called on an instance instantiated prior to script compilation. Whitelisting must be done in code as there is no practical way to instantiate a useful instance from a text file (see the tests for an example). Since an instance can be shared by multiple scripts, each method called must be thread-safe. 
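As a rough sketch of how a plugin might register such a binding in code (ExampleStats, its addValue method, and the names used here are hypothetical; the constructor signatures follow the new WhitelistInstanceBinding class and the extended Whitelist constructor in the diff below, and java.util.Collections is assumed to be imported):

    ExampleStats stats = new ExampleStats(); // shared by every script compiled against this whitelist, so addValue must be thread-safe
    WhitelistInstanceBinding binding = new WhitelistInstanceBinding(
            "example stats", stats, "addValue", "int", Collections.singletonList("int"));
    Whitelist whitelist = new Whitelist(ExampleStats.class.getClassLoader(),
            Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
            Collections.singletonList(binding));
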
--- .../elasticsearch/painless/spi/Whitelist.java | 8 +- .../painless/spi/WhitelistClassBinding.java | 4 +- .../spi/WhitelistInstanceBinding.java | 61 ++++++ .../painless/spi/WhitelistLoader.java | 3 +- .../org/elasticsearch/painless/Compiler.java | 10 +- .../org/elasticsearch/painless/Globals.java | 25 ++- .../painless/lookup/PainlessClassBinding.java | 1 - .../lookup/PainlessInstanceBinding.java | 64 ++++++ .../painless/lookup/PainlessLookup.java | 14 +- .../lookup/PainlessLookupBuilder.java | 187 ++++++++++++++++-- .../painless/node/ECallLocal.java | 27 ++- .../elasticsearch/painless/node/SSource.java | 30 ++- .../elasticsearch/painless/BindingsTests.java | 57 +++++- .../ExampleWhitelistExtension.java | 12 +- .../ExampleWhitelistedInstance.java | 36 ++++ .../test/painless_whitelist/40_instance.yml | 41 ++++ 16 files changed, 533 insertions(+), 47 deletions(-) create mode 100644 modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistInstanceBinding.java create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessInstanceBinding.java create mode 100644 plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistedInstance.java create mode 100644 plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java index c36625ad145..b08b38d2bfc 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java @@ -66,13 +66,17 @@ public final class Whitelist { /** The {@link List} of all the whitelisted Painless class bindings. */ public final List whitelistClassBindings; + /** The {@link List} of all the whitelisted Painless instance bindings. */ + public final List whitelistInstanceBindings; + /** Standard constructor. All values must be not {@code null}. */ - public Whitelist(ClassLoader classLoader, List whitelistClasses, - List whitelistImportedMethods, List whitelistClassBindings) { + public Whitelist(ClassLoader classLoader, List whitelistClasses, List whitelistImportedMethods, + List whitelistClassBindings, List whitelistInstanceBindings) { this.classLoader = Objects.requireNonNull(classLoader); this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistImportedMethods)); this.whitelistClassBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistClassBindings)); + this.whitelistInstanceBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistInstanceBindings)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClassBinding.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClassBinding.java index f1e762b37c0..da199174648 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClassBinding.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClassBinding.java @@ -42,9 +42,7 @@ public class WhitelistClassBinding { /** The method name for this class binding. 
*/ public final String methodName; - /** - * The canonical type name for the return type. - */ + /** The canonical type name for the return type. */ public final String returnCanonicalTypeName; /** diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistInstanceBinding.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistInstanceBinding.java new file mode 100644 index 00000000000..46c2f0f91fe --- /dev/null +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistInstanceBinding.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.spi; + +import java.util.List; +import java.util.Objects; + +/** + * An instance binding represents a method call that stores state. Each instance binding must provide + * exactly one public method name. The canonical type name parameters provided must match those of the + * method. The method for an instance binding will target the specified Java instance. + */ +public class WhitelistInstanceBinding { + + /** Information about where this constructor was whitelisted from. */ + public final String origin; + + /** The Java instance this instance binding targets. */ + public final Object targetInstance; + + /** The method name for this class binding. */ + public final String methodName; + + /** The canonical type name for the return type. */ + public final String returnCanonicalTypeName; + + /** + * A {@link List} of {@link String}s that are the Painless type names for the parameters of the + * constructor which can be used to look up the Java constructor through reflection. + */ + public final List canonicalTypeNameParameters; + + /** Standard constructor. All values must be not {@code null}. 
*/ + public WhitelistInstanceBinding(String origin, Object targetInstance, + String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { + + this.origin = Objects.requireNonNull(origin); + this.targetInstance = Objects.requireNonNull(targetInstance); + + this.methodName = Objects.requireNonNull(methodName); + this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName); + this.canonicalTypeNameParameters = Objects.requireNonNull(canonicalTypeNameParameters); + } +} diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java index 560010a35e9..d896c345a47 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java @@ -29,6 +29,7 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; /** Loads and creates a {@link Whitelist} from one to many text files. */ @@ -392,7 +393,7 @@ public final class WhitelistLoader { ClassLoader loader = AccessController.doPrivileged((PrivilegedAction)resource::getClassLoader); - return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings); + return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistClassBindings, Collections.emptyList()); } private WhitelistLoader() {} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 0fbdfa763ea..81cc802916d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless; import org.elasticsearch.bootstrap.BootstrapInfo; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.antlr.Walker; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.node.SSource; @@ -222,8 +221,8 @@ final class Compiler { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass); SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, null); - Map localMethods = root.analyze(painlessLookup); - root.write(); + root.analyze(painlessLookup); + Map statics = root.write(); try { Class clazz = loader.defineScript(CLASS_NAME, root.getBytes()); @@ -231,7 +230,10 @@ final class Compiler { clazz.getField("$SOURCE").set(null, source); clazz.getField("$STATEMENTS").set(null, root.getStatements()); clazz.getField("$DEFINITION").set(null, painlessLookup); - clazz.getField("$LOCALS").set(null, localMethods); + + for (Map.Entry statik : statics.entrySet()) { + clazz.getField(statik.getKey()).set(null, statik.getValue()); + } return clazz.getConstructors()[0]; } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java index d18cf2780cf..a6a15b8ce1e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java @@ -31,7 +31,8 @@ import java.util.Map; public class Globals { private final Map syntheticMethods = new HashMap<>(); private final Map constantInitializers = new HashMap<>(); - private final Map> bindings = new HashMap<>(); + private final Map> classBindings = new HashMap<>(); + private final Map instanceBindings = new HashMap<>(); private final BitSet statements; /** Create a new Globals from the set of statement boundaries */ @@ -56,14 +57,19 @@ public class Globals { } } - /** Adds a new binding to be written as a local variable */ - public String addBinding(Class type) { - String name = "$binding$" + bindings.size(); - bindings.put(name, type); + /** Adds a new class binding to be written as a local variable */ + public String addClassBinding(Class type) { + String name = "$class_binding$" + classBindings.size(); + classBindings.put(name, type); return name; } + /** Adds a new binding to be written as a local variable */ + public String addInstanceBinding(Object instance) { + return instanceBindings.computeIfAbsent(instance, key -> "$instance_binding$" + instanceBindings.size()); + } + /** Returns the current synthetic methods */ public Map getSyntheticMethods() { return syntheticMethods; @@ -75,8 +81,13 @@ public class Globals { } /** Returns the current bindings */ - public Map> getBindings() { - return bindings; + public Map> getClassBindings() { + return classBindings; + } + + /** Returns the current bindings */ + public Map getInstanceBindings() { + return instanceBindings; } /** Returns the set of statement boundaries */ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java index 0f28830b3d4..aedbc936bb1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java @@ -60,7 +60,6 @@ public class PainlessClassBinding { @Override public int hashCode() { - return Objects.hash(javaConstructor, javaMethod, returnType, typeParameters); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessInstanceBinding.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessInstanceBinding.java new file mode 100644 index 00000000000..6952a3f05fb --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessInstanceBinding.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +import java.lang.reflect.Method; +import java.util.List; +import java.util.Objects; + +public class PainlessInstanceBinding { + + public final Object targetInstance; + public final Method javaMethod; + + public final Class returnType; + public final List> typeParameters; + + PainlessInstanceBinding(Object targetInstance, Method javaMethod, Class returnType, List> typeParameters) { + this.targetInstance = targetInstance; + this.javaMethod = javaMethod; + + this.returnType = returnType; + this.typeParameters = typeParameters; + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessInstanceBinding that = (PainlessInstanceBinding)object; + + return targetInstance == that.targetInstance && + Objects.equals(javaMethod, that.javaMethod) && + Objects.equals(returnType, that.returnType) && + Objects.equals(typeParameters, that.typeParameters); + } + + @Override + public int hashCode() { + return Objects.hash(targetInstance, javaMethod, returnType, typeParameters); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index ce31db43eef..5ac5d5bf784 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -40,13 +40,15 @@ public final class PainlessLookup { private final Map painlessMethodKeysToImportedPainlessMethods; private final Map painlessMethodKeysToPainlessClassBindings; + private final Map painlessMethodKeysToPainlessInstanceBindings; PainlessLookup( Map> javaClassNamesToClasses, Map> canonicalClassNamesToClasses, Map, PainlessClass> classesToPainlessClasses, Map painlessMethodKeysToImportedPainlessMethods, - Map painlessMethodKeysToPainlessClassBindings) { + Map painlessMethodKeysToPainlessClassBindings, + Map painlessMethodKeysToPainlessInstanceBindings) { Objects.requireNonNull(javaClassNamesToClasses); Objects.requireNonNull(canonicalClassNamesToClasses); @@ -54,6 +56,7 @@ public final class PainlessLookup { Objects.requireNonNull(painlessMethodKeysToImportedPainlessMethods); Objects.requireNonNull(painlessMethodKeysToPainlessClassBindings); + Objects.requireNonNull(painlessMethodKeysToPainlessInstanceBindings); this.javaClassNamesToClasses = javaClassNamesToClasses; this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses); @@ -61,6 +64,7 @@ public final class PainlessLookup { this.painlessMethodKeysToImportedPainlessMethods = Collections.unmodifiableMap(painlessMethodKeysToImportedPainlessMethods); this.painlessMethodKeysToPainlessClassBindings = Collections.unmodifiableMap(painlessMethodKeysToPainlessClassBindings); + this.painlessMethodKeysToPainlessInstanceBindings = Collections.unmodifiableMap(painlessMethodKeysToPainlessInstanceBindings); } public 
Class javaClassNameToClass(String javaClassName) { @@ -200,6 +204,14 @@ public final class PainlessLookup { return painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); } + public PainlessInstanceBinding lookupPainlessInstanceBinding(String methodName, int arity) { + Objects.requireNonNull(methodName); + + String painlessMethodKey = buildPainlessMethodKey(methodName, arity); + + return painlessMethodKeysToPainlessInstanceBindings.get(painlessMethodKey); + } + public PainlessMethod lookupFunctionalInterfacePainlessMethod(Class targetClass) { PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 552ad56f68a..495a4ea94c9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -24,6 +24,7 @@ import org.elasticsearch.painless.spi.WhitelistClass; import org.elasticsearch.painless.spi.WhitelistClassBinding; import org.elasticsearch.painless.spi.WhitelistConstructor; import org.elasticsearch.painless.spi.WhitelistField; +import org.elasticsearch.painless.spi.WhitelistInstanceBinding; import org.elasticsearch.painless.spi.WhitelistMethod; import java.lang.invoke.MethodHandle; @@ -50,10 +51,11 @@ import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCan public final class PainlessLookupBuilder { - private static final Map painlessConstructorCache = new HashMap<>(); - private static final Map painlessMethodCache = new HashMap<>(); - private static final Map painlessFieldCache = new HashMap<>(); - private static final Map painlessClassBindingCache = new HashMap<>(); + private static final Map painlessConstructorCache = new HashMap<>(); + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); + private static final Map painlessClassBindingCache = new HashMap<>(); + private static final Map painlessInstanceBindingCache = new HashMap<>(); private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); @@ -108,9 +110,15 @@ public final class PainlessLookupBuilder { for (WhitelistClassBinding whitelistClassBinding : whitelist.whitelistClassBindings) { origin = whitelistClassBinding.origin; painlessLookupBuilder.addPainlessClassBinding( - whitelist.classLoader, whitelistClassBinding.targetJavaClassName, - whitelistClassBinding.methodName, whitelistClassBinding.returnCanonicalTypeName, - whitelistClassBinding.canonicalTypeNameParameters); + whitelist.classLoader, whitelistClassBinding.targetJavaClassName, whitelistClassBinding.methodName, + whitelistClassBinding.returnCanonicalTypeName, whitelistClassBinding.canonicalTypeNameParameters); + } + + for (WhitelistInstanceBinding whitelistInstanceBinding : whitelist.whitelistInstanceBindings) { + origin = whitelistInstanceBinding.origin; + painlessLookupBuilder.addPainlessInstanceBinding( + whitelistInstanceBinding.targetInstance, whitelistInstanceBinding.methodName, + whitelistInstanceBinding.returnCanonicalTypeName, whitelistInstanceBinding.canonicalTypeNameParameters); } } } catch (Exception exception) { @@ -134,6 +142,7 @@ public 
final class PainlessLookupBuilder { private final Map painlessMethodKeysToImportedPainlessMethods; private final Map painlessMethodKeysToPainlessClassBindings; + private final Map painlessMethodKeysToPainlessInstanceBindings; public PainlessLookupBuilder() { javaClassNamesToClasses = new HashMap<>(); @@ -142,6 +151,7 @@ public final class PainlessLookupBuilder { painlessMethodKeysToImportedPainlessMethods = new HashMap<>(); painlessMethodKeysToPainlessClassBindings = new HashMap<>(); + painlessMethodKeysToPainlessInstanceBindings = new HashMap<>(); } private Class canonicalTypeNameToType(String canonicalTypeName) { @@ -763,6 +773,10 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("imported method and class binding cannot have the same name [" + methodName + "]"); } + if (painlessMethodKeysToPainlessInstanceBindings.containsKey(painlessMethodKey)) { + throw new IllegalArgumentException("imported method and instance binding cannot have the same name [" + methodName + "]"); + } + MethodHandle methodHandle; try { @@ -783,7 +797,7 @@ public final class PainlessLookupBuilder { painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, newImportedPainlessMethod); } else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) { throw new IllegalArgumentException("cannot add imported methods with the same name and arity " + - "but are not equivalent for methods " + + "but do not have equivalent methods " + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + "[" + typeToCanonicalTypeName(returnType) + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + @@ -942,6 +956,11 @@ public final class PainlessLookupBuilder { } } + if (isValidType(returnType) == false) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for class binding " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + if (javaMethod.getReturnType() != typeToJavaType(returnType)) { throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + @@ -955,6 +974,15 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("class binding and imported method cannot have the same name [" + methodName + "]"); } + if (painlessMethodKeysToPainlessInstanceBindings.containsKey(painlessMethodKey)) { + throw new IllegalArgumentException("class binding and instance binding cannot have the same name [" + methodName + "]"); + } + + if (Modifier.isStatic(javaMethod.getModifiers())) { + throw new IllegalArgumentException("class binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "] cannot be static"); + } + PainlessClassBinding existingPainlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); PainlessClassBinding newPainlessClassBinding = new PainlessClassBinding(javaConstructor, javaMethod, returnType, typeParameters); @@ -962,9 +990,9 @@ public final class PainlessLookupBuilder { if (existingPainlessClassBinding == null) { newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, key -> key); painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, newPainlessClassBinding); - } else if 
(newPainlessClassBinding.equals(existingPainlessClassBinding)) { + } else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) { throw new IllegalArgumentException("cannot add class bindings with the same name and arity " + - "but are not equivalent for methods " + + "but do not have equivalent methods " + "[[" + targetCanonicalClassName + "], " + "[" + methodName + "], " + "[" + typeToCanonicalTypeName(returnType) + "], " + @@ -976,6 +1004,136 @@ public final class PainlessLookupBuilder { } } + public void addPainlessInstanceBinding(Object targetInstance, + String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { + + Objects.requireNonNull(targetInstance); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnCanonicalTypeName); + Objects.requireNonNull(canonicalTypeNameParameters); + + Class targetClass = targetInstance.getClass(); + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); + List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); + + for (String canonicalTypeNameParameter : canonicalTypeNameParameters) { + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); + + if (typeParameter == null) { + throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for instance binding " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + } + + typeParameters.add(typeParameter); + } + + Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); + + if (returnType == null) { + throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for class binding " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + } + + addPainlessInstanceBinding(targetInstance, methodName, returnType, typeParameters); + } + + public void addPainlessInstanceBinding(Object targetInstance, String methodName, Class returnType, List> typeParameters) { + Objects.requireNonNull(targetInstance); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnType); + Objects.requireNonNull(typeParameters); + + Class targetClass = targetInstance.getClass(); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add instance binding as reserved class [" + DEF_CLASS_NAME + "]"); + } + + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); + Class existingTargetClass = javaClassNamesToClasses.get(targetClass.getName()); + + if (existingTargetClass == null) { + javaClassNamesToClasses.put(targetClass.getName(), targetClass); + } else if (existingTargetClass != targetClass) { + throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] " + + "cannot represent multiple java classes with the same name from different class loaders"); + } + + if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + throw new IllegalArgumentException( + "invalid method name [" + methodName + "] for instance binding [" + targetCanonicalClassName + "]."); + } + + int typeParametersSize = typeParameters.size(); + List> javaTypeParameters = new ArrayList<>(typeParametersSize); + + for (Class typeParameter : typeParameters) { + if (isValidType(typeParameter) == false) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + + "not found for instance binding [[" + targetCanonicalClassName + "], [" + methodName + "], 
" + + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + javaTypeParameters.add(typeToJavaType(typeParameter)); + } + + if (isValidType(returnType) == false) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for imported method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + Method javaMethod; + + try { + javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("instance binding reflection object [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + } + + if (javaMethod.getReturnType() != typeToJavaType(returnType)) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + + "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + + "for instance binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + if (Modifier.isStatic(javaMethod.getModifiers())) { + throw new IllegalArgumentException("instance binding [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "] cannot be static"); + } + + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + + if (painlessMethodKeysToImportedPainlessMethods.containsKey(painlessMethodKey)) { + throw new IllegalArgumentException("instance binding and imported method cannot have the same name [" + methodName + "]"); + } + + if (painlessMethodKeysToPainlessClassBindings.containsKey(painlessMethodKey)) { + throw new IllegalArgumentException("instance binding and class binding cannot have the same name [" + methodName + "]"); + } + + PainlessInstanceBinding existingPainlessInstanceBinding = painlessMethodKeysToPainlessInstanceBindings.get(painlessMethodKey); + PainlessInstanceBinding newPainlessInstanceBinding = + new PainlessInstanceBinding(targetInstance, javaMethod, returnType, typeParameters); + + if (existingPainlessInstanceBinding == null) { + newPainlessInstanceBinding = painlessInstanceBindingCache.computeIfAbsent(newPainlessInstanceBinding, key -> key); + painlessMethodKeysToPainlessInstanceBindings.put(painlessMethodKey, newPainlessInstanceBinding); + } else if (newPainlessInstanceBinding.equals(existingPainlessInstanceBinding) == false) { + throw new IllegalArgumentException("cannot add instances bindings with the same name and arity " + + "but do not have equivalent methods " + + "[[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + + "[" + typeToCanonicalTypeName(existingPainlessInstanceBinding.returnType) + "], " + + typesToCanonicalTypeNames(existingPainlessInstanceBinding.typeParameters) + "]"); + } + } + public PainlessLookup build() { copyPainlessClassMembers(); cacheRuntimeHandles(); @@ -1003,8 +1161,13 @@ public final class PainlessLookupBuilder { "must have the same classes as the keys of classes to painless classes"); } - return new PainlessLookup(javaClassNamesToClasses, canonicalClassNamesToClasses, classesToPainlessClasses, 
- painlessMethodKeysToImportedPainlessMethods, painlessMethodKeysToPainlessClassBindings); + return new PainlessLookup( + javaClassNamesToClasses, + canonicalClassNamesToClasses, + classesToPainlessClasses, + painlessMethodKeysToImportedPainlessMethods, + painlessMethodKeysToPainlessClassBindings, + painlessMethodKeysToPainlessInstanceBindings); } private void copyPainlessClassMembers() { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index e613018dbc5..2d49f4df648 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -25,6 +25,7 @@ import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClassBinding; +import org.elasticsearch.painless.lookup.PainlessInstanceBinding; import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Label; import org.objectweb.asm.Type; @@ -48,6 +49,7 @@ public final class ECallLocal extends AExpression { private LocalMethod localMethod = null; private PainlessMethod importedMethod = null; private PainlessClassBinding classBinding = null; + private PainlessInstanceBinding instanceBinding = null; public ECallLocal(Location location, String name, List arguments) { super(location); @@ -74,8 +76,12 @@ public final class ECallLocal extends AExpression { classBinding = locals.getPainlessLookup().lookupPainlessClassBinding(name, arguments.size()); if (classBinding == null) { - throw createError( - new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); + instanceBinding = locals.getPainlessLookup().lookupPainlessInstanceBinding(name, arguments.size()); + + if (instanceBinding == null) { + throw createError( + new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); + } } } } @@ -91,6 +97,9 @@ public final class ECallLocal extends AExpression { } else if (classBinding != null) { typeParameters = new ArrayList<>(classBinding.typeParameters); actual = classBinding.returnType; + } else if (instanceBinding != null) { + typeParameters = new ArrayList<>(instanceBinding.typeParameters); + actual = instanceBinding.returnType; } else { throw new IllegalStateException("Illegal tree structure."); } @@ -125,7 +134,7 @@ public final class ECallLocal extends AExpression { writer.invokeStatic(Type.getType(importedMethod.targetClass), new Method(importedMethod.javaMethod.getName(), importedMethod.methodType.toMethodDescriptorString())); } else if (classBinding != null) { - String name = globals.addBinding(classBinding.javaConstructor.getDeclaringClass()); + String name = globals.addClassBinding(classBinding.javaConstructor.getDeclaringClass()); Type type = Type.getType(classBinding.javaConstructor.getDeclaringClass()); int javaConstructorParameterCount = classBinding.javaConstructor.getParameterCount(); @@ -154,6 +163,18 @@ public final class ECallLocal extends AExpression { } writer.invokeVirtual(type, Method.getMethod(classBinding.javaMethod)); + } else if (instanceBinding != null) { + String name = globals.addInstanceBinding(instanceBinding.targetInstance); + Type type = Type.getType(instanceBinding.targetInstance.getClass()); + + writer.loadThis(); + 
writer.getStatic(CLASS_TYPE, name, type); + + for (int argument = 0; argument < instanceBinding.javaMethod.getParameterCount(); ++argument) { + arguments.get(argument).write(writer, globals); + } + + writer.invokeVirtual(type, Method.getMethod(instanceBinding.javaMethod)); } else { throw new IllegalStateException("Illegal tree structure."); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index 01946066af9..ca20cf19a5f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -164,7 +164,7 @@ public final class SSource extends AStatement { throw new IllegalStateException("Illegal tree structure."); } - public Map analyze(PainlessLookup painlessLookup) { + public void analyze(PainlessLookup painlessLookup) { Map methods = new HashMap<>(); for (SFunction function : functions) { @@ -180,8 +180,6 @@ public final class SSource extends AStatement { Locals locals = Locals.newProgramScope(painlessLookup, methods.values()); analyze(locals); - - return locals.getMethods(); } @Override @@ -228,7 +226,7 @@ public final class SSource extends AStatement { } } - public void write() { + public Map write() { // Create the ClassWriter. int classFrames = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS; @@ -359,13 +357,20 @@ public final class SSource extends AStatement { clinit.endMethod(); } - // Write binding variables - for (Map.Entry> binding : globals.getBindings().entrySet()) { - String name = binding.getKey(); - String descriptor = Type.getType(binding.getValue()).getDescriptor(); + // Write class binding variables + for (Map.Entry> classBinding : globals.getClassBindings().entrySet()) { + String name = classBinding.getKey(); + String descriptor = Type.getType(classBinding.getValue()).getDescriptor(); visitor.visitField(Opcodes.ACC_PRIVATE, name, descriptor, null, null).visitEnd(); } + // Write instance binding variables + for (Map.Entry instanceBinding : globals.getInstanceBindings().entrySet()) { + String name = instanceBinding.getValue(); + String descriptor = Type.getType(instanceBinding.getKey().getClass()).getDescriptor(); + visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, name, descriptor, null, null).visitEnd(); + } + // Write any needsVarName methods for used variables for (org.objectweb.asm.commons.Method needsMethod : scriptClassInfo.getNeedsMethods()) { String name = needsMethod.getName(); @@ -382,6 +387,15 @@ public final class SSource extends AStatement { visitor.visitEnd(); bytes = writer.toByteArray(); + + Map statics = new HashMap<>(); + statics.put("$LOCALS", mainMethod.getMethods()); + + for (Map.Entry instanceBinding : globals.getInstanceBindings().entrySet()) { + statics.put(instanceBinding.getValue(), instanceBinding.getKey()); + } + + return statics; } @Override diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java index 167deb3a20b..3f3d589702a 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BindingsTests.java @@ -20,14 +20,32 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.spi.Whitelist; +import 
org.elasticsearch.painless.spi.WhitelistInstanceBinding; import org.elasticsearch.script.ScriptContext; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; public class BindingsTests extends ScriptTestCase { + public static class InstanceBindingTestClass { + private int value; + + public InstanceBindingTestClass(int value) { + this.value = value; + } + + public void setInstanceBindingValue(int value) { + this.value = value; + } + + public int getInstanceBindingValue() { + return value; + } + } + public abstract static class BindingsTestScript { public static final String[] PARAMETERS = { "test", "bound" }; public abstract int execute(int test, int bound); @@ -40,15 +58,29 @@ public class BindingsTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = super.scriptContexts(); - contexts.put(BindingsTestScript.CONTEXT, Whitelist.BASE_WHITELISTS); + List whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS); + + InstanceBindingTestClass instanceBindingTestClass = new InstanceBindingTestClass(1); + WhitelistInstanceBinding getter = new WhitelistInstanceBinding("test", instanceBindingTestClass, + "setInstanceBindingValue", "void", Collections.singletonList("int")); + WhitelistInstanceBinding setter = new WhitelistInstanceBinding("test", instanceBindingTestClass, + "getInstanceBindingValue", "int", Collections.emptyList()); + List instanceBindingsList = new ArrayList<>(); + instanceBindingsList.add(getter); + instanceBindingsList.add(setter); + Whitelist instanceBindingsWhitelist = new Whitelist(instanceBindingTestClass.getClass().getClassLoader(), + Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), instanceBindingsList); + whitelists.add(instanceBindingsWhitelist); + + contexts.put(BindingsTestScript.CONTEXT, whitelists); return contexts; } - public void testBasicBinding() { + public void testBasicClassBinding() { assertEquals(15, exec("testAddWithState(4, 5, 6, 0.0)")); } - public void testRepeatedBinding() { + public void testRepeatedClassBinding() { String script = "testAddWithState(4, 5, test, 0.0)"; BindingsTestScript.Factory factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); BindingsTestScript executableScript = factory.newInstance(); @@ -58,7 +90,7 @@ public class BindingsTests extends ScriptTestCase { assertEquals(16, executableScript.execute(7, 0)); } - public void testBoundBinding() { + public void testBoundClassBinding() { String script = "testAddWithState(4, bound, test, 0.0)"; BindingsTestScript.Factory factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); BindingsTestScript executableScript = factory.newInstance(); @@ -66,4 +98,21 @@ public class BindingsTests extends ScriptTestCase { assertEquals(10, executableScript.execute(5, 1)); assertEquals(9, executableScript.execute(4, 2)); } + + public void testInstanceBinding() { + String script = "getInstanceBindingValue() + test + bound"; + BindingsTestScript.Factory factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); + BindingsTestScript executableScript = factory.newInstance(); + assertEquals(3, executableScript.execute(1, 1)); + + script = "setInstanceBindingValue(test + bound); getInstanceBindingValue()"; + factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); + executableScript = factory.newInstance(); + assertEquals(4, 
executableScript.execute(-2, 6)); + + script = "getInstanceBindingValue() + test + bound"; + factory = scriptEngine.compile(null, script, BindingsTestScript.CONTEXT, Collections.emptyMap()); + executableScript = factory.newInstance(); + assertEquals(8, executableScript.execute(-2, 6)); + } } diff --git a/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistExtension.java b/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistExtension.java index ca35db5a81b..d0b03708a08 100644 --- a/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistExtension.java +++ b/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistExtension.java @@ -21,10 +21,12 @@ package org.elasticsearch.example.painlesswhitelist; import org.elasticsearch.painless.spi.PainlessExtension; import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.painless.spi.WhitelistInstanceBinding; import org.elasticsearch.painless.spi.WhitelistLoader; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.ScriptContext; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -37,6 +39,14 @@ public class ExampleWhitelistExtension implements PainlessExtension { @Override public Map, List> getContextWhitelists() { - return Collections.singletonMap(FieldScript.CONTEXT, Collections.singletonList(WHITELIST)); + ExampleWhitelistedInstance ewi = new ExampleWhitelistedInstance(1); + WhitelistInstanceBinding addValue = new WhitelistInstanceBinding("example addValue", ewi, + "addValue", "int", Collections.singletonList("int")); + WhitelistInstanceBinding getValue = new WhitelistInstanceBinding("example getValue", ewi, + "getValue", "int", Collections.emptyList()); + Whitelist instanceWhitelist = new Whitelist(ewi.getClass().getClassLoader(), Collections.emptyList(), + Collections.emptyList(), Collections.emptyList(), Arrays.asList(addValue, getValue)); + + return Collections.singletonMap(FieldScript.CONTEXT, Arrays.asList(WHITELIST, instanceWhitelist)); } } diff --git a/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistedInstance.java b/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistedInstance.java new file mode 100644 index 00000000000..1d48cecb9b4 --- /dev/null +++ b/plugins/examples/painless-whitelist/src/main/java/org/elasticsearch/example/painlesswhitelist/ExampleWhitelistedInstance.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.example.painlesswhitelist; + +public class ExampleWhitelistedInstance { + private final int value; + + public ExampleWhitelistedInstance(int value) { + this.value = value; + } + + public int addValue(int value) { + return this.value + value; + } + + public int getValue() { + return value; + } +} diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml new file mode 100644 index 00000000000..6cb7e4f3d40 --- /dev/null +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml @@ -0,0 +1,41 @@ +# Example tests using an instance binding + +"custom instance binding": +- do: + index: + index: test + type: test + id: 1 + body: { "num1": 1 } +- do: + indices.refresh: {} + +- do: + index: test + search: + body: + query: + match_all: {} + script_fields: + sNum1: + script: + source: "addValue((int)doc['num1'][0])" + lang: painless + +- match: { hits.total: 1 } +- match: { hits.hits.0.fields.sNum1.0: 2 } + +- do: + index: test + search: + body: + query: + match_all: {} + script_fields: + sNum1: + script: + source: "getValue() + doc['num1'][0]" + lang: painless + +- match: { hits.total: 1 } +- match: { hits.hits.0.fields.sNum1.0: 2 } From 6cceba9b54083cc3c3f0efa7327a2c5dcabf7b11 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 24 Oct 2018 15:59:58 -0400 Subject: [PATCH 44/67] Correct CRUDDocumentationIT.java relates to #33447 --- .../client/documentation/CRUDDocumentationIT.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 1f1b11a0ff5..dd2c6d48322 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1564,7 +1564,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::term-vectors-execute TermVectorsResponse response = client.termvectors(request, RequestOptions.DEFAULT); - // end:::term-vectors-execute + // end::term-vectors-execute // tag::term-vectors-response @@ -1572,7 +1572,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { String type = response.getType(); // <2> String id = response.getId(); // <3> boolean found = response.getFound(); // <4> - // end:::term-vectors-response + // end::term-vectors-response // tag::term-vectors-term-vectors if (response.getTermVectorsList() != null) { @@ -1603,7 +1603,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } } - // end:::term-vectors-term-vectors + // end::term-vectors-term-vectors // tag::term-vectors-execute-listener ActionListener listener = new ActionListener() { From 3211760e2cd5909217c9acc150bc676f842990d7 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 24 Oct 2018 16:35:20 -0400 Subject: [PATCH 45/67] Correct term-vectors.asciidoc relates to #33447 --- .../high-level/document/term-vectors.asciidoc | 32 ------------------- 1 file changed, 32 deletions(-) diff --git a/docs/java-rest/high-level/document/term-vectors.asciidoc b/docs/java-rest/high-level/document/term-vectors.asciidoc 
index ec24a0ecef4..1e119d21a91 100644 --- a/docs/java-rest/high-level/document/term-vectors.asciidoc +++ b/docs/java-rest/high-level/document/term-vectors.asciidoc @@ -100,35 +100,3 @@ include-tagged::{doc-tests-file}[{api}-term-vectors] <14> Start offset of the token <15> End offset of the token <16> Payload of the token - - -[id="{upid}-{api}-response"] -==== TermVectorsResponse - -The `TermVectorsResponse` contains the following information: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests-file}[{api}-response] --------------------------------------------------- -<1> The index name of the document. -<2> The type name of the document. -<3> The id of the document. -<4> Indicates whether or not the document found. -<5> Indicates whether or not there are term vectors for this document. -<6> The list of `TermVector` for the document -<7> The name of the current field -<8> Fields statistics for the current field - document count -<9> Fields statistics for the current field - sum of total term frequencies -<10> Fields statistics for the current field - sum of document frequencies -<11> Terms for the current field -<12> The name of the term -<13> Term frequency of the term -<14> Document frequency of the term -<15> Total term frequency of the term -<16> Score of the term -<17> Tokens of the term -<18> Position of the token -<19> Start offset of the token -<20> End offset of the token -<21> Payload of the token \ No newline at end of file From d94406a68af6d0d293fed7845c1efcfd49d84516 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Wed, 24 Oct 2018 17:01:22 -0400 Subject: [PATCH 46/67] HLRC: Deactivate Watch API (#34192) Relates to #29827 --- .../elasticsearch/client/WatcherClient.java | 31 +++++++++ .../client/WatcherRequestConverters.java | 12 ++++ .../watcher/DeactivateWatchRequest.java | 43 ++++++++++++ .../watcher/DeactivateWatchResponse.java | 65 +++++++++++++++++++ .../org/elasticsearch/client/WatcherIT.java | 21 ++++++ .../client/WatcherRequestConvertersTests.java | 10 +++ .../documentation/WatcherDocumentationIT.java | 55 ++++++++++++++++ .../watcher/DeactivateWatchRequestTests.java | 41 ++++++++++++ .../watcher/DeactivateWatchResponseTests.java | 58 +++++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../watcher/deactivate-watch.asciidoc | 10 +++ 11 files changed, 348 insertions(+) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchResponseTests.java create mode 100644 docs/java-rest/high-level/watcher/deactivate-watch.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java index 5378859a999..a2b11772c12 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java @@ -19,6 +19,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.watcher.DeactivateWatchRequest; +import 
org.elasticsearch.client.watcher.DeactivateWatchResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.watcher.ActivateWatchRequest; import org.elasticsearch.client.watcher.ActivateWatchResponse; @@ -125,6 +127,35 @@ public final class WatcherClient { PutWatchResponse::fromXContent, listener, emptySet()); } + /** + * Deactivate an existing watch + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeactivateWatchResponse deactivateWatch(DeactivateWatchRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::deactivateWatch, options, + DeactivateWatchResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously deactivate an existing watch + * See + * the docs for more. + * + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deactivateWatchAsync(DeactivateWatchRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::deactivateWatch, options, + DeactivateWatchResponse::fromXContent, listener, emptySet()); + } + /** * Deletes a watch from the cluster * See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java index 64ca53376d7..49764025273 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java @@ -24,6 +24,7 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; +import org.elasticsearch.client.watcher.DeactivateWatchRequest; import org.elasticsearch.client.watcher.ActivateWatchRequest; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.StartWatchServiceRequest; @@ -75,6 +76,17 @@ final class WatcherRequestConverters { return request; } + static Request deactivateWatch(DeactivateWatchRequest deactivateWatchRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") + .addPathPart(deactivateWatchRequest.getWatchId()) + .addPathPartAsIs("_deactivate") + .build(); + return new Request(HttpPut.METHOD_NAME, endpoint); + } + static Request deleteWatch(DeleteWatchRequest deleteWatchRequest) { String endpoint = new RequestConverters.EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchRequest.java new file mode 100644 index 00000000000..b20a56c361f --- /dev/null +++ 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchRequest.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.watcher; + +import org.elasticsearch.client.Validatable; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; + +import java.util.Objects; + +public class DeactivateWatchRequest implements Validatable { + private final String watchId; + + public DeactivateWatchRequest(String watchId) { + + Objects.requireNonNull(watchId, "watch id is missing"); + if (PutWatchRequest.isValidId(watchId) == false) { + throw new IllegalArgumentException("watch id contains whitespace"); + } + + this.watchId = watchId; + } + + public String getWatchId() { + return watchId; + } +} + diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchResponse.java new file mode 100644 index 00000000000..08edd211d5b --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/DeactivateWatchResponse.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.watcher; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class DeactivateWatchResponse { + private WatchStatus status; + + private static final ParseField STATUS_FIELD = new ParseField("status"); + private static final ConstructingObjectParser PARSER + = new ConstructingObjectParser<>("x_pack_deactivate_watch_response", true, + (fields) -> new DeactivateWatchResponse((WatchStatus) fields[0])); + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), + (parser, context) -> WatchStatus.parse(parser), + STATUS_FIELD); + } + + public static DeactivateWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public DeactivateWatchResponse(WatchStatus status) { + this.status = status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeactivateWatchResponse that = (DeactivateWatchResponse) o; + return Objects.equals(status, that.status); + } + + @Override + public int hashCode() { + return Objects.hash(status); + } + + public WatchStatus getStatus() { + return status; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java index b1d3dc0103c..b069d211b2e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java @@ -19,6 +19,10 @@ package org.elasticsearch.client; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.client.watcher.DeactivateWatchRequest; +import org.elasticsearch.client.watcher.DeactivateWatchResponse; +import org.elasticsearch.client.watcher.ActivateWatchRequest; +import org.elasticsearch.client.watcher.ActivateWatchResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.AckWatchResponse; @@ -73,6 +77,23 @@ public class WatcherIT extends ESRestHighLevelClientTestCase { return highLevelClient().watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); } + public void testDeactivateWatch() throws Exception { + // Deactivate a watch that exists + String watchId = randomAlphaOfLength(10); + createWatch(watchId); + DeactivateWatchResponse response = highLevelClient().watcher().deactivateWatch( + new DeactivateWatchRequest(watchId), RequestOptions.DEFAULT); + assertThat(response.getStatus().state().isActive(), is(false)); + } + public void testDeactivateWatch404() throws Exception { + // Deactivate a watch that does not exist + String watchId = randomAlphaOfLength(10); + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, + () -> highLevelClient().watcher().deactivateWatch(new DeactivateWatchRequest(watchId), RequestOptions.DEFAULT)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + + } + public void testDeleteWatch() throws Exception { // delete watch that exists { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java index 1c422c2b8ec..df6f697fb97 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.watcher.DeactivateWatchRequest; import org.elasticsearch.client.watcher.ActivateWatchRequest; import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.StartWatchServiceRequest; @@ -83,6 +84,15 @@ public class WatcherRequestConvertersTests extends ESTestCase { assertThat(bos.toString("UTF-8"), is(body)); } + public void testDeactivateWatch() { + String watchId = randomAlphaOfLength(10); + DeactivateWatchRequest deactivateWatchRequest = new DeactivateWatchRequest(watchId); + Request request = WatcherRequestConverters.deactivateWatch(deactivateWatchRequest); + + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId + "/_deactivate", request.getEndpoint()); + } + public void testDeleteWatch() { DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); String watchId = randomAlphaOfLength(10); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java index b9562754e91..165bda95dfc 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java @@ -32,6 +32,8 @@ import org.elasticsearch.client.watcher.AckWatchRequest; import org.elasticsearch.client.watcher.AckWatchResponse; import org.elasticsearch.client.watcher.ActionStatus; import org.elasticsearch.client.watcher.ActionStatus.AckStatus; +import org.elasticsearch.client.watcher.DeactivateWatchRequest; +import org.elasticsearch.client.watcher.DeactivateWatchResponse; import org.elasticsearch.client.watcher.StartWatchServiceRequest; import org.elasticsearch.client.watcher.StopWatchServiceRequest; import org.elasticsearch.client.watcher.WatchStatus; @@ -47,6 +49,8 @@ import org.elasticsearch.rest.RestStatus; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.is; + public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { public void testStartStopWatchService() throws Exception { @@ -297,6 +301,57 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase { } } + public void testDeactivateWatch() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + BytesReference watch = new BytesArray("{ \n" + + " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" + + " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" + + " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" + + "}"); + PutWatchRequest putWatchRequest = new PutWatchRequest("my_watch_id", watch, XContentType.JSON); + client.watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT); + } + + { + //tag::deactivate-watch-execute + 
DeactivateWatchRequest request = new DeactivateWatchRequest("my_watch_id"); + DeactivateWatchResponse response = client.watcher().deactivateWatch(request, RequestOptions.DEFAULT); + //end::deactivate-watch-execute + + assertThat(response.getStatus().state().isActive(), is(false)); + } + + { + DeactivateWatchRequest request = new DeactivateWatchRequest("my_watch_id"); + // tag::deactivate-watch-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(DeactivateWatchResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::deactivate-watch-execute-listener + + // For testing, replace the empty listener by a blocking listener. + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::deactivate-watch-execute-async + client.watcher().deactivateWatchAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::deactivate-watch-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testActivateWatch() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchRequestTests.java new file mode 100644 index 00000000000..d92a51f96c2 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchRequestTests.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.watcher; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class DeactivateWatchRequestTests extends ESTestCase { + + public void testNullId() { + NullPointerException actual = expectThrows(NullPointerException.class, () -> new DeactivateWatchRequest(null)); + assertNotNull(actual); + assertThat(actual.getMessage(), is("watch id is missing")); + } + + public void testInvalidId() { + IllegalArgumentException actual = expectThrows(IllegalArgumentException.class, + () -> new DeactivateWatchRequest("Watch id has spaces")); + assertNotNull(actual); + assertThat(actual.getMessage(), is("watch id contains whitespace")); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchResponseTests.java new file mode 100644 index 00000000000..dd56c8b054e --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/DeactivateWatchResponseTests.java @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.watcher; + + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class DeactivateWatchResponseTests extends ESTestCase { + + public void testBasicParsing() throws IOException { + XContentType contentType = randomFrom(XContentType.values()); + int version = randomInt(); + ExecutionState executionState = randomFrom(ExecutionState.values()); + XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject() + .startObject("status") + .field("version", version) + .field("execution_state", executionState) + .endObject() + .endObject(); + BytesReference bytes = BytesReference.bytes(builder); + DeactivateWatchResponse response = parse(contentType, bytes); + WatchStatus status = response.getStatus(); + assertNotNull(status); + assertEquals(version, status.version()); + assertEquals(executionState, status.getExecutionState()); + } + + private DeactivateWatchResponse parse(XContentType contentType, BytesReference bytes) throws IOException { + XContentParser parser = XContentFactory.xContent(contentType) + .createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput()); + parser.nextToken(); + return DeactivateWatchResponse.fromXContent(parser); + } +} diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 5801d79a5b7..c8d16954abe 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -346,6 +346,7 @@ The Java High Level REST Client supports the following Watcher APIs: * <<{upid}-stop-watch-service>> * <> * <> +* <> * <<{upid}-ack-watch>> * <<{upid}-activate-watch>> @@ -354,6 +355,7 @@ include::watcher/stop-watch-service.asciidoc[] include::watcher/put-watch.asciidoc[] include::watcher/delete-watch.asciidoc[] include::watcher/ack-watch.asciidoc[] +include::watcher/deactivate-watch.asciidoc[] include::watcher/activate-watch.asciidoc[] == Graph APIs diff --git a/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc b/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc new file mode 100644 index 00000000000..673423b69b9 --- /dev/null +++ b/docs/java-rest/high-level/watcher/deactivate-watch.asciidoc @@ -0,0 +1,10 @@ +-- +:api: deactivate-watch +:request: deactivateWatchRequet +:response: deactivateWatchResponse +:doc-tests-file: {doc-tests}/WatcherDocumentationIT.java +-- +[[java-rest-high-watcher-deactivate-watch]] +=== Deactivate Watch API + +include::../execution.asciidoc[] From 6fe0e62b7a29242cd19ec54bac0364e0e04e4be5 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 24 Oct 2018 23:48:49 +0200 Subject: [PATCH 47/67] [CCR] Added write buffer size limit (#34797) This limit is based on the size in bytes of the operations in the write buffer. If this limit is exceeded then no more read operations will be coordinated until the size in bytes of the write buffer has dropped below the configured write buffer size limit. Renamed existing `max_write_buffer_size` to ``max_write_buffer_count` to indicate that limit is count based. 
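For illustration only (not part of the change itself), a minimal sketch of the read-gating check this patch adds, with the byte-based limit sitting alongside the existing count-based one. The helper below is hypothetical; its names are chosen for clarity and are not guaranteed to match the shipped code:

    // Sketch (hypothetical helper): a new read is only coordinated while both
    // write-buffer limits hold. bufferSizeInBytes is the running sum of
    // Translog.Operation#estimateSize() over the currently buffered operations.
    static boolean canCoordinateRead(long bufferSizeInBytes, int bufferedOpCount,
                                     long maxWriteBufferSizeInBytes, int maxWriteBufferCount) {
        if (bufferSizeInBytes >= maxWriteBufferSizeInBytes) {
            return false; // size limit reached: pause reads until writes drain the buffer
        }
        if (bufferedOpCount > maxWriteBufferCount) {
            return false; // count limit reached
        }
        return true;
    }

In the task itself the byte counter grows when a read response is added to the buffer and shrinks when a write batch is taken off it, so reads resume automatically once enough buffered operations have been flushed to the follower.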
Closes #34705 --- .../ccr/action/AutoFollowCoordinator.java | 1 + .../xpack/ccr/action/ShardFollowNodeTask.java | 18 ++- .../xpack/ccr/action/ShardFollowTask.java | 36 ++++-- .../TransportPutAutoFollowPatternAction.java | 1 + .../action/TransportResumeFollowAction.java | 13 ++- .../elasticsearch/xpack/CcrIntegTestCase.java | 16 +++ .../xpack/CcrSingleNodeTestCase.java | 15 +++ .../elasticsearch/xpack/ccr/AutoFollowIT.java | 8 +- .../xpack/ccr/AutoFollowMetadataTests.java | 1 + .../elasticsearch/xpack/ccr/CcrLicenseIT.java | 2 +- .../xpack/ccr/IndexFollowingIT.java | 2 +- .../xpack/ccr/LocalIndexFollowingIT.java | 1 + .../action/AutoFollowCoordinatorTests.java | 16 +-- .../GetAutoFollowPatternResponseTests.java | 1 + .../PutAutoFollowPatternRequestTests.java | 5 +- .../ResumeFollowActionRequestTests.java | 5 +- .../ShardFollowNodeTaskRandomTests.java | 3 + .../ShardFollowNodeTaskStatusTests.java | 1 + .../ccr/action/ShardFollowNodeTaskTests.java | 107 +++++++++++++----- .../ShardFollowTaskReplicationTests.java | 4 +- .../ccr/action/ShardFollowTaskTests.java | 1 + .../xpack/ccr/action/StatsResponsesTests.java | 1 + ...ortDeleteAutoFollowPatternActionTests.java | 6 +- ...nsportGetAutoFollowPatternActionTests.java | 8 +- ...nsportPutAutoFollowPatternActionTests.java | 2 +- .../action/TransportUnfollowActionTests.java | 3 + .../ccr/FollowStatsMonitoringDocTests.java | 6 +- .../xpack/core/ccr/AutoFollowMetadata.java | 41 +++++-- .../core/ccr/ShardFollowNodeTaskStatus.java | 25 +++- .../action/PutAutoFollowPatternAction.java | 35 ++++-- .../core/ccr/action/PutFollowAction.java | 8 +- .../core/ccr/action/ResumeFollowAction.java | 44 +++++-- .../src/main/resources/monitoring-es.json | 3 + 33 files changed, 342 insertions(+), 97 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index a05dc0914e5..b5ba39ae7e2 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -328,6 +328,7 @@ public class AutoFollowCoordinator implements ClusterStateApplier { followRequest.setMaxConcurrentReadBatches(pattern.getMaxConcurrentReadBatches()); followRequest.setMaxBatchSize(pattern.getMaxBatchSize()); followRequest.setMaxConcurrentWriteBatches(pattern.getMaxConcurrentWriteBatches()); + followRequest.setMaxWriteBufferCount(pattern.getMaxWriteBufferCount()); followRequest.setMaxWriteBufferSize(pattern.getMaxWriteBufferSize()); followRequest.setMaxRetryDelay(pattern.getMaxRetryDelay()); followRequest.setPollTimeout(pattern.getPollTimeout()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 19843ac4efb..9788195c7e5 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -85,6 +85,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private long numberOfOperationsIndexed = 0; private long lastFetchTime = -1; private final Queue buffer = new PriorityQueue<>(Comparator.comparing(Translog.Operation::seqNo)); + private long bufferSizeInBytes = 0; private final LinkedHashMap> 
fetchExceptions; private volatile ElasticsearchException fatalException; @@ -183,8 +184,12 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { params.getFollowShardId(), numConcurrentReads); return false; } - if (buffer.size() > params.getMaxWriteBufferSize()) { - LOGGER.trace("{} no new reads, buffer limit has been reached [{}]", params.getFollowShardId(), buffer.size()); + if (bufferSizeInBytes >= params.getMaxWriteBufferSize().getBytes()) { + LOGGER.trace("{} no new reads, buffer size limit has been reached [{}]", params.getFollowShardId(), bufferSizeInBytes); + return false; + } + if (buffer.size() > params.getMaxWriteBufferCount()) { + LOGGER.trace("{} no new reads, buffer count limit has been reached [{}]", params.getFollowShardId(), buffer.size()); return false; } return true; @@ -208,6 +213,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { break; } } + bufferSizeInBytes -= sumEstimatedSize; numConcurrentWrites++; LOGGER.trace("{}[{}] write [{}/{}] [{}]", params.getFollowShardId(), numConcurrentWrites, ops.get(0).seqNo(), ops.get(ops.size() - 1).seqNo(), ops.size()); @@ -281,7 +287,12 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { } else { assert response.getOperations()[0].seqNo() == from : "first operation is not what we asked for. From is [" + from + "], got " + response.getOperations()[0]; - buffer.addAll(Arrays.asList(response.getOperations())); + List operations = Arrays.asList(response.getOperations()); + long operationsSize = operations.stream() + .mapToLong(Translog.Operation::estimateSize) + .sum(); + buffer.addAll(operations); + bufferSizeInBytes += operationsSize; final long maxSeqNo = response.getOperations()[response.getOperations().length - 1].seqNo(); assert maxSeqNo == Arrays.stream(response.getOperations()).mapToLong(Translog.Operation::seqNo).max().getAsLong(); @@ -455,6 +466,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { numConcurrentReads, numConcurrentWrites, buffer.size(), + bufferSizeInBytes, currentMappingVersion, totalFetchTimeMillis, totalFetchTookTimeMillis, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index ea75ee2d9e1..13e3da77491 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -48,6 +48,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); public static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + public static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); @@ -56,7 +57,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, (a) -> new ShardFollowTask((String) a[0], new 
ShardId((String) a[1], (String) a[2], (int) a[3]), new ShardId((String) a[4], (String) a[5], (int) a[6]), (int) a[7], (int) a[8], (ByteSizeValue) a[9], - (int) a[10], (int) a[11], (TimeValue) a[12], (TimeValue) a[13], (Map) a[14])); + (int) a[10], (int) a[11], (ByteSizeValue) a[12], (TimeValue) a[13], (TimeValue) a[14], (Map) a[15])); static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REMOTE_CLUSTER_FIELD); @@ -74,7 +75,12 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { MAX_BATCH_SIZE, ObjectParser.ValueType.STRING); PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_CONCURRENT_WRITE_BATCHES); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_WRITE_BUFFER_SIZE); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_WRITE_BUFFER_COUNT); + PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_BUFFER_SIZE.getPreferredName()), + MAX_WRITE_BUFFER_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY.getPreferredName()), MAX_RETRY_DELAY, ObjectParser.ValueType.STRING); @@ -91,7 +97,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { private final int maxConcurrentReadBatches; private final ByteSizeValue maxBatchSize; private final int maxConcurrentWriteBatches; - private final int maxWriteBufferSize; + private final int maxWriteBufferCount; + private final ByteSizeValue maxWriteBufferSize; private final TimeValue maxRetryDelay; private final TimeValue pollTimeout; private final Map headers; @@ -104,7 +111,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { final int maxConcurrentReadBatches, final ByteSizeValue maxBatchSize, final int maxConcurrentWriteBatches, - final int maxWriteBufferSize, + final int maxWriteBufferCount, + final ByteSizeValue maxWriteBufferSize, final TimeValue maxRetryDelay, final TimeValue pollTimeout, final Map headers) { @@ -115,6 +123,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { this.maxConcurrentReadBatches = maxConcurrentReadBatches; this.maxBatchSize = maxBatchSize; this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + this.maxWriteBufferCount = maxWriteBufferCount; this.maxWriteBufferSize = maxWriteBufferSize; this.maxRetryDelay = maxRetryDelay; this.pollTimeout = pollTimeout; @@ -129,7 +138,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { this.maxConcurrentReadBatches = in.readVInt(); this.maxBatchSize = new ByteSizeValue(in); this.maxConcurrentWriteBatches = in.readVInt(); - this.maxWriteBufferSize = in.readVInt(); + this.maxWriteBufferCount = in.readVInt(); + this.maxWriteBufferSize = new ByteSizeValue(in); this.maxRetryDelay = in.readTimeValue(); this.pollTimeout = in.readTimeValue(); this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); @@ -159,7 +169,11 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { return maxConcurrentWriteBatches; } - public int getMaxWriteBufferSize() { + public int getMaxWriteBufferCount() { + return maxWriteBufferCount; + } + + public ByteSizeValue getMaxWriteBufferSize() { return maxWriteBufferSize; } @@ -197,7 +211,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { 
out.writeVInt(maxConcurrentReadBatches); maxBatchSize.writeTo(out); out.writeVInt(maxConcurrentWriteBatches); - out.writeVInt(maxWriteBufferSize); + out.writeVInt(maxWriteBufferCount); + maxWriteBufferSize.writeTo(out); out.writeTimeValue(maxRetryDelay); out.writeTimeValue(pollTimeout); out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); @@ -221,7 +236,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); - builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); + builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay.getStringRep()); builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); builder.field(HEADERS.getPreferredName(), headers); @@ -240,7 +256,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { maxConcurrentReadBatches == that.maxConcurrentReadBatches && maxConcurrentWriteBatches == that.maxConcurrentWriteBatches && maxBatchSize.equals(that.maxBatchSize) && - maxWriteBufferSize == that.maxWriteBufferSize && + maxWriteBufferCount == that.maxWriteBufferCount && + maxWriteBufferSize.equals(that.maxWriteBufferSize) && Objects.equals(maxRetryDelay, that.maxRetryDelay) && Objects.equals(pollTimeout, that.pollTimeout) && Objects.equals(headers, that.headers); @@ -256,6 +273,7 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { maxConcurrentReadBatches, maxConcurrentWriteBatches, maxBatchSize, + maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, pollTimeout, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index 8832275f9a9..79f1ed7a2ee 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -164,6 +164,7 @@ public class TransportPutAutoFollowPatternAction extends request.getMaxConcurrentReadBatches(), request.getMaxBatchSize(), request.getMaxConcurrentWriteBatches(), + request.getMaxWriteBufferCount(), request.getMaxWriteBufferSize(), request.getMaxRetryDelay(), request.getPollTimeout()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index 53ac116d38e..97905f92721 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -55,7 +55,8 @@ public class TransportResumeFollowAction extends HandledTransportAction { + FollowStatsAction.StatsResponses statsResponses = + leaderClient().execute(FollowStatsAction.INSTANCE, new FollowStatsAction.StatsRequest()).actionGet(); + for (FollowStatsAction.StatsResponse 
statsResponse : statsResponses.getStatsResponses()) { + ShardFollowNodeTaskStatus status = statsResponse.status(); + assertThat(status.numberOfQueuedWrites(), equalTo(0)); + assertThat(status.bufferSize(), equalTo(0L)); + } + }); + } + static void removeCCRRelatedMetadataFromClusterState(ClusterService clusterService) throws Exception { CountDownLatch latch = new CountDownLatch(1); clusterService.submitStateUpdateTask("remove-ccr-related-metadata", new ClusterStateUpdateTask() { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java index 169c009207c..611fb0c27fa 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrSingleNodeTestCase.java @@ -16,6 +16,8 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.ccr.LocalStateCcr; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; +import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.PutFollowAction; import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import org.junit.After; @@ -26,6 +28,7 @@ import java.util.Collections; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.CcrIntegTestCase.removeCCRRelatedMetadataFromClusterState; +import static org.hamcrest.Matchers.equalTo; public abstract class CcrSingleNodeTestCase extends ESSingleNodeTestCase { @@ -80,4 +83,16 @@ public abstract class CcrSingleNodeTestCase extends ESSingleNodeTestCase { return request; } + protected void ensureEmptyWriteBuffers() throws Exception { + assertBusy(() -> { + FollowStatsAction.StatsResponses statsResponses = + client().execute(FollowStatsAction.INSTANCE, new FollowStatsAction.StatsRequest()).actionGet(); + for (FollowStatsAction.StatsResponse statsResponse : statsResponses.getStatsResponses()) { + ShardFollowNodeTaskStatus status = statsResponse.status(); + assertThat(status.numberOfQueuedWrites(), equalTo(0)); + assertThat(status.bufferSize(), equalTo(0L)); + } + }); + } + } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java index 8b3de8f5f8d..50e6008d91d 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java @@ -126,7 +126,7 @@ public class AutoFollowIT extends CcrIntegTestCase { // Need to set this, because following an index in the same cluster request.setFollowIndexNamePattern("copy-{{leader_index}}"); if (randomBoolean()) { - request.setMaxWriteBufferSize(randomIntBetween(0, Integer.MAX_VALUE)); + request.setMaxWriteBufferCount(randomIntBetween(0, Integer.MAX_VALUE)); } if (randomBoolean()) { request.setMaxConcurrentReadBatches(randomIntBetween(0, Integer.MAX_VALUE)); @@ -137,6 +137,9 @@ public class AutoFollowIT extends CcrIntegTestCase { if (randomBoolean()) { request.setMaxBatchOperationCount(randomIntBetween(0, Integer.MAX_VALUE)); } + if (randomBoolean()) { + request.setMaxWriteBufferSize(new ByteSizeValue(randomNonNegativeLong())); + } if (randomBoolean()) { request.setMaxBatchSize(new 
ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); } @@ -157,6 +160,9 @@ public class AutoFollowIT extends CcrIntegTestCase { ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTasksMetaData.tasks().iterator().next().getParams(); assertThat(shardFollowTask.getLeaderShardId().getIndexName(), equalTo("logs-201901")); assertThat(shardFollowTask.getFollowShardId().getIndexName(), equalTo("copy-logs-201901")); + if (request.getMaxWriteBufferCount() != null) { + assertThat(shardFollowTask.getMaxWriteBufferCount(), equalTo(request.getMaxWriteBufferCount())); + } if (request.getMaxWriteBufferSize() != null) { assertThat(shardFollowTask.getMaxWriteBufferSize(), equalTo(request.getMaxWriteBufferSize())); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java index 67071bd1be5..0ca175cef82 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/AutoFollowMetadataTests.java @@ -49,6 +49,7 @@ public class AutoFollowMetadataTests extends AbstractSerializingTestCase patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -121,7 +121,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { when(client.getRemoteClusterClient(anyString())).thenReturn(client); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -179,7 +179,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .build(); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -242,7 +242,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .build(); AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -296,7 +296,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { public void testGetLeaderIndicesToFollow() { AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null); + new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null, null); Map> headers = new HashMap<>(); ClusterState followerState = ClusterState.builder(new ClusterName("remote")) .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, @@ -341,15 +341,15 @@ public class 
AutoFollowCoordinatorTests extends ESTestCase { public void testGetFollowerIndexName() { AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, - null, null, null, null, null, null); + null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("metrics-0")); autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-metrics-0", null, null, - null, null, null, null, null); + null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null, - null, null, null, null, null, null); + null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java index e67509f7ee8..301dabeef89 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java @@ -37,6 +37,7 @@ public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCas new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES), randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE), + new ByteSizeValue(randomNonNegativeLong()), TimeValue.timeValueMillis(500), TimeValue.timeValueMillis(500)); patterns.put(randomAlphaOfLength(4), autoFollowPattern); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java index 67957d1e366..e4e365312ad 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java @@ -66,7 +66,10 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); } if (randomBoolean()) { - request.setMaxWriteBufferSize(randomIntBetween(0, Integer.MAX_VALUE)); + request.setMaxWriteBufferCount(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxWriteBufferSize(new ByteSizeValue(randomNonNegativeLong())); } return request; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java index 55c0c79e5b2..122082537fd 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java @@ -57,7 +57,10 @@ public class ResumeFollowActionRequestTests extends AbstractStreamableXContentTe request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); } if 
(randomBoolean()) { - request.setMaxWriteBufferSize(randomIntBetween(1, Integer.MAX_VALUE)); + request.setMaxWriteBufferCount(randomIntBetween(1, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxWriteBufferSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); } if (randomBoolean()) { request.setMaxRetryDelay(TimeValue.timeValueMillis(500)); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java index 50c0dd9ca49..8576bc28905 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ccr.action; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.seqno.LocalCheckpointTracker; import org.elasticsearch.index.shard.ShardId; @@ -83,6 +85,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE, concurrency, 10240, + new ByteSizeValue(512, ByteSizeUnit.MB), TimeValue.timeValueMillis(10), TimeValue.timeValueMillis(10), Collections.emptyMap() diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java index a3881a6728f..93d9556d0e4 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java @@ -57,6 +57,7 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase< randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomReadExceptions(), randomLong(), randomBoolean() ? 
new ElasticsearchException("fatal error") : null); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java index 1988513c95d..a1582d4c2f1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java @@ -63,7 +63,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { private Queue responseSizes; public void testCoordinateReads() { - ShardFollowNodeTask task = createShardFollowTask(8, between(8, 20), between(1, 20), Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = createShardFollowTask(8, between(8, 20), between(1, 20), Integer.MAX_VALUE, + new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 3, -1); task.coordinateReads(); assertThat(shardChangesRequests, contains(new long[]{0L, 8L})); // treat this a peak request @@ -77,9 +78,10 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(status.lastRequestedSeqNo(), equalTo(60L)); } - public void testWriteBuffer() { + public void testMaxWriteBufferCount() { // Need to set concurrentWrites to 0, other the write buffer gets flushed immediately: - ShardFollowNodeTask task = createShardFollowTask(64, 1, 0, 32, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 0, 32, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -90,7 +92,30 @@ public class ShardFollowNodeTaskTests extends ESTestCase { shardChangesRequests.clear(); // Also invokes the coordinatesReads() method: task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 128L)); - assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer is full + assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer count limit has been reached + + ShardFollowNodeTaskStatus status = task.getStatus(); + assertThat(status.numberOfConcurrentReads(), equalTo(0)); + assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.lastRequestedSeqNo(), equalTo(63L)); + assertThat(status.leaderGlobalCheckpoint(), equalTo(128L)); + } + + public void testMaxWriteBufferSize() { + // Need to set concurrentWrites to 0, other the write buffer gets flushed immediately: + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 0, Integer.MAX_VALUE, new ByteSizeValue(1, ByteSizeUnit.KB), Long.MAX_VALUE); + startTask(task, 63, -1); + + task.coordinateReads(); + assertThat(shardChangesRequests.size(), equalTo(1)); + assertThat(shardChangesRequests.get(0)[0], equalTo(0L)); + assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); + + shardChangesRequests.clear(); + // Also invokes the coordinatesReads() method: + task.innerHandleReadResponse(0L, 63L, generateShardChangesResponse(0, 63, 0L, 128L)); + assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer size limit has been reached ShardFollowNodeTaskStatus status = task.getStatus(); assertThat(status.numberOfConcurrentReads(), equalTo(0)); @@ -100,7 +125,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMaxConcurrentReads() { - ShardFollowNodeTask task = createShardFollowTask(8, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + 
createShardFollowTask(8, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 64, -1); task.coordinateReads(); @@ -114,7 +140,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testTaskCancelled() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 64, -1); task.coordinateReads(); @@ -131,7 +158,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testTaskCancelledAfterReadLimitHasBeenReached() { - ShardFollowNodeTask task = createShardFollowTask(16, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(16, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 31, -1); task.coordinateReads(); @@ -155,7 +183,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testTaskCancelledAfterWriteBufferLimitHasBeenReached() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, 32, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, 32, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 64, -1); task.coordinateReads(); @@ -179,7 +208,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testReceiveRetryableError() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); int max = randomIntBetween(1, 30); @@ -229,7 +259,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testEmptyShardChangesResponseShouldClearFetchException() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, -1, -1); readFailures.add(new ShardNotFoundException(new ShardId("leader_index", "", 0))); @@ -258,7 +289,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testReceiveTimeout() { - final ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + final ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); final int numberOfTimeouts = randomIntBetween(1, 32); @@ -322,7 +354,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testReceiveNonRetryableError() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); Exception failure = new RuntimeException("replication failed"); @@ -362,7 +395,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testHandleReadResponse() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), 
Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -383,7 +417,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testReceiveLessThanRequested() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -407,7 +442,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testCancelAndReceiveLessThanRequested() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -430,7 +466,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testReceiveNothingExpectedSomething() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -453,7 +490,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMappingUpdate() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); mappingVersions.add(1L); @@ -474,7 +512,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMappingUpdateRetryableError() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); int max = randomIntBetween(1, 30); @@ -499,7 +538,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMappingUpdateNonRetryableError() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); mappingUpdateFailures.add(new RuntimeException()); @@ -518,7 +558,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testCoordinateWrites() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -542,7 +583,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMaxConcurrentWrites() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 2, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 2, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -554,7 +596,7 @@ public 
class ShardFollowNodeTaskTests extends ESTestCase { ShardFollowNodeTaskStatus status = task.getStatus(); assertThat(status.numberOfConcurrentWrites(), equalTo(2)); - task = createShardFollowTask(64, 1, 4, Integer.MAX_VALUE, Long.MAX_VALUE); + task = createShardFollowTask(64, 1, 4, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); response = generateShardChangesResponse(0, 256, 0L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -570,7 +612,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMaxBatchOperationCount() { - ShardFollowNodeTask task = createShardFollowTask(8, 1, 32, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(8, 1, 32, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -586,7 +629,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testRetryableError() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -614,7 +658,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testNonRetryableError() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -636,7 +681,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testMaxBatchBytesLimit() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 128, Integer.MAX_VALUE, 1L); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 128, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), 1L); startTask(task, 64, -1); task.coordinateReads(); @@ -652,7 +698,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testHandleWriteResponse() { - ShardFollowNodeTask task = createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, Long.MAX_VALUE); + ShardFollowNodeTask task = + createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); startTask(task, 63, -1); task.coordinateReads(); @@ -698,7 +745,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { private ShardFollowNodeTask createShardFollowTask(int maxBatchOperationCount, int maxConcurrentReadBatches, int maxConcurrentWriteBatches, - int bufferWriteLimit, + int maxWriteBufferCount, + ByteSizeValue maxWriteBufferSize, long maxBatchSizeInBytes) { AtomicBoolean stopped = new AtomicBoolean(false); ShardFollowTask params = new ShardFollowTask( @@ -709,7 +757,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { maxConcurrentReadBatches, new ByteSizeValue(maxBatchSizeInBytes, ByteSizeUnit.BYTES), maxConcurrentWriteBatches, - bufferWriteLimit, + maxWriteBufferCount, + maxWriteBufferSize, TimeValue.ZERO, TimeValue.ZERO, Collections.emptyMap() diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index 96bc2f04f59..07c3121eba4 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -370,7 +370,9 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest between(1, 64), between(1, 8), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES), - between(1, 4), 10240, + between(1, 4), + 10240, + new ByteSizeValue(512, ByteSizeUnit.MB), TimeValue.timeValueMillis(10), TimeValue.timeValueMillis(10), Collections.emptyMap() diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java index 865d18e6067..e955f77d733 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java @@ -34,6 +34,7 @@ public class ShardFollowTaskTests extends AbstractSerializingTestCase existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); existingAutoFollowPatterns.put("name1", - new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); @@ -44,7 +44,7 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { List existingPatterns = new ArrayList<>(); existingPatterns.add("logs-*"); existingAutoFollowPatterns.put("name2", - new AutoFollowPattern("asia_cluster", existingPatterns, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("asia_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); @@ -78,7 +78,7 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); existingAutoFollowPatterns.put("name1", - new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); existingHeaders.put("key", Collections.singletonMap("key", "val")); } ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster")) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java index ffc2d115091..128474bbc30 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java @@ -23,10 +23,10 @@ public class TransportGetAutoFollowPatternActionTests extends ESTestCase { public void testGetAutoFollowPattern() { Map patterns = new HashMap<>(); - patterns.put("name1", - new AutoFollowPattern("test_alias1", Collections.singletonList("index-*"), null, null, null, null, 
null, null, null, null)); - patterns.put("name2", - new AutoFollowPattern("test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); + patterns.put("name1", new AutoFollowPattern( + "test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null, null)); + patterns.put("name2", new AutoFollowPattern( + "test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null, null)); MetaData metaData = MetaData.builder() .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())) .build(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java index 6d4ef138fb4..433ef402af8 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java @@ -103,7 +103,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); existingAutoFollowPatterns.put("name1", - new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java index 07b0fc078ac..82cbe2622b7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java @@ -11,6 +11,8 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; @@ -81,6 +83,7 @@ public class TransportUnfollowActionTests extends ESTestCase { TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE, 1, 10240, + new ByteSizeValue(512, ByteSizeUnit.MB), TimeValue.timeValueMillis(10), TimeValue.timeValueMillis(10), Collections.emptyMap() diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 4b36005de36..9b4ed7c8a97 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -92,6 +92,7 @@ public class FollowStatsMonitoringDocTests 
extends BaseMonitoringDocTestCase entry : serializedStatus.entrySet()) { String fieldName = entry.getKey(); Map fieldMapping = (Map) followStatsMapping.get(fieldName); - assertThat(fieldMapping, notNullValue()); + assertThat("no field mapping for field [" + fieldName + "]", fieldMapping, notNullValue()); Object fieldValue = entry.getValue(); String fieldType = (String) fieldMapping.get("type"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index 5234151010c..8172612b78f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -182,6 +182,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); public static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + public static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); @@ -190,8 +191,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("auto_follow_pattern", args -> new AutoFollowPattern((String) args[0], (List) args[1], (String) args[2], (Integer) args[3], - (Integer) args[4], (ByteSizeValue) args[5], (Integer) args[6], (Integer) args[7], (TimeValue) args[8], - (TimeValue) args[9])); + (Integer) args[4], (ByteSizeValue) args[5], (Integer) args[6], (Integer) args[7], (ByteSizeValue) args[8], + (TimeValue) args[9], (TimeValue) args[10])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), REMOTE_CLUSTER_FIELD); @@ -205,7 +206,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i MAX_BATCH_SIZE, ObjectParser.ValueType.STRING); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_WRITE_BATCHES); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_BUFFER_SIZE); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_BUFFER_COUNT); + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_BUFFER_SIZE.getPreferredName()), + MAX_WRITE_BUFFER_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY.getPreferredName()), MAX_RETRY_DELAY, ObjectParser.ValueType.STRING); @@ -221,7 +227,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i private final Integer maxConcurrentReadBatches; private final ByteSizeValue maxBatchSize; private final Integer maxConcurrentWriteBatches; - private final Integer maxWriteBufferSize; + private final Integer maxWriteBufferCount; + private final ByteSizeValue maxWriteBufferSize; private final TimeValue maxRetryDelay; private final TimeValue pollTimeout; @@ -232,8 
+239,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i Integer maxConcurrentReadBatches, ByteSizeValue maxBatchSize, Integer maxConcurrentWriteBatches, - Integer maxWriteBufferSize, - TimeValue maxRetryDelay, + Integer maxWriteBufferCount, + ByteSizeValue maxWriteBufferSize, TimeValue maxRetryDelay, TimeValue pollTimeout) { this.remoteCluster = remoteCluster; this.leaderIndexPatterns = leaderIndexPatterns; @@ -242,6 +249,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i this.maxConcurrentReadBatches = maxConcurrentReadBatches; this.maxBatchSize = maxBatchSize; this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + this.maxWriteBufferCount = maxWriteBufferCount; this.maxWriteBufferSize = maxWriteBufferSize; this.maxRetryDelay = maxRetryDelay; this.pollTimeout = pollTimeout; @@ -255,7 +263,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i maxConcurrentReadBatches = in.readOptionalVInt(); maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); maxConcurrentWriteBatches = in.readOptionalVInt(); - maxWriteBufferSize = in.readOptionalVInt(); + maxWriteBufferCount = in.readOptionalVInt(); + maxWriteBufferSize = in.readOptionalWriteable(ByteSizeValue::new); maxRetryDelay = in.readOptionalTimeValue(); pollTimeout = in.readOptionalTimeValue(); } @@ -296,7 +305,11 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return maxConcurrentWriteBatches; } - public Integer getMaxWriteBufferSize() { + public Integer getMaxWriteBufferCount() { + return maxWriteBufferCount; + } + + public ByteSizeValue getMaxWriteBufferSize() { return maxWriteBufferSize; } @@ -317,7 +330,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i out.writeOptionalVInt(maxConcurrentReadBatches); out.writeOptionalWriteable(maxBatchSize); out.writeOptionalVInt(maxConcurrentWriteBatches); - out.writeOptionalVInt(maxWriteBufferSize); + out.writeOptionalVInt(maxWriteBufferCount); + out.writeOptionalWriteable(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); out.writeOptionalTimeValue(pollTimeout); } @@ -341,8 +355,11 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i if (maxConcurrentWriteBatches != null) { builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); } - if (maxWriteBufferSize != null){ - builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + if (maxWriteBufferCount != null){ + builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); + } + if (maxWriteBufferSize != null) { + builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); } if (maxRetryDelay != null) { builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay); @@ -370,6 +387,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) && Objects.equals(maxBatchSize, that.maxBatchSize) && Objects.equals(maxConcurrentWriteBatches, that.maxConcurrentWriteBatches) && + Objects.equals(maxWriteBufferCount, that.maxWriteBufferCount) && Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) && Objects.equals(maxRetryDelay, that.maxRetryDelay) && Objects.equals(pollTimeout, that.pollTimeout); @@ -385,6 +403,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i maxConcurrentReadBatches, maxBatchSize, maxConcurrentWriteBatches, + maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, pollTimeout); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java index 5869c78bc73..e2e907f80d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java @@ -46,6 +46,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { private static final ParseField NUMBER_OF_CONCURRENT_READS_FIELD = new ParseField("number_of_concurrent_reads"); private static final ParseField NUMBER_OF_CONCURRENT_WRITES_FIELD = new ParseField("number_of_concurrent_writes"); private static final ParseField NUMBER_OF_QUEUED_WRITES_FIELD = new ParseField("number_of_queued_writes"); + private static final ParseField BUFFER_SIZE_IN_BYTES_FIELD = new ParseField("buffer_size_in_bytes"); private static final ParseField MAPPING_VERSION_FIELD = new ParseField("mapping_version"); private static final ParseField TOTAL_FETCH_TIME_MILLIS_FIELD = new ParseField("total_fetch_time_millis"); private static final ParseField TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD = new ParseField("total_fetch_remote_time_millis"); @@ -89,12 +90,13 @@ public class ShardFollowNodeTaskStatus implements Task.Status { (long) args[20], (long) args[21], (long) args[22], + (long) args[23], new TreeMap<>( - ((List>>) args[23]) + ((List>>) args[24]) .stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), - (long) args[24], - (ElasticsearchException) args[25])); + (long) args[25], + (ElasticsearchException) args[26])); public static final String FETCH_EXCEPTIONS_ENTRY_PARSER_NAME = "shard-follow-node-task-status-fetch-exceptions-entry"; @@ -116,6 +118,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_CONCURRENT_READS_FIELD); STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_CONCURRENT_WRITES_FIELD); STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_QUEUED_WRITES_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUFFER_SIZE_IN_BYTES_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAPPING_VERSION_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_TIME_MILLIS_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD); @@ -219,6 +222,12 @@ public class ShardFollowNodeTaskStatus implements Task.Status { return numberOfQueuedWrites; } + private final long bufferSize; + + public long bufferSize() { + return bufferSize; + } + private final long mappingVersion; public long mappingVersion() { @@ -316,6 +325,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final int numberOfConcurrentReads, final int numberOfConcurrentWrites, final int numberOfQueuedWrites, + final long bufferSize, final long mappingVersion, final long totalFetchTimeMillis, final long totalFetchRemoteTimeMillis, @@ -342,6 +352,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.numberOfConcurrentReads = numberOfConcurrentReads; this.numberOfConcurrentWrites = numberOfConcurrentWrites; this.numberOfQueuedWrites = numberOfQueuedWrites; + this.bufferSize = bufferSize; this.mappingVersion = mappingVersion; this.totalFetchTimeMillis = totalFetchTimeMillis; 
this.totalFetchRemoteTimeMillis = totalFetchRemoteTimeMillis; @@ -371,6 +382,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.numberOfConcurrentReads = in.readVInt(); this.numberOfConcurrentWrites = in.readVInt(); this.numberOfQueuedWrites = in.readVInt(); + this.bufferSize = in.readVLong(); this.mappingVersion = in.readVLong(); this.totalFetchTimeMillis = in.readVLong(); this.totalFetchRemoteTimeMillis = in.readVLong(); @@ -407,6 +419,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { out.writeVInt(numberOfConcurrentReads); out.writeVInt(numberOfConcurrentWrites); out.writeVInt(numberOfQueuedWrites); + out.writeVLong(bufferSize); out.writeVLong(mappingVersion); out.writeVLong(totalFetchTimeMillis); out.writeVLong(totalFetchRemoteTimeMillis); @@ -452,6 +465,10 @@ public class ShardFollowNodeTaskStatus implements Task.Status { builder.field(NUMBER_OF_CONCURRENT_READS_FIELD.getPreferredName(), numberOfConcurrentReads); builder.field(NUMBER_OF_CONCURRENT_WRITES_FIELD.getPreferredName(), numberOfConcurrentWrites); builder.field(NUMBER_OF_QUEUED_WRITES_FIELD.getPreferredName(), numberOfQueuedWrites); + builder.humanReadableField( + BUFFER_SIZE_IN_BYTES_FIELD.getPreferredName(), + "buffer_size", + new ByteSizeValue(bufferSize)); builder.field(MAPPING_VERSION_FIELD.getPreferredName(), mappingVersion); builder.humanReadableField( TOTAL_FETCH_TIME_MILLIS_FIELD.getPreferredName(), @@ -531,6 +548,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { numberOfConcurrentReads == that.numberOfConcurrentReads && numberOfConcurrentWrites == that.numberOfConcurrentWrites && numberOfQueuedWrites == that.numberOfQueuedWrites && + bufferSize == that.bufferSize && mappingVersion == that.mappingVersion && totalFetchTimeMillis == that.totalFetchTimeMillis && totalFetchRemoteTimeMillis == that.totalFetchRemoteTimeMillis && @@ -568,6 +586,7 @@ public class ShardFollowNodeTaskStatus implements Task.Status { numberOfConcurrentReads, numberOfConcurrentWrites, numberOfQueuedWrites, + bufferSize, mappingVersion, totalFetchTimeMillis, totalFetchRemoteTimeMillis, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 5a87666d050..8010c9bf344 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -60,7 +60,12 @@ public class PutAutoFollowPatternAction extends Action { AutoFollowPattern.MAX_BATCH_SIZE, ObjectParser.ValueType.STRING); PARSER.declareInt(Request::setMaxConcurrentWriteBatches, AutoFollowPattern.MAX_CONCURRENT_WRITE_BATCHES); - PARSER.declareInt(Request::setMaxWriteBufferSize, AutoFollowPattern.MAX_WRITE_BUFFER_SIZE); + PARSER.declareInt(Request::setMaxWriteBufferCount, AutoFollowPattern.MAX_WRITE_BUFFER_COUNT); + PARSER.declareField( + Request::setMaxWriteBufferSize, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), AutoFollowPattern.MAX_WRITE_BUFFER_SIZE.getPreferredName()), + AutoFollowPattern.MAX_WRITE_BUFFER_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareField(Request::setMaxRetryDelay, (p, c) -> TimeValue.parseTimeValue(p.text(), AutoFollowPattern.MAX_RETRY_DELAY.getPreferredName()), AutoFollowPattern.MAX_RETRY_DELAY, ObjectParser.ValueType.STRING); @@ -92,7 +97,8 @@ public class 
PutAutoFollowPatternAction extends Action { private Integer maxConcurrentReadBatches; private ByteSizeValue maxBatchSize; private Integer maxConcurrentWriteBatches; - private Integer maxWriteBufferSize; + private Integer maxWriteBufferCount; + private ByteSizeValue maxWriteBufferSize; private TimeValue maxRetryDelay; private TimeValue pollTimeout; @@ -190,11 +196,19 @@ public class PutAutoFollowPatternAction extends Action { this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; } - public Integer getMaxWriteBufferSize() { + public Integer getMaxWriteBufferCount() { + return maxWriteBufferCount; + } + + public void setMaxWriteBufferCount(Integer maxWriteBufferCount) { + this.maxWriteBufferCount = maxWriteBufferCount; + } + + public ByteSizeValue getMaxWriteBufferSize() { return maxWriteBufferSize; } - public void setMaxWriteBufferSize(Integer maxWriteBufferSize) { + public void setMaxWriteBufferSize(ByteSizeValue maxWriteBufferSize) { this.maxWriteBufferSize = maxWriteBufferSize; } @@ -225,7 +239,8 @@ public class PutAutoFollowPatternAction extends Action { maxConcurrentReadBatches = in.readOptionalVInt(); maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); maxConcurrentWriteBatches = in.readOptionalVInt(); - maxWriteBufferSize = in.readOptionalVInt(); + maxWriteBufferCount = in.readOptionalVInt(); + maxWriteBufferSize = in.readOptionalWriteable(ByteSizeValue::new); maxRetryDelay = in.readOptionalTimeValue(); pollTimeout = in.readOptionalTimeValue(); } @@ -241,7 +256,8 @@ public class PutAutoFollowPatternAction extends Action { out.writeOptionalVInt(maxConcurrentReadBatches); out.writeOptionalWriteable(maxBatchSize); out.writeOptionalVInt(maxConcurrentWriteBatches); - out.writeOptionalVInt(maxWriteBufferSize); + out.writeOptionalVInt(maxWriteBufferCount); + out.writeOptionalWriteable(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); out.writeOptionalTimeValue(pollTimeout); } @@ -262,8 +278,11 @@ public class PutAutoFollowPatternAction extends Action { if (maxBatchSize != null) { builder.field(AutoFollowPattern.MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); } + if (maxWriteBufferCount != null) { + builder.field(AutoFollowPattern.MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); + } if (maxWriteBufferSize != null) { - builder.field(AutoFollowPattern.MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + builder.field(AutoFollowPattern.MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); } if (maxConcurrentReadBatches != null) { builder.field(AutoFollowPattern.MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); @@ -295,6 +314,7 @@ public class PutAutoFollowPatternAction extends Action { Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) && Objects.equals(maxBatchSize, request.maxBatchSize) && Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) && + Objects.equals(maxWriteBufferCount, request.maxWriteBufferCount) && Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) && Objects.equals(maxRetryDelay, request.maxRetryDelay) && Objects.equals(pollTimeout, request.pollTimeout); @@ -311,6 +331,7 @@ public class PutAutoFollowPatternAction extends Action { maxConcurrentReadBatches, maxBatchSize, maxConcurrentWriteBatches, + maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, pollTimeout); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 28895a59073..0f36af4db10 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -32,6 +32,7 @@ import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_CONCURRENT_READ_BATCHES; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_CONCURRENT_WRITE_BATCHES; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_RETRY_DELAY_FIELD; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_COUNT; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_SIZE; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.POLL_TIMEOUT; @@ -72,7 +73,12 @@ public final class PutFollowAction extends Action { MAX_BATCH_SIZE, ObjectParser.ValueType.STRING); PARSER.declareInt((request, value) -> request.followRequest.setMaxConcurrentWriteBatches(value), MAX_CONCURRENT_WRITE_BATCHES); - PARSER.declareInt((request, value) -> request.followRequest.setMaxWriteBufferSize(value), MAX_WRITE_BUFFER_SIZE); + PARSER.declareInt((request, value) -> request.followRequest.setMaxWriteBufferCount(value), MAX_WRITE_BUFFER_COUNT); + PARSER.declareField( + (request, value) -> request.followRequest.setMaxWriteBufferSize(value), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_BUFFER_SIZE.getPreferredName()), + MAX_WRITE_BUFFER_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareField( (request, value) -> request.followRequest.setMaxRetryDelay(value), (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY_FIELD.getPreferredName()), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 127ccf7610f..587223e3fbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -48,6 +48,7 @@ public final class ResumeFollowAction extends Action { static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count"); static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay"); static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); @@ -63,11 +64,16 @@ public final class ResumeFollowAction extends Action { MAX_BATCH_SIZE, ObjectParser.ValueType.STRING); PARSER.declareInt(Request::setMaxConcurrentWriteBatches, MAX_CONCURRENT_WRITE_BATCHES); - PARSER.declareInt(Request::setMaxWriteBufferSize, MAX_WRITE_BUFFER_SIZE); + PARSER.declareInt(Request::setMaxWriteBufferCount, MAX_WRITE_BUFFER_COUNT); + PARSER.declareField( + Request::setMaxWriteBufferSize, + (p, c) -> 
ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_BUFFER_SIZE.getPreferredName()), + MAX_WRITE_BUFFER_SIZE, + ObjectParser.ValueType.STRING); PARSER.declareField( Request::setMaxRetryDelay, (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY_FIELD.getPreferredName()), - MAX_RETRY_DELAY_FIELD, + MAX_RETRY_DELAY_FIELD, ObjectParser.ValueType.STRING); PARSER.declareField( Request::setPollTimeout, @@ -140,13 +146,23 @@ public final class ResumeFollowAction extends Action { this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; } - private Integer maxWriteBufferSize; + private Integer maxWriteBufferCount; - public Integer getMaxWriteBufferSize() { + public Integer getMaxWriteBufferCount() { + return maxWriteBufferCount; + } + + public void setMaxWriteBufferCount(Integer maxWriteBufferCount) { + this.maxWriteBufferCount = maxWriteBufferCount; + } + + private ByteSizeValue maxWriteBufferSize; + + public ByteSizeValue getMaxWriteBufferSize() { return maxWriteBufferSize; } - public void setMaxWriteBufferSize(Integer maxWriteBufferSize) { + public void setMaxWriteBufferSize(ByteSizeValue maxWriteBufferSize) { this.maxWriteBufferSize = maxWriteBufferSize; } @@ -192,7 +208,10 @@ public final class ResumeFollowAction extends Action { if (maxConcurrentWriteBatches != null && maxConcurrentWriteBatches < 1) { e = addValidationError(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName() + " must be larger than 0", e); } - if (maxWriteBufferSize != null && maxWriteBufferSize < 1) { + if (maxWriteBufferCount != null && maxWriteBufferCount < 1) { + e = addValidationError(MAX_WRITE_BUFFER_COUNT.getPreferredName() + " must be larger than 0", e); + } + if (maxWriteBufferSize != null && maxWriteBufferSize.compareTo(ByteSizeValue.ZERO) <= 0) { e = addValidationError(MAX_WRITE_BUFFER_SIZE.getPreferredName() + " must be larger than 0", e); } if (maxRetryDelay != null && maxRetryDelay.millis() <= 0) { @@ -217,7 +236,8 @@ public final class ResumeFollowAction extends Action { maxConcurrentReadBatches = in.readOptionalVInt(); maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); maxConcurrentWriteBatches = in.readOptionalVInt(); - maxWriteBufferSize = in.readOptionalVInt(); + maxWriteBufferCount = in.readOptionalVInt(); + maxWriteBufferSize = in.readOptionalWriteable(ByteSizeValue::new); maxRetryDelay = in.readOptionalTimeValue(); pollTimeout = in.readOptionalTimeValue(); } @@ -230,7 +250,8 @@ public final class ResumeFollowAction extends Action { out.writeOptionalVInt(maxConcurrentReadBatches); out.writeOptionalWriteable(maxBatchSize); out.writeOptionalVInt(maxConcurrentWriteBatches); - out.writeOptionalVInt(maxWriteBufferSize); + out.writeOptionalVInt(maxWriteBufferCount); + out.writeOptionalWriteable(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); out.writeOptionalTimeValue(pollTimeout); } @@ -253,8 +274,11 @@ public final class ResumeFollowAction extends Action { if (maxBatchSize != null) { builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); } + if (maxWriteBufferCount != null) { + builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); + } if (maxWriteBufferSize != null) { - builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); } if (maxConcurrentReadBatches != null) { builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); @@ -279,6 +303,7 @@ public final class ResumeFollowAction 
extends Action { Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) && Objects.equals(maxBatchSize, request.maxBatchSize) && Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) && + Objects.equals(maxWriteBufferCount, request.maxWriteBufferCount) && Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) && Objects.equals(maxRetryDelay, request.maxRetryDelay) && Objects.equals(pollTimeout, request.pollTimeout) && @@ -293,6 +318,7 @@ public final class ResumeFollowAction extends Action { maxConcurrentReadBatches, maxBatchSize, maxConcurrentWriteBatches, + maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, pollTimeout); diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index bad47024f05..4c2a9792440 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -965,6 +965,9 @@ "number_of_queued_writes": { "type": "long" }, + "buffer_size_in_bytes": { + "type": "long" + }, "mapping_version": { "type": "long" }, From d7afd7c123babeb1481aacdb735743af719ac866 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Wed, 24 Oct 2018 17:57:35 -0400 Subject: [PATCH 48/67] Reduce the number of callouts to 15 in docs relates to #33447 --- .../documentation/CRUDDocumentationIT.java | 32 +++++++++--------- .../high-level/document/term-vectors.asciidoc | 33 +++++++++---------- 2 files changed, 32 insertions(+), 33 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index dd2c6d48322..7f3b980becd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1576,27 +1576,27 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { // tag::term-vectors-term-vectors if (response.getTermVectorsList() != null) { - List tvList = response.getTermVectorsList(); // <1> + List tvList = response.getTermVectorsList(); for (TermVectorsResponse.TermVector tv : tvList) { - String fieldname = tv.getFieldName(); // <2> - int docCount = tv.getFieldStatistics().getDocCount(); // <3> - long sumTotalTermFreq = tv.getFieldStatistics().getSumTotalTermFreq(); // <4> - long sumDocFreq = tv.getFieldStatistics().getSumDocFreq(); // <5> + String fieldname = tv.getFieldName(); // <1> + int docCount = tv.getFieldStatistics().getDocCount(); // <2> + long sumTotalTermFreq = tv.getFieldStatistics().getSumTotalTermFreq(); // <3> + long sumDocFreq = tv.getFieldStatistics().getSumDocFreq(); // <4> if (tv.getTerms() != null) { - List terms = tv.getTerms(); // <6> + List terms = tv.getTerms(); // <5> for (TermVectorsResponse.TermVector.Term term : terms) { - String termStr = term.getTerm(); // <7> - int termFreq = term.getTermFreq(); // <8> - int docFreq = term.getDocFreq(); // <9> - long totalTermFreq = term.getTotalTermFreq(); // <10> - float score = term.getScore(); // <11> + String termStr = term.getTerm(); // <6> + int termFreq = term.getTermFreq(); // <7> + int docFreq = term.getDocFreq(); // <8> + long totalTermFreq = term.getTotalTermFreq(); // <9> + float score = term.getScore(); // <10> if (term.getTokens() != null) { - List tokens = term.getTokens(); // <12> + List tokens 
= term.getTokens(); // <11> for (TermVectorsResponse.TermVector.Token token : tokens) { - int position = token.getPosition(); // <13> - int startOffset = token.getStartOffset(); // <14> - int endOffset = token.getEndOffset(); // <15> - String payload = token.getPayload(); // <16> + int position = token.getPosition(); // <12> + int startOffset = token.getStartOffset(); // <13> + int endOffset = token.getEndOffset(); // <14> + String payload = token.getPayload(); // <15> } } } diff --git a/docs/java-rest/high-level/document/term-vectors.asciidoc b/docs/java-rest/high-level/document/term-vectors.asciidoc index 1e119d21a91..e739e37732f 100644 --- a/docs/java-rest/high-level/document/term-vectors.asciidoc +++ b/docs/java-rest/high-level/document/term-vectors.asciidoc @@ -78,25 +78,24 @@ include-tagged::{doc-tests-file}[{api}-response] ===== Inspecting Term Vectors If `TermVectorsResponse` contains non-null list of term vectors, -more information about them can be obtained using following: +more information about each term vector can be obtained using the following: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests-file}[{api}-term-vectors] -------------------------------------------------- -<1> The list of `TermVector` for the document -<2> The name of the current field -<3> Fields statistics for the current field - document count -<4> Fields statistics for the current field - sum of total term frequencies -<5> Fields statistics for the current field - sum of document frequencies -<6> Terms for the current field -<7> The name of the term -<8> Term frequency of the term -<9> Document frequency of the term -<10> Total term frequency of the term -<11> Score of the term -<12> Tokens of the term -<13> Position of the token -<14> Start offset of the token -<15> End offset of the token -<16> Payload of the token +<1> The name of the current field +<2> Fields statistics for the current field - document count +<3> Fields statistics for the current field - sum of total term frequencies +<4> Fields statistics for the current field - sum of document frequencies +<5> Terms for the current field +<6> The name of the term +<7> Term frequency of the term +<8> Document frequency of the term +<9> Total term frequency of the term +<10> Score of the term +<11> Tokens of the term +<12> Position of the token +<13> Start offset of the token +<14> End offset of the token +<15> Payload of the token From 687dc1eb11265fb0abc6a0bce8075744dd69efa8 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 24 Oct 2018 15:03:38 -0700 Subject: [PATCH 49/67] Scripting: Remove SearchScript (#34730) This commit removes the last non context based script class. 
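Note (editorial, not part of the patch): the per-context script model this change completes means callers compile against a specific ScriptContext (for example ScoreScript.CONTEXT) rather than the generic SearchScript. A minimal sketch of that compile/bind/execute flow is shown below, assuming a ScriptService, SearchLookup and LeafReaderContext are already available; the wrapper class and method names are illustrative and do not appear in this patch.

[source,java]
--------------------------------------------------
import java.io.IOException;
import java.util.Map;

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.lookup.SearchLookup;

/** Illustrative only: the context-based flow that replaces the removed SearchScript. */
class ScoreScriptFlowExample {
    static double runScoreScript(ScriptService scriptService, Script script, Map<String, Object> params,
                                 SearchLookup lookup, LeafReaderContext leafCtx, int docId) throws IOException {
        // compile once against the ScoreScript context instead of a generic search script
        ScoreScript.Factory factory = scriptService.compile(script, ScoreScript.CONTEXT);
        // bind index-level state (script params + field data lookup)
        ScoreScript.LeafFactory leafFactory = factory.newFactory(params, lookup);
        // create a per-segment instance, point it at a document, and execute
        ScoreScript scoreScript = leafFactory.newInstance(leafCtx);
        scoreScript.setDocument(docId);
        return scoreScript.execute();
    }
}
--------------------------------------------------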
--- ...Script.java => ExpressionScoreScript.java} | 52 ++----- .../expression/ExpressionScriptEngine.java | 116 +++++--------- .../painless/GenericElasticsearchScript.java | 39 ----- .../painless/PainlessScriptEngine.java | 70 ++------- .../elasticsearch/painless/ScriptImpl.java | 112 -------------- .../org/elasticsearch/painless/Debugger.java | 3 +- .../painless/ScriptTestCase.java | 2 +- .../painless/node/NodeToStringTests.java | 4 +- .../search/function/ScriptScoreFunction.java | 6 +- ...cript.java => ExplainableScoreScript.java} | 4 +- .../org/elasticsearch/script/FieldScript.java | 2 +- .../org/elasticsearch/script/ScoreScript.java | 3 +- .../elasticsearch/script/SearchScript.java | 142 ------------------ .../functionscore/ExplainableScriptIT.java | 4 +- .../script/MockScriptEngine.java | 90 ++--------- .../test/MockPainlessScriptEngine.java | 7 +- .../elasticsearch/xpack/watcher/Watcher.java | 7 +- .../watcher/test/WatcherMockScriptPlugin.java | 2 +- .../test/integration/SearchInputTests.java | 1 - 19 files changed, 100 insertions(+), 566 deletions(-) rename modules/lang-expression/src/main/java/org/elasticsearch/script/expression/{ExpressionSearchScript.java => ExpressionScoreScript.java} (59%) delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/GenericElasticsearchScript.java delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java rename server/src/main/java/org/elasticsearch/script/{ExplainableSearchScript.java => ExplainableScoreScript.java} (93%) delete mode 100644 server/src/main/java/org/elasticsearch/script/SearchScript.java diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScoreScript.java similarity index 59% rename from modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java rename to modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScoreScript.java index 7a251f6e6fd..120e8a9cabf 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScoreScript.java @@ -26,27 +26,23 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValuesSource; import org.elasticsearch.script.GeneralScriptException; -import org.elasticsearch.script.SearchScript; +import org.elasticsearch.script.ScoreScript; import java.io.IOException; /** * A bridge to evaluate an {@link Expression} against {@link Bindings} in the context - * of a {@link SearchScript}. + * of a {@link ScoreScript}. 
*/ -class ExpressionSearchScript implements SearchScript.LeafFactory { +class ExpressionScoreScript implements ScoreScript.LeafFactory { - final Expression exprScript; - final SimpleBindings bindings; - final DoubleValuesSource source; - final ReplaceableConstDoubleValueSource specialValue; // _value - final boolean needsScores; + private final Expression exprScript; + private final DoubleValuesSource source; + private final boolean needsScores; - ExpressionSearchScript(Expression e, SimpleBindings b, ReplaceableConstDoubleValueSource v, boolean needsScores) { - exprScript = e; - bindings = b; - source = exprScript.getDoubleValuesSource(bindings); - specialValue = v; + ExpressionScoreScript(Expression e, SimpleBindings b, boolean needsScores) { + this.exprScript = e; + this.source = exprScript.getDoubleValuesSource(b); this.needsScores = needsScores; } @@ -55,15 +51,14 @@ class ExpressionSearchScript implements SearchScript.LeafFactory { return needsScores; } - @Override - public SearchScript newInstance(final LeafReaderContext leaf) throws IOException { - return new SearchScript(null, null, null) { + public ScoreScript newInstance(final LeafReaderContext leaf) throws IOException { + return new ScoreScript(null, null, null) { // Fake the scorer until setScorer is called. DoubleValues values = source.getValues(leaf, new DoubleValues() { @Override public double doubleValue() throws IOException { - return getScore(); + return get_score(); } @Override @@ -73,10 +68,7 @@ class ExpressionSearchScript implements SearchScript.LeafFactory { }); @Override - public Object run() { return Double.valueOf(runAsDouble()); } - - @Override - public double runAsDouble() { + public double execute() { try { return values.doubleValue(); } catch (Exception exception) { @@ -92,24 +84,6 @@ class ExpressionSearchScript implements SearchScript.LeafFactory { throw new IllegalStateException("Can't advance to doc using " + exprScript, e); } } - - @Override - public void setNextAggregationValue(Object value) { - // _value isn't used in script if specialValue == null - if (specialValue != null) { - if (value instanceof Number) { - specialValue.setValue(((Number)value).doubleValue()); - } else { - throw new GeneralScriptException("Cannot use expression with text variable using " + exprScript); - } - } - } - - @Override - public void setNextVar(String name, Object value) { - // other per-document variables aren't supported yet, even if they are numbers - // but we shouldn't encourage this anyway. 
- } }; } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index d719f7a2cbc..150bca60273 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -23,10 +23,8 @@ import org.apache.lucene.expressions.Expression; import org.apache.lucene.expressions.SimpleBindings; import org.apache.lucene.expressions.js.JavascriptCompiler; import org.apache.lucene.expressions.js.VariableContext; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource; -import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Nullable; @@ -48,11 +46,9 @@ import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptException; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; -import java.io.IOException; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; @@ -63,8 +59,9 @@ import java.util.List; import java.util.Map; /** - * Provides the infrastructure for Lucene expressions as a scripting language for Elasticsearch. Only - * {@link SearchScript}s are supported. + * Provides the infrastructure for Lucene expressions as a scripting language for Elasticsearch. + * + * Only contexts returning numeric types or {@link Object} are supported. 
*/ public class ExpressionScriptEngine extends AbstractComponent implements ScriptEngine { @@ -111,10 +108,7 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE } } }); - if (context.instanceClazz.equals(SearchScript.class)) { - SearchScript.Factory factory = (p, lookup) -> newSearchScript(expr, lookup, p); - return context.factoryClazz.cast(factory); - } else if (context.instanceClazz.equals(BucketAggregationScript.class)) { + if (context.instanceClazz.equals(BucketAggregationScript.class)) { return context.factoryClazz.cast(newBucketAggregationScriptFactory(expr)); } else if (context.instanceClazz.equals(BucketAggregationSelectorScript.class)) { BucketAggregationScript.Factory factory = newBucketAggregationScriptFactory(expr); @@ -178,40 +172,6 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE }; } - private SearchScript.LeafFactory newSearchScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { - // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, - // instead of complicating SimpleBindings (which should stay simple) - SimpleBindings bindings = new SimpleBindings(); - ReplaceableConstDoubleValueSource specialValue = null; - boolean needsScores = false; - for (String variable : expr.variables) { - try { - if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); - needsScores = true; - } else if (variable.equals("_value")) { - specialValue = new ReplaceableConstDoubleValueSource(); - bindings.add("_value", specialValue); - // noop: _value is special for aggregations, and is handled in ExpressionScriptBindings - // TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a - // way to know this is for aggregations and so _value is ok to have... - } else if (vars != null && vars.containsKey(variable)) { - bindFromParams(vars, bindings, variable); - } else { - // delegate valuesource creation based on field's type - // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. - final ValueSource valueSource = getDocValueSource(variable, lookup); - needsScores |= valueSource.getSortField(false).needsScores(); - bindings.add(variable, valueSource.asDoubleValuesSource()); - } - } catch (Exception e) { - // we defer "binding" of variables until here: give context for that variable - throw convertToScriptException("link error", expr.sourceText, variable, e); - } - } - return new ExpressionSearchScript(expr, bindings, specialValue, needsScores); - } - private NumberSortScript.LeafFactory newSortScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, // instead of complicating SimpleBindings (which should stay simple) @@ -315,13 +275,13 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE * See https://github.com/elastic/elasticsearch/issues/26429. 
*/ private FilterScript.LeafFactory newFilterScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { - SearchScript.LeafFactory searchLeafFactory = newSearchScript(expr, lookup, vars); + ScoreScript.LeafFactory searchLeafFactory = newScoreScript(expr, lookup, vars); return ctx -> { - SearchScript script = searchLeafFactory.newInstance(ctx); + ScoreScript script = searchLeafFactory.newInstance(ctx); return new FilterScript(vars, lookup, ctx) { @Override public boolean execute() { - return script.runAsDouble() != 0.0; + return script.execute() != 0.0; } @Override public void setDocument(int docid) { @@ -332,39 +292,37 @@ public class ExpressionScriptEngine extends AbstractComponent implements ScriptE } private ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLookup lookup, @Nullable Map vars) { - SearchScript.LeafFactory searchLeafFactory = newSearchScript(expr, lookup, vars); - return new ScoreScript.LeafFactory() { - @Override - public boolean needs_score() { - return searchLeafFactory.needs_score(); + // NOTE: if we need to do anything complicated with bindings in the future, we can just extend Bindings, + // instead of complicating SimpleBindings (which should stay simple) + SimpleBindings bindings = new SimpleBindings(); + ReplaceableConstDoubleValueSource specialValue = null; + boolean needsScores = false; + for (String variable : expr.variables) { + try { + if (variable.equals("_score")) { + bindings.add(new SortField("_score", SortField.Type.SCORE)); + needsScores = true; + } else if (variable.equals("_value")) { + specialValue = new ReplaceableConstDoubleValueSource(); + bindings.add("_value", specialValue); + // noop: _value is special for aggregations, and is handled in ExpressionScriptBindings + // TODO: if some uses it in a scoring expression, they will get a nasty failure when evaluating...need a + // way to know this is for aggregations and so _value is ok to have... + } else if (vars != null && vars.containsKey(variable)) { + bindFromParams(vars, bindings, variable); + } else { + // delegate valuesource creation based on field's type + // there are three types of "fields" to expressions, and each one has a different "api" of variables and methods. 
+ final ValueSource valueSource = getDocValueSource(variable, lookup); + needsScores |= valueSource.getSortField(false).needsScores(); + bindings.add(variable, valueSource.asDoubleValuesSource()); + } + } catch (Exception e) { + // we defer "binding" of variables until here: give context for that variable + throw convertToScriptException("link error", expr.sourceText, variable, e); } - - @Override - public ScoreScript newInstance(LeafReaderContext ctx) throws IOException { - SearchScript script = searchLeafFactory.newInstance(ctx); - return new ScoreScript(vars, lookup, ctx) { - @Override - public double execute() { - return script.runAsDouble(); - } - - @Override - public void setDocument(int docid) { - script.setDocument(docid); - } - - @Override - public void setScorer(Scorable scorer) { - script.setScorer(scorer); - } - - @Override - public double get_score() { - return script.getScore(); - } - }; - } - }; + } + return new ExpressionScoreScript(expr, bindings, needsScores); } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/GenericElasticsearchScript.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/GenericElasticsearchScript.java deleted file mode 100644 index ef2c9513b8e..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/GenericElasticsearchScript.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless; - -import org.elasticsearch.index.fielddata.ScriptDocValues; - -import java.util.Map; - -/** - * Generic script interface that Painless implements for all Elasticsearch scripts. 
- */ -public abstract class GenericElasticsearchScript { - - public GenericElasticsearchScript() {} - - public static final String[] PARAMETERS = new String[] {"params", "_score", "doc", "_value", "ctx"}; - public abstract Object execute( - Map params, double _score, Map> doc, Object _value, Map ctx); - - public abstract boolean needs_score(); - public abstract boolean needsCtx(); -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index 5ed305751c8..4e7ffbfb8d0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless; -import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; @@ -29,7 +28,6 @@ import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptException; -import org.elasticsearch.script.SearchScript; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; @@ -37,7 +35,6 @@ import org.objectweb.asm.commons.GeneratorAdapter; import java.lang.invoke.MethodType; import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.security.AccessControlContext; import java.security.AccessController; @@ -101,13 +98,8 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr for (Map.Entry, List> entry : contexts.entrySet()) { ScriptContext context = entry.getKey(); - if (context.instanceClazz.equals(SearchScript.class)) { - contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, null, null, - PainlessLookupBuilder.buildFromWhitelists(entry.getValue()))); - } else { - contextsToCompilers.put(context, new Compiler(context.instanceClazz, context.factoryClazz, context.statefulFactoryClazz, - PainlessLookupBuilder.buildFromWhitelists(entry.getValue()))); - } + contextsToCompilers.put(context, new Compiler(context.instanceClazz, context.factoryClazz, context.statefulFactoryClazz, + PainlessLookupBuilder.buildFromWhitelists(entry.getValue()))); } this.contextsToCompilers = Collections.unmodifiableMap(contextsToCompilers); @@ -126,54 +118,24 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { Compiler compiler = contextsToCompilers.get(context); - if (context.instanceClazz.equals(SearchScript.class)) { - Constructor constructor = compile(compiler, scriptName, scriptSource, params); - boolean needsScore; + // Check we ourselves are not being called by unprivileged code. + SpecialPermission.check(); - try { - GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance(); - needsScore = newInstance.needs_score(); - } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { - throw new IllegalArgumentException("internal error"); + // Create our loader (which loads compiled code with no permissions). 
+ final Loader loader = AccessController.doPrivileged(new PrivilegedAction() { + @Override + public Loader run() { + return compiler.createLoader(getClass().getClassLoader()); } + }); - SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() { - @Override - public SearchScript newInstance(final LeafReaderContext context) { - try { - // a new instance is required for the class bindings model to work correctly - GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance(); - return new ScriptImpl(newInstance, p, lookup, context); - } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { - throw new IllegalArgumentException("internal error"); - } - } - @Override - public boolean needs_score() { - return needsScore; - } - }; - return context.factoryClazz.cast(factory); + MainMethodReserved reserved = new MainMethodReserved(); + compile(contextsToCompilers.get(context), loader, reserved, scriptName, scriptSource, params); + + if (context.statefulFactoryClazz != null) { + return generateFactory(loader, context, reserved, generateStatefulFactory(loader, context, reserved)); } else { - // Check we ourselves are not being called by unprivileged code. - SpecialPermission.check(); - - // Create our loader (which loads compiled code with no permissions). - final Loader loader = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Loader run() { - return compiler.createLoader(getClass().getClassLoader()); - } - }); - - MainMethodReserved reserved = new MainMethodReserved(); - compile(contextsToCompilers.get(context), loader, reserved, scriptName, scriptSource, params); - - if (context.statefulFactoryClazz != null) { - return generateFactory(loader, context, reserved, generateStatefulFactory(loader, context, reserved)); - } else { - return generateFactory(loader, context, reserved, WriterConstants.CLASS_TYPE); - } + return generateFactory(loader, context, reserved, WriterConstants.CLASS_TYPE); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java deleted file mode 100644 index c16c3b1bb8a..00000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptImpl.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.painless; - -import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.script.SearchScript; -import org.elasticsearch.search.lookup.LeafSearchLookup; -import org.elasticsearch.search.lookup.SearchLookup; - -import java.util.HashMap; -import java.util.Map; -import java.util.function.DoubleSupplier; -import java.util.function.Function; - -/** - * ScriptImpl can be used as a {@link SearchScript} - * to run a previously compiled Painless script. - */ -final class ScriptImpl extends SearchScript { - - /** - * The Painless script that can be run. - */ - private final GenericElasticsearchScript script; - - /** - * A map that can be used to access input parameters at run-time. - */ - private final Map variables; - - /** - * Looks up the {@code _score} from {@link #scorer} if {@code _score} is used, otherwise returns {@code 0.0}. - */ - private final DoubleSupplier scoreLookup; - - /** - * Looks up the {@code ctx} from the {@link #variables} if {@code ctx} is used, otherwise return {@code null}. - */ - private final Function, Map> ctxLookup; - - /** - * Current _value for aggregation - * @see #setNextAggregationValue(Object) - */ - private Object aggregationValue; - - /** - * Creates a ScriptImpl for the a previously compiled Painless script. - * @param script The previously compiled Painless script. - * @param vars The initial variables to run the script with. - * @param lookup The lookup to allow search fields to be available if this is run as a search script. - */ - ScriptImpl(GenericElasticsearchScript script, Map vars, SearchLookup lookup, LeafReaderContext leafContext) { - super(null, lookup, leafContext); - this.script = script; - this.variables = new HashMap<>(); - - if (vars != null) { - variables.putAll(vars); - } - LeafSearchLookup leafLookup = getLeafLookup(); - if (leafLookup != null) { - variables.putAll(leafLookup.asMap()); - } - - scoreLookup = script.needs_score() ? this::getScore : () -> 0.0; - ctxLookup = script.needsCtx() ? 
variables -> (Map) variables.get("ctx") : variables -> null; - } - - @Override - public Map getParams() { - return variables; - } - - @Override - public void setNextVar(final String name, final Object value) { - variables.put(name, value); - } - - @Override - public void setNextAggregationValue(Object value) { - this.aggregationValue = value; - } - - @Override - public Object run() { - return script.execute(variables, scoreLookup.getAsDouble(), getDoc(), aggregationValue, ctxLookup.apply(variables)); - } - - @Override - public double runAsDouble() { - return ((Number)run()).doubleValue(); - } - -} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index ae33ebfb6e9..38c315b1344 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless; +import org.elasticsearch.painless.PainlessExecuteAction.PainlessTestScript; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Textifier; @@ -31,7 +32,7 @@ final class Debugger { /** compiles source to bytecode, and returns debugging output */ static String toString(final String source) { - return toString(GenericElasticsearchScript.class, source, new CompilerSettings()); + return toString(PainlessTestScript.class, source, new CompilerSettings()); } /** compiles to bytecode, and returns debugging output */ diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 5a4c5de015b..19fece29e42 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -91,7 +91,7 @@ public abstract class ScriptTestCase extends ESTestCase { public Object exec(String script, Map vars, Map compileParams, boolean picky) { // test for ambiguity errors before running the actual script if picky is true if (picky) { - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, GenericElasticsearchScript.class); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, PainlessTestScript.class); CompilerSettings pickySettings = new CompilerSettings(); pickySettings.setPicky(true); pickySettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(scriptEngineSettings())); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index 12d57fab11d..9a284a26978 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -21,10 +21,10 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.FeatureTest; -import org.elasticsearch.painless.GenericElasticsearchScript; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.PainlessExecuteAction.PainlessTestScript; import 
org.elasticsearch.painless.ScriptClassInfo; import org.elasticsearch.painless.antlr.Walker; import org.elasticsearch.painless.lookup.PainlessCast; @@ -897,7 +897,7 @@ public class NodeToStringTests extends ESTestCase { } private SSource walk(String code) { - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, PainlessTestScript.class); CompilerSettings compilerSettings = new CompilerSettings(); compilerSettings.setRegexesEnabled(true); try { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 5edc1659f54..6c55ef2e934 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -22,7 +22,7 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Scorable; -import org.elasticsearch.script.ExplainableSearchScript; +import org.elasticsearch.script.ExplainableScoreScript; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; @@ -75,11 +75,11 @@ public class ScriptScoreFunction extends ScoreFunction { @Override public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { Explanation exp; - if (leafScript instanceof ExplainableSearchScript) { + if (leafScript instanceof ExplainableScoreScript) { leafScript.setDocument(docId); scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); - exp = ((ExplainableSearchScript) leafScript).explain(subQueryScore); + exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); } else { double score = score(docId, subQueryScore.getValue().floatValue()); String explanation = "script score function, computed with script:\"" + sScript + "\""; diff --git a/server/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java b/server/src/main/java/org/elasticsearch/script/ExplainableScoreScript.java similarity index 93% rename from server/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java rename to server/src/main/java/org/elasticsearch/script/ExplainableScoreScript.java index c3970212751..d181d8c6784 100644 --- a/server/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java +++ b/server/src/main/java/org/elasticsearch/script/ExplainableScoreScript.java @@ -43,11 +43,11 @@ import org.apache.lucene.search.Explanation; import java.io.IOException; /** - * To be implemented by {@link SearchScript} which can provided an {@link Explanation} of the score + * To be implemented by {@link ScoreScript} which can provided an {@link Explanation} of the score * This is currently not used inside elasticsearch but it is used, see for example here: * https://github.com/elastic/elasticsearch/issues/8561 */ -public interface ExplainableSearchScript { +public interface ExplainableScoreScript { /** * Build the explanation of the current document being scored diff --git a/server/src/main/java/org/elasticsearch/script/FieldScript.java b/server/src/main/java/org/elasticsearch/script/FieldScript.java index 98649dbb330..29684a64477 100644 --- a/server/src/main/java/org/elasticsearch/script/FieldScript.java +++ 
b/server/src/main/java/org/elasticsearch/script/FieldScript.java @@ -94,7 +94,7 @@ public abstract class FieldScript { leafLookup.setDocument(docid); } - /** A factory to construct {@link SearchScript} instances. */ + /** A factory to construct {@link FieldScript} instances. */ public interface LeafFactory { FieldScript newInstance(LeafReaderContext ctx) throws IOException; } diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index 11b135e9a65..5c533298cbe 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -46,7 +46,8 @@ public abstract class ScoreScript { public ScoreScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { this.params = params; - this.leafLookup = lookup.getLeafSearchLookup(leafContext); + // null check needed b/c of expression engine subclass + this.leafLookup = lookup == null ? null : lookup.getLeafSearchLookup(leafContext); } public abstract double execute(); diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java deleted file mode 100644 index 2fd439564a6..00000000000 --- a/server/src/main/java/org/elasticsearch/script/SearchScript.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.script; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorable; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.lucene.ScorerAware; -import org.elasticsearch.search.lookup.LeafDocLookup; -import org.elasticsearch.search.lookup.LeafSearchLookup; -import org.elasticsearch.search.lookup.SearchLookup; - -import java.io.IOException; -import java.util.Map; - -/** - * A generic script used for per document use cases. - * - * Using a {@link SearchScript} works as follows: - *
- * 1. Construct a {@link Factory} using {@link ScriptService#compile(Script, ScriptContext)}
- * 2. Construct a {@link LeafFactory} for a an index using {@link Factory#newFactory(Map, SearchLookup)}
- * 3. Construct a {@link SearchScript} for a Lucene segment using {@link LeafFactory#newInstance(LeafReaderContext)}
- * 4. Call {@link #setDocument(int)} to indicate which document in the segment the script should be run for next
- * 5. Call one of the {@code run} methods: {@link #run()} or {@link #runAsDouble()}
- */ -public abstract class SearchScript implements ScorerAware { - - /** The generic runtime parameters for the script. */ - private final Map params; - - /** A leaf lookup for the bound segment this script will operate on. */ - private final LeafSearchLookup leafLookup; - - /** A scorer that will return the score for the current document when the script is run. */ - private Scorable scorer; - - public SearchScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { - this.params = params; - // TODO: remove leniency when painless does not implement SearchScript for executable script cases - this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext); - } - - /** Return the parameters for this script. */ - public Map getParams() { - return params; - } - - /** The leaf lookup for the Lucene segment this script was created for. */ - protected final LeafSearchLookup getLeafLookup() { - return leafLookup; - } - - /** The doc lookup for the Lucene segment this script was created for. */ - public final LeafDocLookup getDoc() { - // TODO: remove leniency when painless does not implement SearchScript for executable script cases - return leafLookup == null ? null : leafLookup.doc(); - } - - /** Set the current document to run the script on next. */ - public void setDocument(int docid) { - // TODO: remove leniency when painless does not implement SearchScript for executable script cases - if (leafLookup != null) { - leafLookup.setDocument(docid); - } - } - - @Override - public void setScorer(Scorable scorer) { - this.scorer = scorer; - } - - /** Return the score of the current document. */ - public double getScore() { - // TODO: remove leniency when painless does not implement SearchScript for executable script cases - if (scorer == null) { - return 0.0d; - } - try { - return scorer.score(); - } catch (IOException e) { - throw new ElasticsearchException("couldn't lookup score", e); - } - } - - /** - * Sets per-document aggregation {@code _value}. - *

- * The default implementation just calls {@code setNextVar("_value", value)} but - * some engines might want to handle this differently for better performance. - *

- * @param value per-document value, typically a String, Long, or Double - */ - public void setNextAggregationValue(Object value) { - setNextVar("_value", value); - } - - public void setNextVar(String field, Object value) {} - - - public Object run() { - return runAsDouble(); - } - - /** Return the result as a double. This is the main use case of search script, used for document scoring. */ - public abstract double runAsDouble(); - - /** A factory to construct {@link SearchScript} instances. */ - public interface LeafFactory { - SearchScript newInstance(LeafReaderContext ctx) throws IOException; - - /** - * Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise. - */ - boolean needs_score(); - } - - /** A factory to construct stateful {@link SearchScript} factories for a specific index. */ - public interface Factory { - LeafFactory newFactory(Map params, SearchLookup lookup); - } - - /** The context used to compile {@link SearchScript} factories. */ - public static final ScriptContext CONTEXT = new ScriptContext<>("search", Factory.class); -} diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index c9679ae2ea9..c4b085e84cf 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.ExplainableSearchScript; +import org.elasticsearch.script.ExplainableScoreScript; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -94,7 +94,7 @@ public class ExplainableScriptIT extends ESIntegTestCase { } } - static class MyScript extends ScoreScript implements ExplainableSearchScript { + static class MyScript extends ScoreScript implements ExplainableScoreScript { MyScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { super(params, lookup, leafContext); diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 9ccee034b6b..0379012d2b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -90,8 +90,16 @@ public class MockScriptEngine implements ScriptEngine { "did you declare the mocked script?"); } MockCompiledScript mockCompiled = new MockCompiledScript(name, params, source, script); - if (context.instanceClazz.equals(SearchScript.class)) { - SearchScript.Factory factory = mockCompiled::createSearchScript; + if (context.instanceClazz.equals(FieldScript.class)) { + FieldScript.Factory factory = (parameters, lookup) -> + ctx -> new FieldScript(parameters, lookup, ctx) { + @Override + public Object execute() { + Map vars = createVars(parameters); + vars.putAll(getLeafLookup().asMap()); + return script.apply(vars); + } + }; return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(FieldScript.class)) { FieldScript.Factory factory = (parameters, lookup) -> @@ -311,20 +319,6 @@ public class MockScriptEngine 
implements ScriptEngine { return name; } - public SearchScript.LeafFactory createSearchScript(Map params, SearchLookup lookup) { - Map context = new HashMap<>(); - if (options != null) { - context.putAll(options); // TODO: remove this once scripts know to look for options under options key - context.put("options", options); - } - if (params != null) { - context.putAll(params); // TODO: remove this once scripts know to look for params under params key - context.put("params", params); - } - return new MockSearchScript(lookup, context, script != null ? script : ctx -> source); - } - - public FilterScript.LeafFactory createFilterScript(Map params, SearchLookup lookup) { return new MockFilterScript(lookup, params, script); } @@ -361,62 +355,6 @@ public class MockScriptEngine implements ScriptEngine { } } - public class MockSearchScript implements SearchScript.LeafFactory { - - private final Function, Object> script; - private final Map vars; - private final SearchLookup lookup; - - public MockSearchScript(SearchLookup lookup, Map vars, Function, Object> script) { - this.lookup = lookup; - this.vars = vars; - this.script = script; - } - - @Override - public SearchScript newInstance(LeafReaderContext context) throws IOException { - LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); - - Map ctx = new HashMap<>(leafLookup.asMap()); - if (vars != null) { - ctx.putAll(vars); - } - - return new SearchScript(vars, lookup, context) { - @Override - public Object run() { - return script.apply(ctx); - } - - @Override - public double runAsDouble() { - return ((Number) run()).doubleValue(); - } - - @Override - public void setNextVar(String name, Object value) { - ctx.put(name, value); - } - - @Override - public void setScorer(Scorable scorer) { - ctx.put("_score", new ScoreAccessor(scorer)); - } - - @Override - public void setDocument(int doc) { - leafLookup.setDocument(doc); - } - }; - } - - @Override - public boolean needs_score() { - return true; - } - } - - public static class MockFilterScript implements FilterScript.LeafFactory { private final Function, Object> script; @@ -602,10 +540,10 @@ public class MockScriptEngine implements ScriptEngine { public class MockScoreScript implements ScoreScript.Factory { - private final Function, Object> scripts; + private final Function, Object> script; - MockScoreScript(Function, Object> scripts) { - this.scripts = scripts; + public MockScoreScript(Function, Object> script) { + this.script = script; } @Override @@ -627,7 +565,7 @@ public class MockScriptEngine implements ScriptEngine { if (scorerHolder[0] != null) { vars.put("_score", new ScoreAccessor(scorerHolder[0])); } - return ((Number) scripts.apply(vars)).doubleValue(); + return ((Number) script.apply(vars)).doubleValue(); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/monitoring/test/MockPainlessScriptEngine.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/monitoring/test/MockPainlessScriptEngine.java index 5aeeb47db7e..2052cebe1d0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/monitoring/test/MockPainlessScriptEngine.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/monitoring/test/MockPainlessScriptEngine.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.core.monitoring.test; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; +import org.elasticsearch.script.ScoreScript; import 
org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.script.SearchScript; import java.util.Collection; import java.util.Collections; @@ -43,9 +43,8 @@ public class MockPainlessScriptEngine extends MockScriptEngine { @Override public T compile(String name, String script, ScriptContext context, Map options) { - MockCompiledScript compiledScript = new MockCompiledScript(name, options, script, p -> script); - if (context.instanceClazz.equals(SearchScript.class)) { - return context.factoryClazz.cast((SearchScript.Factory) compiledScript::createSearchScript); + if (context.instanceClazz.equals(ScoreScript.class)) { + return context.factoryClazz.cast(new MockScoreScript(p -> 0.0)); } throw new IllegalArgumentException("mock painless does not know how to handle context [" + context.name + "]"); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 14e96678d14..f87d9454f2d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -50,7 +50,6 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -223,9 +222,6 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa Setting.byteSizeSetting("xpack.watcher.bulk.size", new ByteSizeValue(1, ByteSizeUnit.MB), new ByteSizeValue(1, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.MB), NodeScope); - - public static final ScriptContext SCRIPT_SEARCH_CONTEXT = - new ScriptContext<>("xpack", SearchScript.Factory.class); public static final ScriptContext SCRIPT_TEMPLATE_CONTEXT = new ScriptContext<>("xpack_template", TemplateScript.Factory.class); @@ -670,8 +666,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa @Override public List> getContexts() { - return Arrays.asList(Watcher.SCRIPT_SEARCH_CONTEXT, WatcherTransformScript.CONTEXT, - WatcherConditionScript.CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT); + return Arrays.asList(WatcherTransformScript.CONTEXT, WatcherConditionScript.CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT); } @Override diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java index 2908dbaa6cc..5a03a3c0e64 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/WatcherMockScriptPlugin.java @@ -56,7 +56,7 @@ public abstract class WatcherMockScriptPlugin extends MockScriptPlugin { } public static final List> CONTEXTS = Collections.unmodifiableList(Arrays.asList( - WatcherConditionScript.CONTEXT, WatcherTransformScript.CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT, Watcher.SCRIPT_SEARCH_CONTEXT + WatcherConditionScript.CONTEXT, WatcherTransformScript.CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT )); @Override diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index 45b85caacc0..5c0562c0a00 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -76,7 +76,6 @@ public class SearchInputTests extends ESTestCase { engines.put(MockMustacheScriptEngine.NAME, new MockMustacheScriptEngine()); Map> contexts = new HashMap<>(); contexts.put(Watcher.SCRIPT_TEMPLATE_CONTEXT.name, Watcher.SCRIPT_TEMPLATE_CONTEXT); - contexts.put(Watcher.SCRIPT_SEARCH_CONTEXT.name, Watcher.SCRIPT_SEARCH_CONTEXT); contexts.put(WatcherTransformScript.CONTEXT.name, WatcherTransformScript.CONTEXT); scriptService = new ScriptService(Settings.EMPTY, engines, contexts); From 153157e56d5f8509b5cc503de56113f7025d4c4f Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 24 Oct 2018 23:17:45 -0400 Subject: [PATCH 50/67] Enable ingest attachment docs tests on JDK 11 (#34770) These were disabled because ingest attachement was not playing well with JDK 11. We have addressed that by upgrading the Tika dependency, yet forgot to reenable these. This commit enables these tests again. --- docs/build.gradle | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/docs/build.gradle b/docs/build.gradle index dd1846dc045..ce560e1ca42 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -61,17 +61,7 @@ integTestCluster { systemProperty 'es.scripting.update.ctx_in_params', 'false' } -// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed -if (rootProject.ext.compilerJavaVersion.isJava11()) { - integTestRunner { - systemProperty 'tests.rest.blacklist', [ - 'plugins/ingest-attachment/line_164', - 'plugins/ingest-attachment/line_117' - ].join(',') - } -} -// Build the cluster with all plugins - +// build the cluster with all plugins project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> /* Skip repositories. We just aren't going to be able to test them so it * doesn't make sense to waste time installing them. */ From 59536966c2c90256aee87101c70b8074b532b165 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 25 Oct 2018 08:50:50 +0300 Subject: [PATCH 51/67] Add a new "contains" feature (#34738) The contains syntax was added in #30874 but the skips were not properly put in place. The java runner has the feature so the tests will run as part of the build, but language clients will be able to support it at their own pace. 
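For reference, a test that exercises the new assertion looks roughly like this (modelled on the module smoke tests touched in this patch; the module name and response path are only an example, not part of the change):

    "Module loaded":
      - skip:
          reason: "contains is a newly added assertion"
          features: contains

      - do:
          cluster.state: {}

      # stash the elected master node id from the cluster state response
      - set: { master_node: master }

      - do:
          nodes.info: {}

      # passes if the list of loaded modules contains an entry whose name matches
      - contains: { nodes.$master.modules: { name: analysis-common } }
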
--- .../test/resources/rest-api-spec/test/stats/10_basic.yml | 3 +++ .../rest-api-spec/test/analysis-common/10_basic.yml | 3 +++ .../test/resources/rest-api-spec/test/ingest/10_basic.yml | 3 +++ .../rest-api-spec/test/lang_expression/10_basic.yml | 3 +++ .../resources/rest-api-spec/test/lang_mustache/10_basic.yml | 3 +++ .../test/resources/rest-api-spec/test/painless/10_basic.yml | 3 +++ .../rest-api-spec/test/repository_url/10_basic.yml | 3 +++ .../src/test/resources/rest-api-spec/test/10_basic.yml | 3 +++ .../rest-api-spec/test/discovery_azure_classic/10_basic.yml | 3 +++ .../resources/rest-api-spec/test/discovery_ec2/10_basic.yml | 3 +++ .../resources/rest-api-spec/test/discovery_gce/10_basic.yml | 3 +++ .../rest-api-spec/test/custom-suggester/10_basic.yml | 3 +++ .../rest-api-spec/test/painless_whitelist/10_basic.yml | 3 +++ .../rest-api-spec/test/example-rescore/10_basic.yml | 3 +++ .../rest-api-spec/test/script_expert_scoring/10_basic.yml | 3 +++ .../rest-api-spec/test/ingest_attachment/10_basic.yml | 3 +++ .../resources/rest-api-spec/test/ingest_geoip/10_basic.yml | 3 +++ .../rest-api-spec/test/ingest-useragent/10_basic.yml | 3 +++ .../rest-api-spec/test/repository_azure/10_basic.yml | 3 +++ .../rest-api-spec/test/repository_gcs/10_basic.yml | 3 +++ .../rest-api-spec/test/hdfs_repository/10_basic.yml | 3 +++ .../rest-api-spec/test/secure_hdfs_repository/10_basic.yml | 3 +++ .../resources/rest-api-spec/test/repository_s3/10_basic.yml | 3 +++ .../resources/rest-api-spec/test/store_smb/10_basic.yml | 3 +++ .../java/org/elasticsearch/test/rest/yaml/Features.java | 4 +++- .../rest-api-spec/test/privileges/40_get_user_privs.yml | 6 ++++++ .../test/resources/rest-api-spec/test/xpack/10_basic.yml | 3 +++ 27 files changed, 84 insertions(+), 1 deletion(-) diff --git a/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml b/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml index cde34dfa107..2416d2b2b31 100644 --- a/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml +++ b/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Matrix Aggs Plugin # "Matrix stats aggs loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml index b9b905639fd..ca6cd2e953b 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml @@ -1,4 +1,7 @@ "Module loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml index eb23b7840ee..f83a9e78cb3 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml @@ -1,4 +1,7 @@ "Ingest common installed": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git 
a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml index 0ca21cab930..00ad6f890b0 100644 --- a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml +++ b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Expression scripts # "Expression loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml index 1a014e9ccea..0e853d62731 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Mustache scripts # "Mustache loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml index 6d008a484ee..e442b40ffb8 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Painless Plugin # "Painless plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml b/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml index b8181040665..1adbfc73bc7 100644 --- a/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml +++ b/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml @@ -103,6 +103,9 @@ teardown: --- "Module repository-url is loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml index e8b23fa7140..19728c7d34c 100644 --- a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml +++ b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Netty transport # "Netty loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml index 6d12da177ea..39aa9929f8a 100644 --- a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml +++ b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Azure Classic Discovery component # "Discovery Azure Classic loaded": + - skip: + reason: "contains is a 
newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml index 3c5866663b9..ba51c623fe8 100644 --- a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml +++ b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Discovery EC2 component # "Discovery EC2 loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml index f16599c40fa..a5379c2c68b 100644 --- a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml +++ b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for Discovery GCE components # "Discovery GCE loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/10_basic.yml b/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/10_basic.yml index 29fbcdac99d..ed8d0f78a09 100644 --- a/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/10_basic.yml +++ b/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/10_basic.yml @@ -1,6 +1,9 @@ # tests that the custom suggester plugin is installed --- "plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml index a915c08067e..1b887058237 100644 --- a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for the painless whitelist example plugin # "Plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml b/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml index 62a47df9d78..f0d0bcb35fa 100644 --- a/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml +++ b/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for the expert scoring script example plugin # "Plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml b/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml index 26980a95b73..70842d5e767 100644 --- 
a/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml +++ b/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for the expert scoring script example plugin # "Plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml index 42be90f77f9..607fa5bf8b7 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml @@ -1,4 +1,7 @@ "Ingest attachment plugin installed": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml index 413745eab40..ef6346d4256 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml @@ -1,4 +1,7 @@ "Ingest plugin installed": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml index 4cb1c9b1fba..8fef34604b7 100644 --- a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml +++ b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml @@ -1,4 +1,7 @@ "ingest-user-agent plugin installed": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml index 199d543dda8..fe21a295e37 100644 --- a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for repository-azure # "Plugin repository-azure is loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml index 5c8fa70bb7a..072836280b3 100644 --- a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml +++ b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for repository-gcs # "Plugin repository-gcs is loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml 
b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml index f11e0148402..bc419d75ba7 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml @@ -3,6 +3,9 @@ # Check plugin is installed # "Plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml index f11e0148402..bc419d75ba7 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml @@ -3,6 +3,9 @@ # Check plugin is installed # "Plugin loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml index 190a628f0b3..cde14321805 100644 --- a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for repository-s3 # "Plugin repository-s3 is loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml index 60228c1b923..8956b3a8c11 100644 --- a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml +++ b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for SMB Store component # "SMB Store loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java index cfce0653d31..d3fb500ac05 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java @@ -46,7 +46,9 @@ public final class Features { "stash_in_path", "stash_path_replace", "warnings", - "yaml")); + "yaml", + "contains" + )); private Features() { diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/40_get_user_privs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/40_get_user_privs.yml index eccd37565c7..2019d4586a7 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/40_get_user_privs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/privileges/40_get_user_privs.yml @@ -202,6 +202,9 @@ teardown: --- "Test get_user_privileges for single role": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: headers: { Authorization: "Basic dGVzdC0xOjEyMzQ1Njc4" } # test-1 xpack.security.get_user_privileges: {} @@ -261,6 +264,9 @@ teardown: --- "Test get_user_privileges for 
merged roles": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: headers: { Authorization: "Basic dGVzdC0zOjEyMzQ1Njc4" } # test-3 xpack.security.get_user_privileges: {} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml index 2aea0126e9e..47e1a1160b5 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml @@ -1,6 +1,9 @@ # Integration tests for monitoring # "X-Pack loaded": + - skip: + reason: "contains is a newly added assertion" + features: contains - do: cluster.state: {} From e7ced94a65886eade5d6bbb461e194e493094228 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Thu, 25 Oct 2018 07:51:14 +0200 Subject: [PATCH 52/67] NETWORKING: Add SSL Handler before other Handlers (#34636) * NETWORKING: Add SSL Handler before other Handlers * The only way to run into the issue in #33998 is for `Netty4MessageChannelHandler` to be in the pipeline while the SslHandler is not. Adding the SslHandler before any other handlers should ensure correct ordering here even when we handle upstream events in our own thread pool * Ensure that channels that were closed concurrently don't trip the assertion * Closes #33998 --- .../transport/netty4/SecurityNetty4Transport.java | 3 ++- .../xpack/security/transport/SSLEngineUtils.java | 9 ++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index 36b480c29c7..e76302aebb0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -157,11 +157,12 @@ public class SecurityNetty4Transport extends Netty4Transport { @Override protected void initChannel(Channel ch) throws Exception { - super.initChannel(ch); SSLEngine serverEngine = sslService.createSSLEngine(configuration, null, -1); serverEngine.setUseClientMode(false); final SslHandler sslHandler = new SslHandler(serverEngine); ch.pipeline().addFirst("sslhandler", sslHandler); + super.initChannel(ch); + assert ch.pipeline().first() == sslHandler : "SSL handler must be first handler in pipeline"; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java index 5bbcbaa0509..32b153b1935 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.transport; import io.netty.channel.Channel; +import io.netty.channel.ChannelException; import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; @@ -59,7 +60,13 @@ public class SSLEngineUtils { if (tcpChannel instanceof Netty4TcpChannel) { Channel nettyChannel = ((Netty4TcpChannel) tcpChannel).getNettyChannel(); SslHandler handler = 
nettyChannel.pipeline().get(SslHandler.class); - assert handler != null : "Must have SslHandler"; + if (handler == null) { + if (nettyChannel.isOpen()) { + assert false : "Must have SslHandler"; + } else { + throw new ChannelException("Channel is closed."); + } + } return handler.engine(); } else if (tcpChannel instanceof NioTcpChannel) { SocketChannelContext context = ((NioTcpChannel) tcpChannel).getContext(); From bb807b147b5a51e108621c51eb4a67a0fc3e98ec Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 25 Oct 2018 08:09:45 +0200 Subject: [PATCH 53/67] fixed test --- .../rest-api-spec/test/painless_whitelist/40_instance.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml index 6cb7e4f3d40..712294baa6d 100644 --- a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/40_instance.yml @@ -1,6 +1,6 @@ # Example tests using an instance binding -"custom instance binding": +" custom instance binding": - do: index: index: test @@ -11,8 +11,8 @@ indices.refresh: {} - do: - index: test search: + index: test body: query: match_all: {} @@ -26,8 +26,8 @@ - match: { hits.hits.0.fields.sNum1.0: 2 } - do: - index: test search: + index: test body: query: match_all: {} From 13ca942152c611b2c44689e226cdfdf00b59e71a Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 25 Oct 2018 10:03:23 +0300 Subject: [PATCH 54/67] Switch build-tools to latest target version (#34746) - we already require Java 11 to build, yet we target the minimum supported version in build-tools ( currently 8 ) - this is because we have some checks that are executed in a new JVM which could be running the minimum version. - For everything else it would be nice to be able to use new features, like the new process API. With this change, we selectively compile the few classes that need an older target version and move everything over to Java 10. Unfortunately the current Gradle version does not support 11 as a target version yet. 
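Distilled from the buildSrc/build.gradle diff below, the approach is a dedicated source set whose classes keep the minimum runtime as their compile target while the rest of build-tools moves to Java 10 (a sketch that mirrors the change below rather than adding anything new):

    sourceSets {
      minimumRuntime {
        // only the classes that later run inside a JVM that may be the minimum supported version
        groovy {
          srcDirs = ['src/main/minimumRuntime']
        }
      }
    }

    compileMinimumRuntimeGroovy {
      // target the minimum runtime version recorded in the resources file
      String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim()
      targetCompatibility = minimumRuntimeVersion
      sourceCompatibility = minimumRuntimeVersion
    }

    dependencies {
      // make the older-target classes visible to the rest of build-tools
      compile sourceSets.minimumRuntime.output
    }
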
--- buildSrc/build.gradle | 55 ++++++++++++++----- .../elasticsearch/gradle/JdkJarHellCheck.java | 0 .../test/NamingConventionsCheck.java | 0 3 files changed, 41 insertions(+), 14 deletions(-) rename buildSrc/src/main/{java => minimumRuntime}/org/elasticsearch/gradle/JdkJarHellCheck.java (100%) rename buildSrc/src/main/{java => minimumRuntime}/org/elasticsearch/test/NamingConventionsCheck.java (100%) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 82b1d8525b1..71828468e64 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -31,22 +31,12 @@ if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion)) { throw new GradleException("Gradle ${minimumGradleVersion}+ is required to build elasticsearch") } -if (JavaVersion.current() < JavaVersion.VERSION_1_8) { - throw new GradleException('Java 1.8 is required to build elasticsearch gradle tools') -} - if (project == rootProject) { // change the build dir used during build init, so that doing a clean // won't wipe out the buildscript jar buildDir = 'build-bootstrap' } -// Make sure :buildSrc: doesn't generate classes incompatible with RUNTIME_JAVA_HOME -// We can't use BuildPlugin here, so read from file -String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim() -targetCompatibility = minimumRuntimeVersion -sourceCompatibility = minimumRuntimeVersion - /***************************************************************************** * Propagating version.properties to the rest of the build * *****************************************************************************/ @@ -82,6 +72,45 @@ processResources { from tempPropertiesFile } + +if (JavaVersion.current() < JavaVersion.VERSION_1_10) { + throw new GradleException('At least Java 10 is required to build elasticsearch gradle tools') +} + +/***************************************************************************** + * Java version * + *****************************************************************************/ + +// Gradle 4.10 does not support setting this to 11 yet +targetCompatibility = "10" +sourceCompatibility = "10" + +// We have a few classes that need to be compiled for older java versions because these are used to run checks against +// those +sourceSets { + minimumRuntime { + // We only want Java here, but the Groovy doesn't configure javadoc correctly if we don't define this as groovy + groovy { + srcDirs = ['src/main/minimumRuntime'] + } + } +} +compileMinimumRuntimeGroovy { + // We can't use BuildPlugin here, so read from file + String minimumRuntimeVersion = file('src/main/resources/minimumRuntimeVersion').text.trim() + targetCompatibility = minimumRuntimeVersion + sourceCompatibility = minimumRuntimeVersion +} +dependencies { + compile sourceSets.minimumRuntime.output + minimumRuntimeCompile "junit:junit:${props.getProperty('junit')}" + minimumRuntimeCompile localGroovy() +} +jar { + from sourceSets.minimumRuntime.output +} + + /***************************************************************************** * Dependencies used by the entire build * *****************************************************************************/ @@ -94,10 +123,7 @@ dependencies { compile localGroovy() compile "com.carrotsearch.randomizedtesting:junit4-ant:${props.getProperty('randomizedrunner')}" compile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}" - - compile("junit:junit:${props.getProperty('junit')}") { - transitive = false - } + compile 
'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' compile 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' compile 'com.netflix.nebula:gradle-info-plugin:3.0.3' @@ -156,6 +182,7 @@ if (project != rootProject) { dependenciesInfo.enabled = false forbiddenApisMain.enabled = false forbiddenApisTest.enabled = false + forbiddenApisMinimumRuntime.enabled = false jarHell.enabled = false thirdPartyAudit.enabled = false diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java b/buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/JdkJarHellCheck.java similarity index 100% rename from buildSrc/src/main/java/org/elasticsearch/gradle/JdkJarHellCheck.java rename to buildSrc/src/main/minimumRuntime/org/elasticsearch/gradle/JdkJarHellCheck.java diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java b/buildSrc/src/main/minimumRuntime/org/elasticsearch/test/NamingConventionsCheck.java similarity index 100% rename from buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java rename to buildSrc/src/main/minimumRuntime/org/elasticsearch/test/NamingConventionsCheck.java From ff49e79d405cf359e9f8d5e031117172b401d79f Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 25 Oct 2018 04:36:15 -0400 Subject: [PATCH 55/67] CCR: Rename follow-task parameters and stats (#34836) * CCR: Rename follow parameters and stats This commit renames the follow-task parameters and its stats. Below are the changes: ## Params - remote_cluster (unchanged) - leader_index (unchanged) - max_read_request_operation_count -> max_read_request_operation_count - max_batch_size -> max_read_request_size - max_write_request_operation_count (new) - max_write_request_size (new) - max_concurrent_read_batches -> max_outstanding_read_requests - max_concurrent_write_batches -> max_outstanding_write_requests - max_write_buffer_size (unchanged) - max_write_buffer_count (unchanged) - max_retry_delay (unchanged) - poll_timeout -> read_poll_timeout ## Stats - remote_cluster (unchanged) - leader_index (unchanged) - follower_index (unchanged) - shard_id (unchanged) - leader_global_checkpoint (unchanged) - leader_max_seq_no (unchanged) - follower_global_checkpoint (unchanged) - follower_max_seq_no (unchanged) - last_requested_seq_no (unchanged) - number_of_concurrent_reads -> outstanding_read_requests - number_of_concurrent_writes -> outstanding_write_requests - buffer_size_in_bytes -> write_buffer_size_in_bytes (new) - number_of_queued_writes -> write_buffer_operation_count - mapping_version -> follower_mapping_version - total_fetch_time_millis -> total_read_time_millis - total_fetch_remote_time_millis -> total_read_remote_exec_time_millis - number_of_successful_fetches -> successful_read_requests - number_of_failed_fetches -> failed_read_requests - operation_received -> operations_read - total_transferred_bytes -> bytes_read - total_index_time_millis -> total_write_time_millis [?] 
- number_of_successful_bulk_operations -> successful_write_requests - number_of_failed_bulk_operations -> failed_write_requests - number_of_operations_indexed -> operations_written - fetch_exception -> read_exceptions - time_since_last_read_millis -> time_since_last_read_millis * add test for max_write_request_(operation_count|size) --- .../rest-api-spec/test/ccr/auto_follow.yml | 6 +- .../rest-api-spec/test/ccr/follow_stats.yml | 30 +- .../xpack/ccr/ESCCRRestTestCase.java | 8 +- .../ccr/action/AutoFollowCoordinator.java | 12 +- .../xpack/ccr/action/ShardChangesAction.java | 4 +- .../xpack/ccr/action/ShardFollowNodeTask.java | 134 ++--- .../xpack/ccr/action/ShardFollowTask.java | 161 +++--- .../ccr/action/ShardFollowTasksExecutor.java | 4 +- .../TransportPutAutoFollowPatternAction.java | 8 +- .../action/TransportResumeFollowAction.java | 72 ++- .../elasticsearch/xpack/CcrIntegTestCase.java | 4 +- .../xpack/CcrSingleNodeTestCase.java | 6 +- .../elasticsearch/xpack/ccr/AutoFollowIT.java | 34 +- .../xpack/ccr/AutoFollowMetadataTests.java | 2 + .../elasticsearch/xpack/ccr/CcrLicenseIT.java | 2 +- .../xpack/ccr/IndexFollowingIT.java | 41 +- .../action/AutoFollowCoordinatorTests.java | 26 +- .../GetAutoFollowPatternResponseTests.java | 2 + .../PutAutoFollowPatternRequestTests.java | 6 +- .../ResumeFollowActionRequestTests.java | 10 +- .../ShardFollowNodeTaskRandomTests.java | 14 +- .../ShardFollowNodeTaskStatusTests.java | 36 +- .../ccr/action/ShardFollowNodeTaskTests.java | 386 +++++++++------ .../ShardFollowTaskReplicationTests.java | 4 +- .../ccr/action/ShardFollowTaskTests.java | 2 + ...ortDeleteAutoFollowPatternActionTests.java | 6 +- ...nsportGetAutoFollowPatternActionTests.java | 4 +- ...nsportPutAutoFollowPatternActionTests.java | 2 +- .../action/TransportUnfollowActionTests.java | 4 +- .../ccr/FollowStatsMonitoringDocTests.java | 92 ++-- .../xpack/core/ccr/AutoFollowMetadata.java | 154 +++--- .../core/ccr/ShardFollowNodeTaskStatus.java | 464 +++++++++--------- .../action/PutAutoFollowPatternAction.java | 126 +++-- .../core/ccr/action/PutFollowAction.java | 36 +- .../core/ccr/action/ResumeFollowAction.java | 195 +++++--- .../src/main/resources/monitoring-es.json | 34 +- 36 files changed, 1214 insertions(+), 917 deletions(-) diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml index e0f058080cb..4d4026f46a4 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml @@ -25,7 +25,7 @@ body: remote_cluster: local leader_index_patterns: ['logs-*'] - max_concurrent_read_batches: 2 + max_outstanding_read_requests: 2 - is_true: acknowledged - do: @@ -33,13 +33,13 @@ name: my_pattern - match: { my_pattern.remote_cluster: 'local' } - match: { my_pattern.leader_index_patterns: ['logs-*'] } - - match: { my_pattern.max_concurrent_read_batches: 2 } + - match: { my_pattern.max_outstanding_read_requests: 2 } - do: ccr.get_auto_follow_pattern: {} - match: { my_pattern.remote_cluster: 'local' } - match: { my_pattern.leader_index_patterns: ['logs-*'] } - - match: { my_pattern.max_concurrent_read_batches: 2 } + - match: { my_pattern.max_outstanding_read_requests: 2 } - do: ccr.delete_auto_follow_pattern: diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml 
b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml index 29bb68369d8..97c538b60bc 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/follow_stats.yml @@ -56,21 +56,21 @@ - gte: { indices.0.shards.0.follower_global_checkpoint: -1 } - gte: { indices.0.shards.0.follower_max_seq_no: -1 } - gte: { indices.0.shards.0.last_requested_seq_no: -1 } - - gte: { indices.0.shards.0.number_of_concurrent_reads: 0 } - - match: { indices.0.shards.0.number_of_concurrent_writes: 0 } - - match: { indices.0.shards.0.number_of_queued_writes: 0 } - - gte: { indices.0.shards.0.mapping_version: 0 } - - gte: { indices.0.shards.0.total_fetch_time_millis: 0 } - - gte: { indices.0.shards.0.number_of_successful_fetches: 0 } - - gte: { indices.0.shards.0.number_of_failed_fetches: 0 } - - match: { indices.0.shards.0.operations_received: 0 } - - match: { indices.0.shards.0.total_transferred_bytes: 0 } - - match: { indices.0.shards.0.total_index_time_millis: 0 } - - match: { indices.0.shards.0.number_of_successful_bulk_operations: 0 } - - match: { indices.0.shards.0.number_of_failed_bulk_operations: 0 } - - match: { indices.0.shards.0.number_of_operations_indexed: 0 } - - length: { indices.0.shards.0.fetch_exceptions: 0 } - - gte: { indices.0.shards.0.time_since_last_fetch_millis: -1 } + - gte: { indices.0.shards.0.outstanding_read_requests: 0 } + - match: { indices.0.shards.0.outstanding_write_requests: 0 } + - match: { indices.0.shards.0.write_buffer_operation_count: 0 } + - gte: { indices.0.shards.0.follower_mapping_version: 0 } + - gte: { indices.0.shards.0.total_read_time_millis: 0 } + - gte: { indices.0.shards.0.successful_read_requests: 0 } + - gte: { indices.0.shards.0.failed_read_requests: 0 } + - match: { indices.0.shards.0.operations_read: 0 } + - match: { indices.0.shards.0.bytes_read: 0 } + - match: { indices.0.shards.0.total_write_time_millis: 0 } + - match: { indices.0.shards.0.successful_write_requests: 0 } + - match: { indices.0.shards.0.failed_write_requests: 0 } + - match: { indices.0.shards.0.operations_written: 0 } + - length: { indices.0.shards.0.read_exceptions: 0 } + - gte: { indices.0.shards.0.time_since_last_read_millis: -1 } - do: ccr.pause_follow: diff --git a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java index 3b8951b343d..14780702fc4 100644 --- a/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java +++ b/x-pack/plugin/ccr/qa/src/main/java/org/elasticsearch/xpack/ccr/ESCCRRestTestCase.java @@ -59,7 +59,7 @@ public class ESCCRRestTestCase extends ESRestTestCase { protected static void resumeFollow(String followIndex) throws IOException { final Request request = new Request("POST", "/" + followIndex + "/_ccr/resume_follow"); - request.setJsonEntity("{\"poll_timeout\": \"10ms\"}"); + request.setJsonEntity("{\"read_poll_timeout\": \"10ms\"}"); assertOK(client().performRequest(request)); } @@ -74,7 +74,7 @@ public class ESCCRRestTestCase extends ESRestTestCase { protected static void followIndex(RestClient client, String leaderCluster, String leaderIndex, String followIndex) throws IOException { final Request request = new Request("PUT", "/" + followIndex + "/_ccr/follow"); request.setJsonEntity("{\"remote_cluster\": \"" + leaderCluster + "\", \"leader_index\": \"" + leaderIndex + - "\", 
\"poll_timeout\": \"10ms\"}"); + "\", \"read_poll_timeout\": \"10ms\"}"); assertOK(client.performRequest(request)); } @@ -136,10 +136,10 @@ public class ESCCRRestTestCase extends ESRestTestCase { assertThat(followerIndex, equalTo(expectedFollowerIndex)); int foundNumberOfOperationsReceived = - (int) XContentMapValues.extractValue("_source.ccr_stats.operations_received", hit); + (int) XContentMapValues.extractValue("_source.ccr_stats.operations_read", hit); numberOfOperationsReceived = Math.max(numberOfOperationsReceived, foundNumberOfOperationsReceived); int foundNumberOfOperationsIndexed = - (int) XContentMapValues.extractValue("_source.ccr_stats.number_of_operations_indexed", hit); + (int) XContentMapValues.extractValue("_source.ccr_stats.operations_written", hit); numberOfOperationsIndexed = Math.max(numberOfOperationsIndexed, foundNumberOfOperationsIndexed); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index b5ba39ae7e2..b32ed829cf4 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -324,14 +324,16 @@ public class AutoFollowCoordinator implements ClusterStateApplier { ResumeFollowAction.Request followRequest = new ResumeFollowAction.Request(); followRequest.setFollowerIndex(followIndexName); - followRequest.setMaxBatchOperationCount(pattern.getMaxBatchOperationCount()); - followRequest.setMaxConcurrentReadBatches(pattern.getMaxConcurrentReadBatches()); - followRequest.setMaxBatchSize(pattern.getMaxBatchSize()); - followRequest.setMaxConcurrentWriteBatches(pattern.getMaxConcurrentWriteBatches()); + followRequest.setMaxReadRequestOperationCount(pattern.getMaxReadRequestOperationCount()); + followRequest.setMaxReadRequestSize(pattern.getMaxReadRequestSize()); + followRequest.setMaxOutstandingReadRequests(pattern.getMaxOutstandingReadRequests()); + followRequest.setMaxWriteRequestOperationCount(pattern.getMaxWriteRequestOperationCount()); + followRequest.setMaxWriteRequestSize(pattern.getMaxWriteRequestSize()); + followRequest.setMaxOutstandingWriteRequests(pattern.getMaxOutstandingWriteRequests()); followRequest.setMaxWriteBufferCount(pattern.getMaxWriteBufferCount()); followRequest.setMaxWriteBufferSize(pattern.getMaxWriteBufferSize()); followRequest.setMaxRetryDelay(pattern.getMaxRetryDelay()); - followRequest.setPollTimeout(pattern.getPollTimeout()); + followRequest.setReadPollTimeout(pattern.getPollTimeout()); PutFollowAction.Request request = new PutFollowAction.Request(); request.setRemoteCluster(remoteCluster); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 611d3197869..cf54a236a04 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -65,8 +65,8 @@ public class ShardChangesAction extends Action { private int maxOperationCount; private ShardId shardId; private String expectedHistoryUUID; - private TimeValue pollTimeout = TransportResumeFollowAction.DEFAULT_POLL_TIMEOUT; - private ByteSizeValue maxBatchSize = TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE; + private 
TimeValue pollTimeout = TransportResumeFollowAction.DEFAULT_READ_POLL_TIMEOUT; + private ByteSizeValue maxBatchSize = TransportResumeFollowAction.DEFAULT_MAX_READ_REQUEST_SIZE; private long relativeStartNanos; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 9788195c7e5..8c302344ad8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -70,19 +70,19 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private long lastRequestedSeqNo; private long followerGlobalCheckpoint = 0; private long followerMaxSeqNo = 0; - private int numConcurrentReads = 0; - private int numConcurrentWrites = 0; + private int numOutstandingReads = 0; + private int numOutstandingWrites = 0; private long currentMappingVersion = 0; - private long totalFetchTookTimeMillis = 0; - private long totalFetchTimeMillis = 0; - private long numberOfSuccessfulFetches = 0; - private long numberOfFailedFetches = 0; - private long operationsReceived = 0; - private long totalTransferredBytes = 0; - private long totalIndexTimeMillis = 0; - private long numberOfSuccessfulBulkOperations = 0; - private long numberOfFailedBulkOperations = 0; - private long numberOfOperationsIndexed = 0; + private long totalReadRemoteExecTimeMillis = 0; + private long totalReadTimeMillis = 0; + private long successfulReadRequests = 0; + private long failedReadRequests = 0; + private long operationsRead = 0; + private long bytesRead = 0; + private long totalWriteTimeMillis = 0; + private long successfulWriteRequests = 0; + private long failedWriteRequests = 0; + private long operationWritten = 0; private long lastFetchTime = -1; private final Queue buffer = new PriorityQueue<>(Comparator.comparing(Translog.Operation::seqNo)); private long bufferSizeInBytes = 0; @@ -104,7 +104,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { this.fetchExceptions = new LinkedHashMap>() { @Override protected boolean removeEldestEntry(final Map.Entry> eldest) { - return size() > params.getMaxConcurrentReadBatches(); + return size() > params.getMaxOutstandingReadRequests(); } }; } @@ -129,12 +129,12 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { } // updates follower mapping, this gets us the leader mapping version and makes sure that leader and follower mapping are identical - updateMapping(mappingVersion -> { + updateMapping(followerMappingVersion -> { synchronized (ShardFollowNodeTask.this) { - currentMappingVersion = mappingVersion; + currentMappingVersion = followerMappingVersion; } - LOGGER.info("{} Started to follow leader shard {}, followGlobalCheckPoint={}, mappingVersion={}", - params.getFollowShardId(), params.getLeaderShardId(), followerGlobalCheckpoint, mappingVersion); + LOGGER.info("{} Started to follow leader shard {}, followGlobalCheckPoint={}, followerMappingVersion={}", + params.getFollowShardId(), params.getLeaderShardId(), followerGlobalCheckpoint, followerMappingVersion); coordinateReads(); }); } @@ -147,41 +147,41 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { LOGGER.trace("{} coordinate reads, lastRequestedSeqNo={}, leaderGlobalCheckpoint={}", params.getFollowShardId(), lastRequestedSeqNo, leaderGlobalCheckpoint); - final int 
maxBatchOperationCount = params.getMaxBatchOperationCount(); + final int maxReadRequestOperationCount = params.getMaxReadRequestOperationCount(); while (hasReadBudget() && lastRequestedSeqNo < leaderGlobalCheckpoint) { final long from = lastRequestedSeqNo + 1; - final long maxRequiredSeqNo = Math.min(leaderGlobalCheckpoint, from + maxBatchOperationCount - 1); - final int requestBatchCount; - if (numConcurrentReads == 0) { + final long maxRequiredSeqNo = Math.min(leaderGlobalCheckpoint, from + maxReadRequestOperationCount - 1); + final int requestOpCount; + if (numOutstandingReads == 0) { // This is the only request, we can optimistically fetch more documents if possible but not enforce max_required_seqno. - requestBatchCount = maxBatchOperationCount; + requestOpCount = maxReadRequestOperationCount; } else { - requestBatchCount = Math.toIntExact(maxRequiredSeqNo - from + 1); + requestOpCount = Math.toIntExact(maxRequiredSeqNo - from + 1); } - assert 0 < requestBatchCount && requestBatchCount <= maxBatchOperationCount : "request_batch_count=" + requestBatchCount; + assert 0 < requestOpCount && requestOpCount <= maxReadRequestOperationCount : "read_request_operation_count=" + requestOpCount; LOGGER.trace("{}[{} ongoing reads] read from_seqno={} max_required_seqno={} batch_count={}", - params.getFollowShardId(), numConcurrentReads, from, maxRequiredSeqNo, requestBatchCount); - numConcurrentReads++; - sendShardChangesRequest(from, requestBatchCount, maxRequiredSeqNo); + params.getFollowShardId(), numOutstandingReads, from, maxRequiredSeqNo, requestOpCount); + numOutstandingReads++; + sendShardChangesRequest(from, requestOpCount, maxRequiredSeqNo); lastRequestedSeqNo = maxRequiredSeqNo; } - if (numConcurrentReads == 0 && hasReadBudget()) { + if (numOutstandingReads == 0 && hasReadBudget()) { assert lastRequestedSeqNo == leaderGlobalCheckpoint; // We sneak peek if there is any thing new in the leader. 
// If there is we will happily accept - numConcurrentReads++; + numOutstandingReads++; long from = lastRequestedSeqNo + 1; - LOGGER.trace("{}[{}] peek read [{}]", params.getFollowShardId(), numConcurrentReads, from); - sendShardChangesRequest(from, maxBatchOperationCount, lastRequestedSeqNo); + LOGGER.trace("{}[{}] peek read [{}]", params.getFollowShardId(), numOutstandingReads, from); + sendShardChangesRequest(from, maxReadRequestOperationCount, lastRequestedSeqNo); } } private boolean hasReadBudget() { assert Thread.holdsLock(this); - if (numConcurrentReads >= params.getMaxConcurrentReadBatches()) { + if (numOutstandingReads >= params.getMaxOutstandingReadRequests()) { LOGGER.trace("{} no new reads, maximum number of concurrent reads have been reached [{}]", - params.getFollowShardId(), numConcurrentReads); + params.getFollowShardId(), numOutstandingReads); return false; } if (bufferSizeInBytes >= params.getMaxWriteBufferSize().getBytes()) { @@ -203,19 +203,19 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { while (hasWriteBudget() && buffer.isEmpty() == false) { long sumEstimatedSize = 0L; - int length = Math.min(params.getMaxBatchOperationCount(), buffer.size()); + int length = Math.min(params.getMaxWriteRequestOperationCount(), buffer.size()); List ops = new ArrayList<>(length); for (int i = 0; i < length; i++) { Translog.Operation op = buffer.remove(); ops.add(op); sumEstimatedSize += op.estimateSize(); - if (sumEstimatedSize > params.getMaxBatchSize().getBytes()) { + if (sumEstimatedSize > params.getMaxWriteRequestSize().getBytes()) { break; } } bufferSizeInBytes -= sumEstimatedSize; - numConcurrentWrites++; - LOGGER.trace("{}[{}] write [{}/{}] [{}]", params.getFollowShardId(), numConcurrentWrites, ops.get(0).seqNo(), + numOutstandingWrites++; + LOGGER.trace("{}[{}] write [{}/{}] [{}]", params.getFollowShardId(), numOutstandingWrites, ops.get(0).seqNo(), ops.get(ops.size() - 1).seqNo(), ops.size()); sendBulkShardOperationsRequest(ops, leaderMaxSeqNoOfUpdatesOrDeletes, new AtomicInteger(0)); } @@ -223,9 +223,9 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private boolean hasWriteBudget() { assert Thread.holdsLock(this); - if (numConcurrentWrites >= params.getMaxConcurrentWriteBatches()) { + if (numOutstandingWrites >= params.getMaxOutstandingWriteRequests()) { LOGGER.trace("{} maximum number of concurrent writes have been reached [{}]", - params.getFollowShardId(), numConcurrentWrites); + params.getFollowShardId(), numOutstandingWrites); return false; } return true; @@ -247,11 +247,11 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { fetchExceptions.remove(from); if (response.getOperations().length > 0) { // do not count polls against fetch stats - totalFetchTookTimeMillis += response.getTookInMillis(); - totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); - numberOfSuccessfulFetches++; - operationsReceived += response.getOperations().length; - totalTransferredBytes += + totalReadRemoteExecTimeMillis += response.getTookInMillis(); + totalReadTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); + successfulReadRequests++; + operationsRead += response.getOperations().length; + bytesRead += Arrays.stream(response.getOperations()).mapToLong(Translog.Operation::estimateSize).sum(); } } @@ -259,8 +259,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { }, e -> { synchronized 
(ShardFollowNodeTask.this) { - totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); - numberOfFailedFetches++; + totalReadTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); + failedReadRequests++; fetchExceptions.put(from, Tuple.tuple(retryCounter, ExceptionsHelper.convertToElastic(e))); } handleFailure(e, retryCounter, () -> sendShardChangesRequest(from, maxOperationCount, maxRequiredSeqNo, retryCounter)); @@ -310,7 +310,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { sendShardChangesRequest(newFromSeqNo, newSize, maxRequiredSeqNo); } else { // read is completed, decrement - numConcurrentReads--; + numOutstandingReads--; coordinateReads(); } } @@ -322,16 +322,16 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { innerSendBulkShardOperationsRequest(followerHistoryUUID, operations, leaderMaxSeqNoOfUpdatesOrDeletes, response -> { synchronized (ShardFollowNodeTask.this) { - totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); - numberOfSuccessfulBulkOperations++; - numberOfOperationsIndexed += operations.size(); + totalWriteTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); + successfulWriteRequests++; + operationWritten += operations.size(); } handleWriteResponse(response); }, e -> { synchronized (ShardFollowNodeTask.this) { - totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); - numberOfFailedBulkOperations++; + totalWriteTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); + failedWriteRequests++; } handleFailure(e, retryCounter, () -> sendBulkShardOperationsRequest(operations, leaderMaxSeqNoOfUpdatesOrDeletes, retryCounter)); @@ -342,8 +342,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private synchronized void handleWriteResponse(final BulkShardOperationsResponse response) { this.followerGlobalCheckpoint = Math.max(this.followerGlobalCheckpoint, response.getGlobalCheckpoint()); this.followerMaxSeqNo = Math.max(this.followerMaxSeqNo, response.getMaxSeqNo()); - numConcurrentWrites--; - assert numConcurrentWrites >= 0; + numOutstandingWrites--; + assert numOutstandingWrites >= 0; coordinateWrites(); // In case that buffer has more ops than is allowed then reads may all have been stopped, @@ -380,7 +380,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { int currentRetry = retryCounter.incrementAndGet(); LOGGER.debug(new ParameterizedMessage("{} error during follow shard task, retrying [{}]", params.getFollowShardId(), currentRetry), e); - long delay = computeDelay(currentRetry, params.getPollTimeout().getMillis()); + long delay = computeDelay(currentRetry, params.getReadPollTimeout().getMillis()); scheduler.accept(TimeValue.timeValueMillis(delay), task); } else { fatalException = ExceptionsHelper.convertToElastic(e); @@ -463,21 +463,21 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { followerGlobalCheckpoint, followerMaxSeqNo, lastRequestedSeqNo, - numConcurrentReads, - numConcurrentWrites, + numOutstandingReads, + numOutstandingWrites, buffer.size(), bufferSizeInBytes, currentMappingVersion, - totalFetchTimeMillis, - totalFetchTookTimeMillis, - numberOfSuccessfulFetches, - numberOfFailedFetches, - operationsReceived, - totalTransferredBytes, - totalIndexTimeMillis, - 
numberOfSuccessfulBulkOperations, - numberOfFailedBulkOperations, - numberOfOperationsIndexed, + totalReadTimeMillis, + totalReadRemoteExecTimeMillis, + successfulReadRequests, + failedReadRequests, + operationsRead, + bytesRead, + totalWriteTimeMillis, + successfulWriteRequests, + failedWriteRequests, + operationWritten, new TreeMap<>( fetchExceptions .entrySet() diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index 13e3da77491..f22fe0d2238 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -44,20 +44,23 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { static final ParseField LEADER_SHARD_INDEX_UUID_FIELD = new ParseField("leader_shard_index_uuid"); static final ParseField LEADER_SHARD_SHARDID_FIELD = new ParseField("leader_shard_shard"); static final ParseField HEADERS = new ParseField("headers"); - public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); - public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); - public static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); - public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + public static final ParseField MAX_READ_REQUEST_OPERATION_COUNT = new ParseField("max_read_request_operation_count"); + public static final ParseField MAX_READ_REQUEST_SIZE = new ParseField("max_read_request_size"); + public static final ParseField MAX_OUTSTANDING_READ_REQUESTS = new ParseField("max_outstanding_read_requests"); + public static final ParseField MAX_WRITE_REQUEST_OPERATION_COUNT = new ParseField("max_write_request_operation_count"); + public static final ParseField MAX_WRITE_REQUEST_SIZE = new ParseField("max_write_request_size"); + public static final ParseField MAX_OUTSTANDING_WRITE_REQUESTS = new ParseField("max_outstanding_write_requests"); public static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); - public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); + public static final ParseField READ_POLL_TIMEOUT = new ParseField("read_poll_timeout"); @SuppressWarnings("unchecked") private static ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - (a) -> new ShardFollowTask((String) a[0], new ShardId((String) a[1], (String) a[2], (int) a[3]), - new ShardId((String) a[4], (String) a[5], (int) a[6]), (int) a[7], (int) a[8], (ByteSizeValue) a[9], - (int) a[10], (int) a[11], (ByteSizeValue) a[12], (TimeValue) a[13], (TimeValue) a[14], (Map) a[15])); + (a) -> new ShardFollowTask((String) a[0], + new ShardId((String) a[1], (String) a[2], (int) a[3]), new ShardId((String) a[4], (String) a[5], (int) a[6]), + (int) a[7], (ByteSizeValue) a[8], (int) a[9], (int) a[10], (ByteSizeValue) a[11], (int) a[12], + (int) a[13], (ByteSizeValue) a[14], (TimeValue) a[15], (TimeValue) a[16], (Map) a[17])); static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REMOTE_CLUSTER_FIELD); @@ -67,14 +70,18 
@@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_SHARD_INDEX_FIELD); PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_SHARD_INDEX_UUID_FIELD); PARSER.declareInt(ConstructingObjectParser.constructorArg(), LEADER_SHARD_SHARDID_FIELD); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_BATCH_OPERATION_COUNT); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_CONCURRENT_READ_BATCHES); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_READ_REQUEST_OPERATION_COUNT); PARSER.declareField( ConstructingObjectParser.constructorArg(), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), - MAX_BATCH_SIZE, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_READ_REQUEST_SIZE.getPreferredName()), MAX_READ_REQUEST_SIZE, ObjectParser.ValueType.STRING); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_OUTSTANDING_READ_REQUESTS); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_WRITE_REQUEST_OPERATION_COUNT); + PARSER.declareField( + ConstructingObjectParser.constructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_BUFFER_SIZE.getPreferredName()), MAX_WRITE_REQUEST_SIZE, + ObjectParser.ValueType.STRING); + PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_OUTSTANDING_WRITE_REQUESTS); PARSER.declareInt(ConstructingObjectParser.constructorArg(), MAX_WRITE_BUFFER_COUNT); PARSER.declareField( ConstructingObjectParser.constructorArg(), @@ -85,48 +92,54 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY.getPreferredName()), MAX_RETRY_DELAY, ObjectParser.ValueType.STRING); PARSER.declareField(ConstructingObjectParser.constructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), - POLL_TIMEOUT, ObjectParser.ValueType.STRING); + (p, c) -> TimeValue.parseTimeValue(p.text(), READ_POLL_TIMEOUT.getPreferredName()), + READ_POLL_TIMEOUT, ObjectParser.ValueType.STRING); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS); } private final String remoteCluster; private final ShardId followShardId; private final ShardId leaderShardId; - private final int maxBatchOperationCount; - private final int maxConcurrentReadBatches; - private final ByteSizeValue maxBatchSize; - private final int maxConcurrentWriteBatches; + private final int maxReadRequestOperationCount; + private final ByteSizeValue maxReadRequestSize; + private final int maxOutstandingReadRequests; + private final int maxWriteRequestOperationCount; + private final ByteSizeValue maxWriteRequestSize; + private final int maxOutstandingWriteRequests; private final int maxWriteBufferCount; private final ByteSizeValue maxWriteBufferSize; private final TimeValue maxRetryDelay; - private final TimeValue pollTimeout; + private final TimeValue readPollTimeout; private final Map headers; ShardFollowTask( final String remoteCluster, final ShardId followShardId, final ShardId leaderShardId, - final int maxBatchOperationCount, - final int maxConcurrentReadBatches, - final ByteSizeValue maxBatchSize, - final int maxConcurrentWriteBatches, + final int maxReadRequestOperationCount, + final ByteSizeValue maxReadRequestSize, 
+ final int maxOutstandingReadRequests, + final int maxWriteRequestOperationCount, + final ByteSizeValue maxWriteRequestSize, + final int maxOutstandingWriteRequests, final int maxWriteBufferCount, final ByteSizeValue maxWriteBufferSize, final TimeValue maxRetryDelay, - final TimeValue pollTimeout, + final TimeValue readPollTimeout, final Map headers) { this.remoteCluster = remoteCluster; this.followShardId = followShardId; this.leaderShardId = leaderShardId; - this.maxBatchOperationCount = maxBatchOperationCount; - this.maxConcurrentReadBatches = maxConcurrentReadBatches; - this.maxBatchSize = maxBatchSize; - this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + this.maxReadRequestOperationCount = maxReadRequestOperationCount; + this.maxReadRequestSize = maxReadRequestSize; + this.maxOutstandingReadRequests = maxOutstandingReadRequests; + this.maxWriteRequestOperationCount = maxWriteRequestOperationCount; + this.maxWriteRequestSize = maxWriteRequestSize; + this.maxOutstandingWriteRequests = maxOutstandingWriteRequests; this.maxWriteBufferCount = maxWriteBufferCount; this.maxWriteBufferSize = maxWriteBufferSize; this.maxRetryDelay = maxRetryDelay; - this.pollTimeout = pollTimeout; + this.readPollTimeout = readPollTimeout; this.headers = headers != null ? Collections.unmodifiableMap(headers) : Collections.emptyMap(); } @@ -134,14 +147,16 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { this.remoteCluster = in.readString(); this.followShardId = ShardId.readShardId(in); this.leaderShardId = ShardId.readShardId(in); - this.maxBatchOperationCount = in.readVInt(); - this.maxConcurrentReadBatches = in.readVInt(); - this.maxBatchSize = new ByteSizeValue(in); - this.maxConcurrentWriteBatches = in.readVInt(); + this.maxReadRequestOperationCount = in.readVInt(); + this.maxReadRequestSize = new ByteSizeValue(in); + this.maxOutstandingReadRequests = in.readVInt(); + this.maxWriteRequestOperationCount = in.readVInt(); + this.maxWriteRequestSize = new ByteSizeValue(in); + this.maxOutstandingWriteRequests = in.readVInt(); this.maxWriteBufferCount = in.readVInt(); this.maxWriteBufferSize = new ByteSizeValue(in); this.maxRetryDelay = in.readTimeValue(); - this.pollTimeout = in.readTimeValue(); + this.readPollTimeout = in.readTimeValue(); this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); } @@ -157,16 +172,24 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { return leaderShardId; } - public int getMaxBatchOperationCount() { - return maxBatchOperationCount; + public int getMaxReadRequestOperationCount() { + return maxReadRequestOperationCount; } - public int getMaxConcurrentReadBatches() { - return maxConcurrentReadBatches; + public int getMaxOutstandingReadRequests() { + return maxOutstandingReadRequests; } - public int getMaxConcurrentWriteBatches() { - return maxConcurrentWriteBatches; + public int getMaxWriteRequestOperationCount() { + return maxWriteRequestOperationCount; + } + + public ByteSizeValue getMaxWriteRequestSize() { + return maxWriteRequestSize; + } + + public int getMaxOutstandingWriteRequests() { + return maxOutstandingWriteRequests; } public int getMaxWriteBufferCount() { @@ -177,16 +200,16 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { return maxWriteBufferSize; } - public ByteSizeValue getMaxBatchSize() { - return maxBatchSize; + public ByteSizeValue getMaxReadRequestSize() { + return maxReadRequestSize; } public TimeValue 
getMaxRetryDelay() { return maxRetryDelay; } - public TimeValue getPollTimeout() { - return pollTimeout; + public TimeValue getReadPollTimeout() { + return readPollTimeout; } public String getTaskId() { @@ -207,14 +230,16 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { out.writeString(remoteCluster); followShardId.writeTo(out); leaderShardId.writeTo(out); - out.writeVLong(maxBatchOperationCount); - out.writeVInt(maxConcurrentReadBatches); - maxBatchSize.writeTo(out); - out.writeVInt(maxConcurrentWriteBatches); + out.writeVLong(maxReadRequestOperationCount); + maxReadRequestSize.writeTo(out); + out.writeVInt(maxOutstandingReadRequests); + out.writeVLong(maxWriteRequestOperationCount); + maxWriteRequestSize.writeTo(out); + out.writeVInt(maxOutstandingWriteRequests); out.writeVInt(maxWriteBufferCount); maxWriteBufferSize.writeTo(out); out.writeTimeValue(maxRetryDelay); - out.writeTimeValue(pollTimeout); + out.writeTimeValue(readPollTimeout); out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); } @@ -232,14 +257,16 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { builder.field(LEADER_SHARD_INDEX_FIELD.getPreferredName(), leaderShardId.getIndex().getName()); builder.field(LEADER_SHARD_INDEX_UUID_FIELD.getPreferredName(), leaderShardId.getIndex().getUUID()); builder.field(LEADER_SHARD_SHARDID_FIELD.getPreferredName(), leaderShardId.id()); - builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); - builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); - builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); - builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + builder.field(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), maxReadRequestOperationCount); + builder.field(MAX_READ_REQUEST_SIZE.getPreferredName(), maxReadRequestSize.getStringRep()); + builder.field(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), maxOutstandingReadRequests); + builder.field(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), maxWriteRequestOperationCount); + builder.field(MAX_WRITE_REQUEST_SIZE.getPreferredName(), maxWriteRequestSize.getStringRep()); + builder.field(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), maxOutstandingWriteRequests); builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay.getStringRep()); - builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); + builder.field(READ_POLL_TIMEOUT.getPreferredName(), readPollTimeout.getStringRep()); builder.field(HEADERS.getPreferredName(), headers); return builder.endObject(); } @@ -252,14 +279,16 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { return Objects.equals(remoteCluster, that.remoteCluster) && Objects.equals(followShardId, that.followShardId) && Objects.equals(leaderShardId, that.leaderShardId) && - maxBatchOperationCount == that.maxBatchOperationCount && - maxConcurrentReadBatches == that.maxConcurrentReadBatches && - maxConcurrentWriteBatches == that.maxConcurrentWriteBatches && - maxBatchSize.equals(that.maxBatchSize) && + maxReadRequestOperationCount == that.maxReadRequestOperationCount && + maxReadRequestSize.equals(that.maxReadRequestSize) && + maxOutstandingReadRequests == 
that.maxOutstandingReadRequests && + maxWriteRequestOperationCount == that.maxWriteRequestOperationCount && + maxWriteRequestSize.equals(that.maxWriteRequestSize) && + maxOutstandingWriteRequests == that.maxOutstandingWriteRequests && maxWriteBufferCount == that.maxWriteBufferCount && maxWriteBufferSize.equals(that.maxWriteBufferSize) && Objects.equals(maxRetryDelay, that.maxRetryDelay) && - Objects.equals(pollTimeout, that.pollTimeout) && + Objects.equals(readPollTimeout, that.readPollTimeout) && Objects.equals(headers, that.headers); } @@ -269,14 +298,16 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { remoteCluster, followShardId, leaderShardId, - maxBatchOperationCount, - maxConcurrentReadBatches, - maxConcurrentWriteBatches, - maxBatchSize, + maxReadRequestOperationCount, + maxReadRequestSize, + maxOutstandingReadRequests, + maxWriteRequestOperationCount, + maxWriteRequestSize, + maxOutstandingWriteRequests, maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, - pollTimeout, + readPollTimeout, headers ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 128c2a846d8..5a82b45cf8c 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -153,8 +153,8 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor { - static final ByteSizeValue DEFAULT_MAX_BATCH_SIZE = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); + static final ByteSizeValue DEFAULT_MAX_READ_REQUEST_SIZE = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); + static final ByteSizeValue DEFAULT_MAX_WRITE_REQUEST_SIZE = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); private static final TimeValue DEFAULT_MAX_RETRY_DELAY = new TimeValue(500); - private static final int DEFAULT_MAX_CONCURRENT_WRITE_BATCHES = 9; + private static final int DEFAULT_MAX_OUTSTANDING_WRITE_REQUESTS = 9; private static final int DEFAULT_MAX_WRITE_BUFFER_COUNT = Integer.MAX_VALUE; private static final ByteSizeValue DEFAULT_MAX_WRITE_BUFFER_SIZE = new ByteSizeValue(512, ByteSizeUnit.MB); - private static final int DEFAULT_MAX_BATCH_OPERATION_COUNT = 5120; - private static final int DEFAULT_MAX_CONCURRENT_READ_BATCHES = 12; - static final TimeValue DEFAULT_POLL_TIMEOUT = TimeValue.timeValueMinutes(1); + private static final int DEFAULT_MAX_READ_REQUEST_OPERATION_COUNT = 5120; + private static final int DEFAULT_MAX_WRITE_REQUEST_OPERATION_COUNT = 5120; + private static final int DEFAULT_MAX_OUTSTANDING_READ_REQUESTS = 12; + static final TimeValue DEFAULT_READ_POLL_TIMEOUT = TimeValue.timeValueMinutes(1); private final Client client; private final ThreadPool threadPool; @@ -232,32 +234,46 @@ public class TransportResumeFollowAction extends HandledTransportAction filteredHeaders ) { - int maxBatchOperationCount; - if (request.getMaxBatchOperationCount() != null) { - maxBatchOperationCount = request.getMaxBatchOperationCount(); + int maxReadRequestOperationCount; + if (request.getMaxReadRequestOperationCount() != null) { + maxReadRequestOperationCount = request.getMaxReadRequestOperationCount(); } else { - maxBatchOperationCount = DEFAULT_MAX_BATCH_OPERATION_COUNT; + maxReadRequestOperationCount = DEFAULT_MAX_READ_REQUEST_OPERATION_COUNT; } - int 
maxConcurrentReadBatches; - if (request.getMaxConcurrentReadBatches() != null){ - maxConcurrentReadBatches = request.getMaxConcurrentReadBatches(); + ByteSizeValue maxReadRequestSize; + if (request.getMaxReadRequestSize() != null) { + maxReadRequestSize = request.getMaxReadRequestSize(); } else { - maxConcurrentReadBatches = DEFAULT_MAX_CONCURRENT_READ_BATCHES; + maxReadRequestSize = DEFAULT_MAX_READ_REQUEST_SIZE; } - ByteSizeValue maxBatchSize; - if (request.getMaxBatchSize() != null) { - maxBatchSize = request.getMaxBatchSize(); + int maxOutstandingReadRequests; + if (request.getMaxOutstandingReadRequests() != null){ + maxOutstandingReadRequests = request.getMaxOutstandingReadRequests(); } else { - maxBatchSize = DEFAULT_MAX_BATCH_SIZE; + maxOutstandingReadRequests = DEFAULT_MAX_OUTSTANDING_READ_REQUESTS; } - int maxConcurrentWriteBatches; - if (request.getMaxConcurrentWriteBatches() != null) { - maxConcurrentWriteBatches = request.getMaxConcurrentWriteBatches(); + final int maxWriteRequestOperationCount; + if (request.getMaxWriteRequestOperationCount() != null) { + maxWriteRequestOperationCount = request.getMaxWriteRequestOperationCount(); } else { - maxConcurrentWriteBatches = DEFAULT_MAX_CONCURRENT_WRITE_BATCHES; + maxWriteRequestOperationCount = DEFAULT_MAX_WRITE_REQUEST_OPERATION_COUNT; + } + + final ByteSizeValue maxWriteRequestSize; + if (request.getMaxWriteRequestSize() != null) { + maxWriteRequestSize = request.getMaxWriteRequestSize(); + } else { + maxWriteRequestSize = DEFAULT_MAX_WRITE_REQUEST_SIZE; + } + + int maxOutstandingWriteRequests; + if (request.getMaxOutstandingWriteRequests() != null) { + maxOutstandingWriteRequests = request.getMaxOutstandingWriteRequests(); + } else { + maxOutstandingWriteRequests = DEFAULT_MAX_OUTSTANDING_WRITE_REQUESTS; } int maxWriteBufferCount; @@ -275,20 +291,22 @@ public class TransportResumeFollowAction extends HandledTransportAction firstBatchNumDocsPerShard = new HashMap<>(); @@ -456,10 +459,10 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(response.getNodeFailures(), empty()); assertThat(response.getTaskFailures(), empty()); assertThat(response.getStatsResponses(), hasSize(1)); - assertThat(response.getStatsResponses().get(0).status().numberOfFailedFetches(), greaterThanOrEqualTo(1L)); - assertThat(response.getStatsResponses().get(0).status().fetchExceptions().size(), equalTo(1)); + assertThat(response.getStatsResponses().get(0).status().failedReadRequests(), greaterThanOrEqualTo(1L)); + assertThat(response.getStatsResponses().get(0).status().readExceptions().size(), equalTo(1)); ElasticsearchException exception = response.getStatsResponses().get(0).status() - .fetchExceptions().entrySet().iterator().next().getValue().v2(); + .readExceptions().entrySet().iterator().next().getValue().v2(); assertThat(exception.getRootCause().getMessage(), equalTo("blocked by: [FORBIDDEN/4/index closed];")); }); @@ -491,7 +494,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(response.getNodeFailures(), empty()); assertThat(response.getTaskFailures(), empty()); assertThat(response.getStatsResponses(), hasSize(1)); - assertThat(response.getStatsResponses().get(0).status().numberOfFailedBulkOperations(), greaterThanOrEqualTo(1L)); + assertThat(response.getStatsResponses().get(0).status().failedWriteRequests(), greaterThanOrEqualTo(1L)); }); followerClient().admin().indices().open(new OpenIndexRequest("index2")).actionGet(); assertBusy(() -> 
assertThat(followerClient().prepareSearch("index2").get().getHits().totalHits, equalTo(2L))); @@ -519,7 +522,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(response.getNodeFailures(), empty()); assertThat(response.getTaskFailures(), empty()); assertThat(response.getStatsResponses(), hasSize(1)); - assertThat(response.getStatsResponses().get(0).status().numberOfFailedFetches(), greaterThanOrEqualTo(1L)); + assertThat(response.getStatsResponses().get(0).status().failedReadRequests(), greaterThanOrEqualTo(1L)); ElasticsearchException fatalException = response.getStatsResponses().get(0).status().getFatalException(); assertThat(fatalException, notNullValue()); assertThat(fatalException.getRootCause().getMessage(), equalTo("no such index [index1]")); @@ -549,7 +552,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { assertThat(response.getNodeFailures(), empty()); assertThat(response.getTaskFailures(), empty()); assertThat(response.getStatsResponses(), hasSize(1)); - assertThat(response.getStatsResponses().get(0).status().numberOfFailedBulkOperations(), greaterThanOrEqualTo(1L)); + assertThat(response.getStatsResponses().get(0).status().failedWriteRequests(), greaterThanOrEqualTo(1L)); ElasticsearchException fatalException = response.getStatsResponses().get(0).status().getFatalException(); assertThat(fatalException, notNullValue()); assertThat(fatalException.getMessage(), equalTo("no such index [index2]")); @@ -613,6 +616,12 @@ public class IndexFollowingIT extends CcrIntegTestCase { threads[i].start(); } PutFollowAction.Request follow = putFollow("leader-index", "follower-index"); + follow.getFollowRequest().setMaxReadRequestOperationCount(randomIntBetween(32, 2048)); + follow.getFollowRequest().setMaxReadRequestSize(new ByteSizeValue(randomIntBetween(1, 4096), ByteSizeUnit.KB)); + follow.getFollowRequest().setMaxOutstandingReadRequests(randomIntBetween(1, 10)); + follow.getFollowRequest().setMaxWriteRequestOperationCount(randomIntBetween(32, 2048)); + follow.getFollowRequest().setMaxWriteRequestSize(new ByteSizeValue(randomIntBetween(1, 4096), ByteSizeUnit.KB)); + follow.getFollowRequest().setMaxOutstandingWriteRequests(randomIntBetween(1, 10)); followerClient().execute(PutFollowAction.INSTANCE, follow).get(); ensureFollowerGreen("follower-index"); atLeastDocsIndexed(followerClient(), "follower-index", between(20, 60)); @@ -971,7 +980,7 @@ public class IndexFollowingIT extends CcrIntegTestCase { ResumeFollowAction.Request request = new ResumeFollowAction.Request(); request.setFollowerIndex(followerIndex); request.setMaxRetryDelay(TimeValue.timeValueMillis(10)); - request.setPollTimeout(TimeValue.timeValueMillis(10)); + request.setReadPollTimeout(TimeValue.timeValueMillis(10)); return request; } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 9c55617d210..6b542d15044 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -56,8 +56,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .numberOfReplicas(0))) .build(); - AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); + AutoFollowPattern autoFollowPattern 
= new AutoFollowPattern("remote", Collections.singletonList("logs-*"), + null, null, null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -120,8 +120,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { Client client = mock(Client.class); when(client.getRemoteClusterClient(anyString())).thenReturn(client); - AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); + AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("logs-*"), + null, null, null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -178,8 +178,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .numberOfReplicas(0))) .build(); - AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); + AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("logs-*"), + null, null, null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -241,8 +241,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .numberOfReplicas(0))) .build(); - AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null, null); + AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("logs-*"), + null, null, null, null, null, null, null, null, null, null, null); Map patterns = new HashMap<>(); patterns.put("remote", autoFollowPattern); Map> followedLeaderIndexUUIDS = new HashMap<>(); @@ -295,8 +295,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { } public void testGetLeaderIndicesToFollow() { - AutoFollowPattern autoFollowPattern = - new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null, null); + AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), + null, null, null, null, null, null, null, null, null, null, null); Map> headers = new HashMap<>(); ClusterState followerState = ClusterState.builder(new ClusterName("remote")) .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, @@ -341,15 +341,15 @@ public class AutoFollowCoordinatorTests extends ESTestCase { public void testGetFollowerIndexName() { AutoFollowPattern autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), null, null, - null, null, null, null, null, null, null); + null, null, null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("metrics-0")); autoFollowPattern = new AutoFollowPattern("remote", Collections.singletonList("metrics-*"), "eu-metrics-0", null, null, - null, null, null, null, null, null); + null, null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); autoFollowPattern = new AutoFollowPattern("remote", 
Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null, - null, null, null, null, null, null, null); + null, null, null, null, null, null, null, null, null); assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java index 301dabeef89..c74afd6075c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java @@ -33,6 +33,8 @@ public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCas Collections.singletonList(randomAlphaOfLength(4)), randomAlphaOfLength(4), randomIntBetween(0, Integer.MAX_VALUE), + new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES), + randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE), new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES), randomIntBetween(0, Integer.MAX_VALUE), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java index e4e365312ad..3814e561b42 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java @@ -48,13 +48,13 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent request.setFollowIndexNamePattern(randomAlphaOfLength(4)); } if (randomBoolean()) { - request.setPollTimeout(TimeValue.timeValueMillis(500)); + request.setReadPollTimeout(TimeValue.timeValueMillis(500)); } if (randomBoolean()) { request.setMaxRetryDelay(TimeValue.timeValueMillis(500)); } if (randomBoolean()) { - request.setMaxBatchOperationCount(randomIntBetween(0, Integer.MAX_VALUE)); + request.setMaxReadRequestOperationCount(randomIntBetween(0, Integer.MAX_VALUE)); } if (randomBoolean()) { request.setMaxConcurrentReadBatches(randomIntBetween(0, Integer.MAX_VALUE)); @@ -63,7 +63,7 @@ public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContent request.setMaxConcurrentWriteBatches(randomIntBetween(0, Integer.MAX_VALUE)); } if (randomBoolean()) { - request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); + request.setMaxReadRequestSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); } if (randomBoolean()) { request.setMaxWriteBufferCount(randomIntBetween(0, Integer.MAX_VALUE)); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java index 122082537fd..ae9bc1bbd33 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ResumeFollowActionRequestTests.java @@ -45,16 +45,16 @@ public class ResumeFollowActionRequestTests extends AbstractStreamableXContentTe ResumeFollowAction.Request request = new ResumeFollowAction.Request(); 
request.setFollowerIndex(randomAlphaOfLength(4)); if (randomBoolean()) { - request.setMaxBatchOperationCount(randomIntBetween(1, Integer.MAX_VALUE)); + request.setMaxReadRequestOperationCount(randomIntBetween(1, Integer.MAX_VALUE)); } if (randomBoolean()) { - request.setMaxConcurrentReadBatches(randomIntBetween(1, Integer.MAX_VALUE)); + request.setMaxOutstandingReadRequests(randomIntBetween(1, Integer.MAX_VALUE)); } if (randomBoolean()) { - request.setMaxConcurrentWriteBatches(randomIntBetween(1, Integer.MAX_VALUE)); + request.setMaxOutstandingWriteRequests(randomIntBetween(1, Integer.MAX_VALUE)); } if (randomBoolean()) { - request.setMaxBatchSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); + request.setMaxReadRequestSize(new ByteSizeValue(randomNonNegativeLong(), ByteSizeUnit.BYTES)); } if (randomBoolean()) { request.setMaxWriteBufferCount(randomIntBetween(1, Integer.MAX_VALUE)); @@ -66,7 +66,7 @@ public class ResumeFollowActionRequestTests extends AbstractStreamableXContentTe request.setMaxRetryDelay(TimeValue.timeValueMillis(500)); } if (randomBoolean()) { - request.setPollTimeout(TimeValue.timeValueMillis(500)); + request.setReadPollTimeout(TimeValue.timeValueMillis(500)); } return request; } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java index 8576bc28905..4df2bb498b6 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java @@ -60,17 +60,17 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { assertThat(status.followerGlobalCheckpoint(), equalTo(testRun.finalExpectedGlobalCheckpoint)); final long numberOfFailedFetches = testRun.responses.values().stream().flatMap(List::stream).filter(f -> f.exception != null).count(); - assertThat(status.numberOfFailedFetches(), equalTo(numberOfFailedFetches)); + assertThat(status.failedReadRequests(), equalTo(numberOfFailedFetches)); // the failures were able to be retried so fetch failures should have cleared - assertThat(status.fetchExceptions().entrySet(), hasSize(0)); - assertThat(status.mappingVersion(), equalTo(testRun.finalMappingVersion)); + assertThat(status.readExceptions().entrySet(), hasSize(0)); + assertThat(status.followerMappingVersion(), equalTo(testRun.finalMappingVersion)); }); task.markAsCompleted(); assertBusy(() -> { ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(0)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(0)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); }); } @@ -81,8 +81,10 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { new ShardId("follow_index", "", 0), new ShardId("leader_index", "", 0), testRun.maxOperationCount, + TransportResumeFollowAction.DEFAULT_MAX_READ_REQUEST_SIZE, concurrency, - TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE, + testRun.maxOperationCount, + TransportResumeFollowAction.DEFAULT_MAX_READ_REQUEST_SIZE, concurrency, 10240, new ByteSizeValue(512, ByteSizeUnit.MB), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java index 93d9556d0e4..95f8e86e096 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskStatusTests.java @@ -74,23 +74,23 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase< assertThat(newInstance.leaderMaxSeqNo(), equalTo(expectedInstance.leaderMaxSeqNo())); assertThat(newInstance.followerGlobalCheckpoint(), equalTo(expectedInstance.followerGlobalCheckpoint())); assertThat(newInstance.lastRequestedSeqNo(), equalTo(expectedInstance.lastRequestedSeqNo())); - assertThat(newInstance.numberOfConcurrentReads(), equalTo(expectedInstance.numberOfConcurrentReads())); - assertThat(newInstance.numberOfConcurrentWrites(), equalTo(expectedInstance.numberOfConcurrentWrites())); - assertThat(newInstance.numberOfQueuedWrites(), equalTo(expectedInstance.numberOfQueuedWrites())); - assertThat(newInstance.mappingVersion(), equalTo(expectedInstance.mappingVersion())); - assertThat(newInstance.totalFetchTimeMillis(), equalTo(expectedInstance.totalFetchTimeMillis())); - assertThat(newInstance.numberOfSuccessfulFetches(), equalTo(expectedInstance.numberOfSuccessfulFetches())); - assertThat(newInstance.numberOfFailedFetches(), equalTo(expectedInstance.numberOfFailedFetches())); - assertThat(newInstance.operationsReceived(), equalTo(expectedInstance.operationsReceived())); - assertThat(newInstance.totalTransferredBytes(), equalTo(expectedInstance.totalTransferredBytes())); - assertThat(newInstance.totalIndexTimeMillis(), equalTo(expectedInstance.totalIndexTimeMillis())); - assertThat(newInstance.numberOfSuccessfulBulkOperations(), equalTo(expectedInstance.numberOfSuccessfulBulkOperations())); - assertThat(newInstance.numberOfFailedBulkOperations(), equalTo(expectedInstance.numberOfFailedBulkOperations())); - assertThat(newInstance.numberOfOperationsIndexed(), equalTo(expectedInstance.numberOfOperationsIndexed())); - assertThat(newInstance.fetchExceptions().size(), equalTo(expectedInstance.fetchExceptions().size())); - assertThat(newInstance.fetchExceptions().keySet(), equalTo(expectedInstance.fetchExceptions().keySet())); - for (final Map.Entry> entry : newInstance.fetchExceptions().entrySet()) { - final Tuple expectedTuple = expectedInstance.fetchExceptions().get(entry.getKey()); + assertThat(newInstance.outstandingReadRequests(), equalTo(expectedInstance.outstandingReadRequests())); + assertThat(newInstance.outstandingWriteRequests(), equalTo(expectedInstance.outstandingWriteRequests())); + assertThat(newInstance.writeBufferOperationCount(), equalTo(expectedInstance.writeBufferOperationCount())); + assertThat(newInstance.followerMappingVersion(), equalTo(expectedInstance.followerMappingVersion())); + assertThat(newInstance.totalReadTimeMillis(), equalTo(expectedInstance.totalReadTimeMillis())); + assertThat(newInstance.successfulReadRequests(), equalTo(expectedInstance.successfulReadRequests())); + assertThat(newInstance.failedReadRequests(), equalTo(expectedInstance.failedReadRequests())); + assertThat(newInstance.operationsReads(), equalTo(expectedInstance.operationsReads())); + assertThat(newInstance.bytesRead(), equalTo(expectedInstance.bytesRead())); + assertThat(newInstance.totalWriteTimeMillis(), equalTo(expectedInstance.totalWriteTimeMillis())); + assertThat(newInstance.successfulWriteRequests(), 
equalTo(expectedInstance.successfulWriteRequests())); + assertThat(newInstance.failedWriteRequests(), equalTo(expectedInstance.failedWriteRequests())); + assertThat(newInstance.operationWritten(), equalTo(expectedInstance.operationWritten())); + assertThat(newInstance.readExceptions().size(), equalTo(expectedInstance.readExceptions().size())); + assertThat(newInstance.readExceptions().keySet(), equalTo(expectedInstance.readExceptions().keySet())); + for (final Map.Entry> entry : newInstance.readExceptions().entrySet()) { + final Tuple expectedTuple = expectedInstance.readExceptions().get(entry.getKey()); assertThat(entry.getValue().v1(), equalTo(expectedTuple.v1())); // x-content loses the exception final ElasticsearchException expected = expectedTuple.v2(); @@ -101,7 +101,7 @@ public class ShardFollowNodeTaskStatusTests extends AbstractSerializingTestCase< anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class))); assertThat(entry.getValue().v2().getCause().getMessage(), containsString(expected.getCause().getMessage())); } - assertThat(newInstance.timeSinceLastFetchMillis(), equalTo(expectedInstance.timeSinceLastFetchMillis())); + assertThat(newInstance.timeSinceLastReadMillis(), equalTo(expectedInstance.timeSinceLastReadMillis())); } @Override diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java index a1582d4c2f1..aeac0ac4518 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java @@ -63,8 +63,12 @@ public class ShardFollowNodeTaskTests extends ESTestCase { private Queue responseSizes; public void testCoordinateReads() { - ShardFollowNodeTask task = createShardFollowTask(8, between(8, 20), between(1, 20), Integer.MAX_VALUE, - new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 8; + params.maxOutstandingReadRequests = between(8, 20); + params.maxOutstandingWriteRequests = between(1, 20); + + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 3, -1); task.coordinateReads(); assertThat(shardChangesRequests, contains(new long[]{0L, 8L})); // treat this a peak request @@ -74,14 +78,17 @@ public class ShardFollowNodeTaskTests extends ESTestCase { {6L, 8L}, {14L, 8L}, {22L, 8L}, {30L, 8L}, {38L, 8L}, {46L, 8L}, {54L, 7L}} )); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(7)); + assertThat(status.outstandingReadRequests(), equalTo(7)); assertThat(status.lastRequestedSeqNo(), equalTo(60L)); } public void testMaxWriteBufferCount() { - // Need to set concurrentWrites to 0, other the write buffer gets flushed immediately: - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 0, 32, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 0; // need to set outstandingWrites to 0, other the write buffer gets flushed immediately + params.maxWriteBufferCount = 32; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -95,16 
+102,19 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer count limit has been reached ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(0)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(0)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(128L)); } public void testMaxWriteBufferSize() { - // Need to set concurrentWrites to 0, other the write buffer gets flushed immediately: - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 0, Integer.MAX_VALUE, new ByteSizeValue(1, ByteSizeUnit.KB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 0; // need to set outstandingWrites to 0, other the write buffer gets flushed immediately + params.maxWriteBufferSize = new ByteSizeValue(1, ByteSizeUnit.KB); + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -118,15 +128,18 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.size(), equalTo(0)); // no more reads, because write buffer size limit has been reached ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(0)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(0)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(128L)); } - public void testMaxConcurrentReads() { - ShardFollowNodeTask task = - createShardFollowTask(8, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + public void testMaxOutstandingReads() { + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 8; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 64, -1); task.coordinateReads(); @@ -135,13 +148,16 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(8L)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); + assertThat(status.outstandingReadRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(7L)); } public void testTaskCancelled() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 64, -1); task.coordinateReads(); @@ -158,8 +174,11 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testTaskCancelledAfterReadLimitHasBeenReached() { - ShardFollowNodeTask task = - createShardFollowTask(16, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, 
ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 16; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 31, -1); task.coordinateReads(); @@ -175,16 +194,21 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.size(), equalTo(0)); // no more writes, because task has been cancelled ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(0)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(0)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(15L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(31L)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } public void testTaskCancelledAfterWriteBufferLimitHasBeenReached() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, 32, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + params.maxWriteBufferCount = 32; + + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 64, -1); task.coordinateReads(); @@ -200,16 +224,19 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.size(), equalTo(0)); // no more writes, because task has been cancelled ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(0)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(0)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(128L)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } public void testReceiveRetryableError() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); int max = randomIntBetween(1, 30); @@ -224,10 +251,10 @@ public class ShardFollowNodeTaskTests extends ESTestCase { final AtomicLong retryCounter = new AtomicLong(); // before each retry, we assert the fetch failures; after the last retry, the fetch failure should clear beforeSendShardChangesRequest = status -> { - assertThat(status.numberOfFailedFetches(), equalTo(retryCounter.get())); + assertThat(status.failedReadRequests(), equalTo(retryCounter.get())); if (retryCounter.get() > 0) { - assertThat(status.fetchExceptions().entrySet(), hasSize(1)); - final Map.Entry> entry = status.fetchExceptions().entrySet().iterator().next(); + assertThat(status.readExceptions().entrySet(), hasSize(1)); + final Map.Entry> entry = status.readExceptions().entrySet().iterator().next(); assertThat(entry.getValue().v1(), equalTo(Math.toIntExact(retryCounter.get()))); assertThat(entry.getKey(), equalTo(0L)); 
assertThat(entry.getValue().v2(), instanceOf(ShardNotFoundException.class)); @@ -248,19 +275,22 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertFalse("task is not stopped", task.isStopped()); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); - assertThat(status.numberOfFailedFetches(), equalTo((long)max)); - assertThat(status.numberOfSuccessfulFetches(), equalTo(1L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); + assertThat(status.failedReadRequests(), equalTo((long)max)); + assertThat(status.successfulReadRequests(), equalTo(1L)); // the fetch failure has cleared - assertThat(status.fetchExceptions().entrySet(), hasSize(0)); + assertThat(status.readExceptions().entrySet(), hasSize(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); } public void testEmptyShardChangesResponseShouldClearFetchException() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, -1, -1); readFailures.add(new ShardNotFoundException(new ShardId("leader_index", "", 0))); @@ -279,18 +309,21 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertFalse("task is not stopped", task.isStopped()); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); - assertThat(status.numberOfFailedFetches(), equalTo(1L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); + assertThat(status.failedReadRequests(), equalTo(1L)); // the fetch failure should have been cleared: - assertThat(status.fetchExceptions().entrySet(), hasSize(0)); + assertThat(status.readExceptions().entrySet(), hasSize(0)); assertThat(status.lastRequestedSeqNo(), equalTo(-1L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(-1L)); } public void testReceiveTimeout() { - final ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + final ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); final int numberOfTimeouts = randomIntBetween(1, 32); @@ -304,14 +337,14 @@ public class ShardFollowNodeTaskTests extends ESTestCase { final AtomicInteger counter = new AtomicInteger(); beforeSendShardChangesRequest = status -> { if (counter.get() <= numberOfTimeouts) { - assertThat(status.numberOfSuccessfulFetches(), equalTo(0L)); - assertThat(status.totalFetchTimeMillis(), equalTo(0L)); - assertThat(status.operationsReceived(), equalTo(0L)); - assertThat(status.totalTransferredBytes(), equalTo(0L)); + assertThat(status.successfulReadRequests(), equalTo(0L)); + assertThat(status.totalReadTimeMillis(), equalTo(0L)); + 
assertThat(status.operationsReads(), equalTo(0L)); + assertThat(status.bytesRead(), equalTo(0L)); - assertThat(status.fetchExceptions().entrySet(), hasSize(0)); - assertThat(status.totalFetchTimeMillis(), equalTo(0L)); - assertThat(status.numberOfFailedFetches(), equalTo(0L)); + assertThat(status.readExceptions().entrySet(), hasSize(0)); + assertThat(status.totalReadTimeMillis(), equalTo(0L)); + assertThat(status.failedReadRequests(), equalTo(0L)); } else { // otherwise we will keep looping as if we were repeatedly polling and timing out simulateResponse.set(false); @@ -343,10 +376,10 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(lastShardChangesRequest[1], equalTo(64L)); final ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfSuccessfulFetches(), equalTo(1L)); - assertThat(status.numberOfFailedFetches(), equalTo(0L)); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.successfulReadRequests(), equalTo(1L)); + assertThat(status.failedReadRequests(), equalTo(0L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); assertThat(status.leaderMaxSeqNo(), equalTo(63L)); @@ -354,8 +387,11 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testReceiveNonRetryableError() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); Exception failure = new RuntimeException("replication failed"); @@ -364,8 +400,8 @@ public class ShardFollowNodeTaskTests extends ESTestCase { // since there will be only one failure, this should only be invoked once and there should not be a fetch failure beforeSendShardChangesRequest = status -> { if (invoked.compareAndSet(false, true)) { - assertThat(status.numberOfFailedFetches(), equalTo(0L)); - assertThat(status.fetchExceptions().entrySet(), hasSize(0)); + assertThat(status.failedReadRequests(), equalTo(0L)); + assertThat(status.readExceptions().entrySet(), hasSize(0)); } else { fail("invoked twice"); } @@ -379,11 +415,11 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertTrue("task is stopped", task.isStopped()); assertThat(task.getStatus().getFatalException().getRootCause(), sameInstance(failure)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); - assertThat(status.numberOfFailedFetches(), equalTo(1L)); - assertThat(status.fetchExceptions().entrySet(), hasSize(1)); - final Map.Entry> entry = status.fetchExceptions().entrySet().iterator().next(); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); + assertThat(status.failedReadRequests(), equalTo(1L)); + assertThat(status.readExceptions().entrySet(), hasSize(1)); + final Map.Entry> entry = status.readExceptions().entrySet().iterator().next(); assertThat(entry.getKey(), equalTo(0L)); assertThat(entry.getValue().v2(), instanceOf(ElasticsearchException.class)); 
assertNotNull(entry.getValue().v2().getCause()); @@ -395,8 +431,11 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testHandleReadResponse() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -407,18 +446,21 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.get(0), equalTo(Arrays.asList(response.getOperations()))); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.mappingVersion(), equalTo(0L)); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.followerMappingVersion(), equalTo(0L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } public void testReceiveLessThanRequested() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -435,15 +477,18 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(43L)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); } public void testCancelAndReceiveLessThanRequested() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -459,15 +504,18 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.size(), equalTo(0)); assertThat(bulkShardOperationRequests.size(), equalTo(0)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(0)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(0)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), 
equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); } public void testReceiveNothingExpectedSomething() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -483,15 +531,18 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); } public void testMappingUpdate() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); mappingVersions.add(1L); @@ -503,17 +554,20 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.get(0), equalTo(Arrays.asList(response.getOperations()))); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.mappingVersion(), equalTo(1L)); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.followerMappingVersion(), equalTo(1L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } public void testMappingUpdateRetryableError() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); int max = randomIntBetween(1, 30); @@ -529,17 +583,20 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.size(), equalTo(1)); assertThat(task.isStopped(), equalTo(false)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.mappingVersion(), equalTo(1L)); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.followerMappingVersion(), equalTo(1L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); 
} public void testMappingUpdateNonRetryableError() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); mappingUpdateFailures.add(new RuntimeException()); @@ -550,22 +607,27 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.size(), equalTo(0)); assertThat(task.isStopped(), equalTo(true)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.mappingVersion(), equalTo(0L)); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(0)); + assertThat(status.followerMappingVersion(), equalTo(0L)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(0)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); } public void testCoordinateWrites() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 128; + params.maxOutstandingReadRequests = 1; + params.maxWriteRequestOperationCount = 64; + params.maxOutstandingWriteRequests = 1; + + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); assertThat(shardChangesRequests.size(), equalTo(1)); assertThat(shardChangesRequests.get(0)[0], equalTo(0L)); - assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); + assertThat(shardChangesRequests.get(0)[1], equalTo(128L)); ShardChangesAction.Response response = generateShardChangesResponse(0, 63, 0L, 63L); // Also invokes coordinatesWrites() @@ -575,16 +637,18 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.get(0), equalTo(Arrays.asList(response.getOperations()))); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.outstandingReadRequests(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } - public void testMaxConcurrentWrites() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 2, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + public void testMaxOutstandingWrites() { + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxWriteRequestOperationCount = 64; + params.maxOutstandingWriteRequests = 2; + ShardFollowNodeTask task = createShardFollowTask(params); ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -594,9 +658,10 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.get(1), equalTo(Arrays.asList(response.getOperations()).subList(64, 128))); 
ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentWrites(), equalTo(2)); + assertThat(status.outstandingWriteRequests(), equalTo(2)); - task = createShardFollowTask(64, 1, 4, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + params.maxOutstandingWriteRequests = 4; // change to 4 outstanding writers + task = createShardFollowTask(params); response = generateShardChangesResponse(0, 256, 0L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -608,12 +673,14 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.get(3), equalTo(Arrays.asList(response.getOperations()).subList(192, 256))); status = task.getStatus(); - assertThat(status.numberOfConcurrentWrites(), equalTo(4)); + assertThat(status.outstandingWriteRequests(), equalTo(4)); } - public void testMaxBatchOperationCount() { - ShardFollowNodeTask task = - createShardFollowTask(8, 1, 32, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + public void testMaxWriteRequestCount() { + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxWriteRequestOperationCount = 8; + params.maxOutstandingWriteRequests = 32; + ShardFollowNodeTask task = createShardFollowTask(params); ShardChangesAction.Response response = generateShardChangesResponse(0, 256, 0L, 256L); // Also invokes coordinatesWrites() task.innerHandleReadResponse(0L, 64L, response); @@ -625,12 +692,15 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentWrites(), equalTo(32)); + assertThat(status.outstandingWriteRequests(), equalTo(32)); } public void testRetryableError() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -653,13 +723,16 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } assertThat(task.isStopped(), equalTo(false)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } public void testNonRetryableError() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -676,13 +749,18 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(bulkShardOperationRequests.get(0), equalTo(Arrays.asList(response.getOperations()))); assertThat(task.isStopped(), equalTo(true)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentWrites(), equalTo(1)); + assertThat(status.outstandingWriteRequests(), equalTo(1)); assertThat(status.followerGlobalCheckpoint(), equalTo(-1L)); } - 
public void testMaxBatchBytesLimit() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 128, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), 1L); + public void testMaxWriteRequestSize() { + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxWriteRequestSize = new ByteSizeValue(1, ByteSizeUnit.BYTES); + params.maxOutstandingWriteRequests = 128; + + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 64, -1); task.coordinateReads(); @@ -698,8 +776,12 @@ public class ShardFollowNodeTaskTests extends ESTestCase { } public void testHandleWriteResponse() { - ShardFollowNodeTask task = - createShardFollowTask(64, 1, 1, Integer.MAX_VALUE, new ByteSizeValue(512, ByteSizeUnit.MB), Long.MAX_VALUE); + ShardFollowTaskParams params = new ShardFollowTaskParams(); + params.maxReadRequestOperationCount = 64; + params.maxOutstandingReadRequests = 1; + params.maxWriteRequestOperationCount = 64; + params.maxOutstandingWriteRequests = 1; + ShardFollowNodeTask task = createShardFollowTask(params); startTask(task, 63, -1); task.coordinateReads(); @@ -722,7 +804,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); ShardFollowNodeTaskStatus status = task.getStatus(); - assertThat(status.numberOfConcurrentReads(), equalTo(1)); + assertThat(status.outstandingReadRequests(), equalTo(1)); assertThat(status.lastRequestedSeqNo(), equalTo(63L)); assertThat(status.leaderGlobalCheckpoint(), equalTo(63L)); assertThat(status.followerGlobalCheckpoint(), equalTo(63L)); @@ -742,26 +824,40 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(ShardFollowNodeTask.computeDelay(1024, maxDelayInMillis), allOf(greaterThanOrEqualTo(0L), lessThanOrEqualTo(1000L))); } - private ShardFollowNodeTask createShardFollowTask(int maxBatchOperationCount, - int maxConcurrentReadBatches, - int maxConcurrentWriteBatches, - int maxWriteBufferCount, - ByteSizeValue maxWriteBufferSize, - long maxBatchSizeInBytes) { + static final class ShardFollowTaskParams { + private String remoteCluster = null; + private ShardId followShardId = new ShardId("follow_index", "", 0); + private ShardId leaderShardId = new ShardId("leader_index", "", 0); + private int maxReadRequestOperationCount = Integer.MAX_VALUE; + private ByteSizeValue maxReadRequestSize = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); + private int maxOutstandingReadRequests = Integer.MAX_VALUE; + private int maxWriteRequestOperationCount = Integer.MAX_VALUE; + private ByteSizeValue maxWriteRequestSize = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); + private int maxOutstandingWriteRequests = Integer.MAX_VALUE; + private int maxWriteBufferCount = Integer.MAX_VALUE; + private ByteSizeValue maxWriteBufferSize = new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES); + private TimeValue maxRetryDelay = TimeValue.ZERO; + private TimeValue readPollTimeout = TimeValue.ZERO; + private Map headers = Collections.emptyMap(); + } + + private ShardFollowNodeTask createShardFollowTask(ShardFollowTaskParams params) { AtomicBoolean stopped = new AtomicBoolean(false); - ShardFollowTask params = new ShardFollowTask( - null, - new ShardId("follow_index", "", 0), - new ShardId("leader_index", "", 0), - maxBatchOperationCount, - maxConcurrentReadBatches, - new ByteSizeValue(maxBatchSizeInBytes, ByteSizeUnit.BYTES), - maxConcurrentWriteBatches, - maxWriteBufferCount, 
- maxWriteBufferSize, - TimeValue.ZERO, - TimeValue.ZERO, - Collections.emptyMap() + ShardFollowTask followTask = new ShardFollowTask( + params.remoteCluster, + params.followShardId, + params.leaderShardId, + params.maxReadRequestOperationCount, + params.maxReadRequestSize, + params.maxOutstandingReadRequests, + params.maxWriteRequestOperationCount, + params.maxWriteRequestSize, + params.maxOutstandingWriteRequests, + params.maxWriteBufferCount, + params.maxWriteBufferSize, + params.maxRetryDelay, + params.readPollTimeout, + params.headers ); shardChangesRequests = new ArrayList<>(); @@ -775,7 +871,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { maxSeqNos = new LinkedList<>(); responseSizes = new LinkedList<>(); return new ShardFollowNodeTask( - 1L, "type", ShardFollowTask.NAME, "description", null, Collections.emptyMap(), params, scheduler, System::nanoTime) { + 1L, "type", ShardFollowTask.NAME, "description", null, Collections.emptyMap(), followTask, scheduler, System::nanoTime) { @Override protected void innerUpdateMapping(LongConsumer handler, Consumer errorHandler) { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index 07c3121eba4..d2f09c3900d 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -368,7 +368,9 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest new ShardId("follow_index", "", 0), new ShardId("leader_index", "", 0), between(1, 64), + new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES), between(1, 8), + between(1, 64), new ByteSizeValue(Long.MAX_VALUE, ByteSizeUnit.BYTES), between(1, 4), 10240, @@ -435,7 +437,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest return; } Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from, - maxOperationCount, recordedLeaderIndexHistoryUUID, params.getMaxBatchSize()); + maxOperationCount, recordedLeaderIndexHistoryUUID, params.getMaxReadRequestSize()); // hard code mapping version; this is ok, as mapping updates are not tested here final ShardChangesAction.Response response = new ShardChangesAction.Response( 1L, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java index e955f77d733..1dfe4a98970 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskTests.java @@ -30,6 +30,8 @@ public class ShardFollowTaskTests extends AbstractSerializingTestCase existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); existingAutoFollowPatterns.put("name1", - new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null, null, null)); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); @@ -44,7 +44,7 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { List 
existingPatterns = new ArrayList<>(); existingPatterns.add("logs-*"); existingAutoFollowPatterns.put("name2", - new AutoFollowPattern("asia_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("asia_cluster", existingPatterns, null, null, null, null, null, null, null, null, null, null, null)); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); @@ -78,7 +78,7 @@ public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); existingAutoFollowPatterns.put("name1", - new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null, null, null)); existingHeaders.put("key", Collections.singletonMap("key", "val")); } ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster")) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java index 128474bbc30..e2c7f327ab9 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java @@ -24,9 +24,9 @@ public class TransportGetAutoFollowPatternActionTests extends ESTestCase { public void testGetAutoFollowPattern() { Map patterns = new HashMap<>(); patterns.put("name1", new AutoFollowPattern( - "test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null, null)); + "test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null, null, null, null)); patterns.put("name2", new AutoFollowPattern( - "test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null, null)); + "test_alias1", Collections.singletonList("index-*"), null, null, null, null, null, null, null, null, null, null, null)); MetaData metaData = MetaData.builder() .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())) .build(); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java index 433ef402af8..ac556d47c85 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java @@ -103,7 +103,7 @@ public class TransportPutAutoFollowPatternActionTests extends ESTestCase { List existingPatterns = new ArrayList<>(); existingPatterns.add("transactions-*"); existingAutoFollowPatterns.put("name1", - new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null)); + new AutoFollowPattern("eu_cluster", existingPatterns, null, null, null, null, null, null, null, null, null, null, null)); Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); List existingUUIDS = new ArrayList<>(); existingUUIDS.add("_val"); diff --git 
a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java index 82cbe2622b7..4a201e37355 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowActionTests.java @@ -79,8 +79,10 @@ public class TransportUnfollowActionTests extends ESTestCase { new ShardId("follow_index", "", 0), new ShardId("leader_index", "", 0), 1024, + TransportResumeFollowAction.DEFAULT_MAX_READ_REQUEST_SIZE, 1, - TransportResumeFollowAction.DEFAULT_MAX_BATCH_SIZE, + 1024, + TransportResumeFollowAction.DEFAULT_MAX_READ_REQUEST_SIZE, 1, 10240, new ByteSizeValue(512, ByteSizeUnit.MB), diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 9b4ed7c8a97..f3e0c2d5bd7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -91,24 +91,24 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase> fetchExceptions = new TreeMap<>(Collections.singletonMap( randomNonNegativeLong(), Tuple.tuple(randomIntBetween(0, Integer.MAX_VALUE), new ElasticsearchException("shard is sad")))); - final long timeSinceLastFetchMillis = randomNonNegativeLong(); + final long timeSinceLastReadMillis = randomNonNegativeLong(); final ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus( "leader_cluster", "leader_index", @@ -121,21 +121,21 @@ public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase) fieldMapping.get("properties")).size(), equalTo(3)); assertThat(XContentMapValues.extractValue("properties.from_seq_no.type", fieldMapping), equalTo("long")); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index 8172612b78f..379dbe7a421 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -178,34 +178,42 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public static final ParseField REMOTE_CLUSTER_FIELD = new ParseField("remote_cluster"); public static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_index_patterns"); public static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_index_pattern"); - public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); - public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); - public static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); - public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + public static final ParseField MAX_READ_REQUEST_OPERATION_COUNT = new ParseField("max_read_request_operation_count"); + public static final ParseField MAX_READ_REQUEST_SIZE = new 
ParseField("max_read_request_size"); + public static final ParseField MAX_OUTSTANDING_READ_REQUESTS = new ParseField("max_outstanding_read_requests"); + public static final ParseField MAX_WRITE_REQUEST_OPERATION_COUNT = new ParseField("max_write_request_operation_count"); + public static final ParseField MAX_WRITE_REQUEST_SIZE = new ParseField("max_write_request_size"); + public static final ParseField MAX_OUTSTANDING_WRITE_REQUESTS = new ParseField("max_outstanding_write_requests"); public static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count"); public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay"); - public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); + public static final ParseField READ_POLL_TIMEOUT = new ParseField("read_poll_timeout"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("auto_follow_pattern", args -> new AutoFollowPattern((String) args[0], (List) args[1], (String) args[2], (Integer) args[3], - (Integer) args[4], (ByteSizeValue) args[5], (Integer) args[6], (Integer) args[7], (ByteSizeValue) args[8], - (TimeValue) args[9], (TimeValue) args[10])); + (ByteSizeValue) args[4], (Integer) args[5], (Integer) args[6], (ByteSizeValue) args[7], (Integer) args[8], + (Integer) args[9], (ByteSizeValue) args[10], (TimeValue) args[11], (TimeValue) args[12])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), REMOTE_CLUSTER_FIELD); PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), LEADER_PATTERNS_FIELD); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FOLLOW_PATTERN_FIELD); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_OPERATION_COUNT); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_READ_BATCHES); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_READ_REQUEST_OPERATION_COUNT); PARSER.declareField( ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), - MAX_BATCH_SIZE, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_READ_REQUEST_SIZE.getPreferredName()), + MAX_READ_REQUEST_SIZE, ObjectParser.ValueType.STRING); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OUTSTANDING_READ_REQUESTS); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_REQUEST_OPERATION_COUNT); + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_REQUEST_SIZE.getPreferredName()), + MAX_WRITE_REQUEST_SIZE, + ObjectParser.ValueType.STRING); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OUTSTANDING_WRITE_REQUESTS); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_BUFFER_COUNT); PARSER.declareField( ConstructingObjectParser.optionalConstructorArg(), @@ -216,17 +224,19 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY.getPreferredName()), MAX_RETRY_DELAY, ObjectParser.ValueType.STRING); 
PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), - POLL_TIMEOUT, ObjectParser.ValueType.STRING); + (p, c) -> TimeValue.parseTimeValue(p.text(), READ_POLL_TIMEOUT.getPreferredName()), + READ_POLL_TIMEOUT, ObjectParser.ValueType.STRING); } private final String remoteCluster; private final List leaderIndexPatterns; private final String followIndexPattern; - private final Integer maxBatchOperationCount; - private final Integer maxConcurrentReadBatches; - private final ByteSizeValue maxBatchSize; - private final Integer maxConcurrentWriteBatches; + private final Integer maxReadRequestOperationCount; + private final ByteSizeValue maxReadRequestSize; + private final Integer maxOutstandingReadRequests; + private final Integer maxWriteRequestOperationCount; + private final ByteSizeValue maxWriteRequestSize; + private final Integer maxOutstandingWriteRequests; private final Integer maxWriteBufferCount; private final ByteSizeValue maxWriteBufferSize; private final TimeValue maxRetryDelay; @@ -235,20 +245,24 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public AutoFollowPattern(String remoteCluster, List leaderIndexPatterns, String followIndexPattern, - Integer maxBatchOperationCount, - Integer maxConcurrentReadBatches, - ByteSizeValue maxBatchSize, - Integer maxConcurrentWriteBatches, + Integer maxReadRequestOperationCount, + ByteSizeValue maxReadRequestSize, + Integer maxOutstandingReadRequests, + Integer maxWriteRequestOperationCount, + ByteSizeValue maxWriteRequestSize, + Integer maxOutstandingWriteRequests, Integer maxWriteBufferCount, ByteSizeValue maxWriteBufferSize, TimeValue maxRetryDelay, TimeValue pollTimeout) { this.remoteCluster = remoteCluster; this.leaderIndexPatterns = leaderIndexPatterns; this.followIndexPattern = followIndexPattern; - this.maxBatchOperationCount = maxBatchOperationCount; - this.maxConcurrentReadBatches = maxConcurrentReadBatches; - this.maxBatchSize = maxBatchSize; - this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + this.maxReadRequestOperationCount = maxReadRequestOperationCount; + this.maxReadRequestSize = maxReadRequestSize; + this.maxOutstandingReadRequests = maxOutstandingReadRequests; + this.maxWriteRequestOperationCount = maxWriteRequestOperationCount; + this.maxWriteRequestSize = maxWriteRequestSize; + this.maxOutstandingWriteRequests = maxOutstandingWriteRequests; this.maxWriteBufferCount = maxWriteBufferCount; this.maxWriteBufferSize = maxWriteBufferSize; this.maxRetryDelay = maxRetryDelay; @@ -259,10 +273,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i remoteCluster = in.readString(); leaderIndexPatterns = in.readList(StreamInput::readString); followIndexPattern = in.readOptionalString(); - maxBatchOperationCount = in.readOptionalVInt(); - maxConcurrentReadBatches = in.readOptionalVInt(); - maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); - maxConcurrentWriteBatches = in.readOptionalVInt(); + maxReadRequestOperationCount = in.readOptionalVInt(); + maxReadRequestSize = in.readOptionalWriteable(ByteSizeValue::new); + maxOutstandingReadRequests = in.readOptionalVInt(); + maxWriteRequestOperationCount = in.readOptionalVInt(); + maxWriteRequestSize = in.readOptionalWriteable(ByteSizeValue::new); + maxOutstandingWriteRequests = in.readOptionalVInt(); maxWriteBufferCount = in.readOptionalVInt(); maxWriteBufferSize = in.readOptionalWriteable(ByteSizeValue::new); maxRetryDelay = 
in.readOptionalTimeValue(); @@ -289,20 +305,28 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return followIndexPattern; } - public Integer getMaxBatchOperationCount() { - return maxBatchOperationCount; + public Integer getMaxReadRequestOperationCount() { + return maxReadRequestOperationCount; } - public Integer getMaxConcurrentReadBatches() { - return maxConcurrentReadBatches; + public Integer getMaxOutstandingReadRequests() { + return maxOutstandingReadRequests; } - public ByteSizeValue getMaxBatchSize() { - return maxBatchSize; + public ByteSizeValue getMaxReadRequestSize() { + return maxReadRequestSize; } - public Integer getMaxConcurrentWriteBatches() { - return maxConcurrentWriteBatches; + public Integer getMaxWriteRequestOperationCount() { + return maxWriteRequestOperationCount; + } + + public ByteSizeValue getMaxWriteRequestSize() { + return maxWriteRequestSize; + } + + public Integer getMaxOutstandingWriteRequests() { + return maxOutstandingWriteRequests; } public Integer getMaxWriteBufferCount() { @@ -326,10 +350,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i out.writeString(remoteCluster); out.writeStringList(leaderIndexPatterns); out.writeOptionalString(followIndexPattern); - out.writeOptionalVInt(maxBatchOperationCount); - out.writeOptionalVInt(maxConcurrentReadBatches); - out.writeOptionalWriteable(maxBatchSize); - out.writeOptionalVInt(maxConcurrentWriteBatches); + out.writeOptionalVInt(maxReadRequestOperationCount); + out.writeOptionalWriteable(maxReadRequestSize); + out.writeOptionalVInt(maxOutstandingReadRequests); + out.writeOptionalVInt(maxWriteRequestOperationCount); + out.writeOptionalWriteable(maxWriteRequestSize); + out.writeOptionalVInt(maxOutstandingWriteRequests); out.writeOptionalVInt(maxWriteBufferCount); out.writeOptionalWriteable(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); @@ -343,17 +369,23 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i if (followIndexPattern != null) { builder.field(FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexPattern); } - if (maxBatchOperationCount != null) { - builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); + if (maxReadRequestOperationCount != null) { + builder.field(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), maxReadRequestOperationCount); } - if (maxConcurrentReadBatches != null) { - builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); + if (maxReadRequestSize != null) { + builder.field(MAX_READ_REQUEST_SIZE.getPreferredName(), maxReadRequestSize.getStringRep()); } - if (maxBatchSize != null) { - builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); + if (maxOutstandingReadRequests != null) { + builder.field(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), maxOutstandingReadRequests); } - if (maxConcurrentWriteBatches != null) { - builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + if (maxWriteRequestOperationCount != null) { + builder.field(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), maxWriteRequestOperationCount); + } + if (maxWriteRequestSize != null) { + builder.field(MAX_WRITE_REQUEST_SIZE.getPreferredName(), maxWriteRequestSize.getStringRep()); + } + if (maxOutstandingWriteRequests != null) { + builder.field(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), maxOutstandingWriteRequests); } if (maxWriteBufferCount != null){ builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), 
maxWriteBufferCount); @@ -365,7 +397,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay); } if (pollTimeout != null) { - builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout); + builder.field(READ_POLL_TIMEOUT.getPreferredName(), pollTimeout); } return builder; } @@ -383,10 +415,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i return Objects.equals(remoteCluster, that.remoteCluster) && Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) && Objects.equals(followIndexPattern, that.followIndexPattern) && - Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) && - Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) && - Objects.equals(maxBatchSize, that.maxBatchSize) && - Objects.equals(maxConcurrentWriteBatches, that.maxConcurrentWriteBatches) && + Objects.equals(maxReadRequestOperationCount, that.maxReadRequestOperationCount) && + Objects.equals(maxReadRequestSize, that.maxReadRequestSize) && + Objects.equals(maxOutstandingReadRequests, that.maxOutstandingReadRequests) && + Objects.equals(maxWriteRequestOperationCount, that.maxWriteRequestOperationCount) && + Objects.equals(maxWriteRequestSize, that.maxWriteRequestSize) && + Objects.equals(maxOutstandingWriteRequests, that.maxOutstandingWriteRequests) && Objects.equals(maxWriteBufferCount, that.maxWriteBufferCount) && Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) && Objects.equals(maxRetryDelay, that.maxRetryDelay) && @@ -399,10 +433,12 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i remoteCluster, leaderIndexPatterns, followIndexPattern, - maxBatchOperationCount, - maxConcurrentReadBatches, - maxBatchSize, - maxConcurrentWriteBatches, + maxReadRequestOperationCount, + maxReadRequestSize, + maxOutstandingReadRequests, + maxWriteRequestOperationCount, + maxWriteRequestSize, + maxOutstandingWriteRequests, maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java index e2e907f80d1..b8f645eea44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/ShardFollowNodeTaskStatus.java @@ -43,23 +43,23 @@ public class ShardFollowNodeTaskStatus implements Task.Status { private static final ParseField FOLLOWER_GLOBAL_CHECKPOINT_FIELD = new ParseField("follower_global_checkpoint"); private static final ParseField FOLLOWER_MAX_SEQ_NO_FIELD = new ParseField("follower_max_seq_no"); private static final ParseField LAST_REQUESTED_SEQ_NO_FIELD = new ParseField("last_requested_seq_no"); - private static final ParseField NUMBER_OF_CONCURRENT_READS_FIELD = new ParseField("number_of_concurrent_reads"); - private static final ParseField NUMBER_OF_CONCURRENT_WRITES_FIELD = new ParseField("number_of_concurrent_writes"); - private static final ParseField NUMBER_OF_QUEUED_WRITES_FIELD = new ParseField("number_of_queued_writes"); - private static final ParseField BUFFER_SIZE_IN_BYTES_FIELD = new ParseField("buffer_size_in_bytes"); - private static final ParseField MAPPING_VERSION_FIELD = new ParseField("mapping_version"); - private static final ParseField TOTAL_FETCH_TIME_MILLIS_FIELD = new ParseField("total_fetch_time_millis"); - private static final 
ParseField TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD = new ParseField("total_fetch_remote_time_millis"); - private static final ParseField NUMBER_OF_SUCCESSFUL_FETCHES_FIELD = new ParseField("number_of_successful_fetches"); - private static final ParseField NUMBER_OF_FAILED_FETCHES_FIELD = new ParseField("number_of_failed_fetches"); - private static final ParseField OPERATIONS_RECEIVED_FIELD = new ParseField("operations_received"); - private static final ParseField TOTAL_TRANSFERRED_BYTES = new ParseField("total_transferred_bytes"); - private static final ParseField TOTAL_INDEX_TIME_MILLIS_FIELD = new ParseField("total_index_time_millis"); - private static final ParseField NUMBER_OF_SUCCESSFUL_BULK_OPERATIONS_FIELD = new ParseField("number_of_successful_bulk_operations"); - private static final ParseField NUMBER_OF_FAILED_BULK_OPERATIONS_FIELD = new ParseField("number_of_failed_bulk_operations"); - private static final ParseField NUMBER_OF_OPERATIONS_INDEXED_FIELD = new ParseField("number_of_operations_indexed"); - private static final ParseField FETCH_EXCEPTIONS = new ParseField("fetch_exceptions"); - private static final ParseField TIME_SINCE_LAST_FETCH_MILLIS_FIELD = new ParseField("time_since_last_fetch_millis"); + private static final ParseField OUTSTANDING_READ_REQUESTS = new ParseField("outstanding_read_requests"); + private static final ParseField OUTSTANDING_WRITE_REQUESTS = new ParseField("outstanding_write_requests"); + private static final ParseField WRITE_BUFFER_OPERATION_COUNT_FIELD = new ParseField("write_buffer_operation_count"); + private static final ParseField WRITE_BUFFER_SIZE_IN_BYTES_FIELD = new ParseField("write_buffer_size_in_bytes"); + private static final ParseField FOLLOWER_MAPPING_VERSION_FIELD = new ParseField("follower_mapping_version"); + private static final ParseField TOTAL_READ_TIME_MILLIS_FIELD = new ParseField("total_read_time_millis"); + private static final ParseField TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD = new ParseField("total_read_remote_exec_time_millis"); + private static final ParseField SUCCESSFUL_READ_REQUESTS_FIELD = new ParseField("successful_read_requests"); + private static final ParseField FAILED_READ_REQUESTS_FIELD = new ParseField("failed_read_requests"); + private static final ParseField OPERATIONS_READ_FIELD = new ParseField("operations_read"); + private static final ParseField BYTES_READ = new ParseField("bytes_read"); + private static final ParseField TOTAL_WRITE_TIME_MILLIS_FIELD = new ParseField("total_write_time_millis"); + private static final ParseField SUCCESSFUL_WRITE_REQUESTS_FIELD = new ParseField("successful_write_requests"); + private static final ParseField FAILED_WRITE_REQUEST_FIELD = new ParseField("failed_write_requests"); + private static final ParseField OPERATIONS_WRITTEN = new ParseField("operations_written"); + private static final ParseField READ_EXCEPTIONS = new ParseField("read_exceptions"); + private static final ParseField TIME_SINCE_LAST_READ_MILLIS_FIELD = new ParseField("time_since_last_read_millis"); private static final ParseField FATAL_EXCEPTION = new ParseField("fatal_exception"); @SuppressWarnings("unchecked") @@ -98,11 +98,11 @@ public class ShardFollowNodeTaskStatus implements Task.Status { (long) args[25], (ElasticsearchException) args[26])); - public static final String FETCH_EXCEPTIONS_ENTRY_PARSER_NAME = "shard-follow-node-task-status-fetch-exceptions-entry"; + public static final String READ_EXCEPTIONS_ENTRY_PARSER_NAME = "shard-follow-node-task-status-read-exceptions-entry"; - static final 
ConstructingObjectParser>, Void> FETCH_EXCEPTIONS_ENTRY_PARSER = + static final ConstructingObjectParser>, Void> READ_EXCEPTIONS_ENTRY_PARSER = new ConstructingObjectParser<>( - FETCH_EXCEPTIONS_ENTRY_PARSER_NAME, + READ_EXCEPTIONS_ENTRY_PARSER_NAME, args -> new AbstractMap.SimpleEntry<>((long) args[0], Tuple.tuple((Integer)args[1], (ElasticsearchException)args[2]))); static { @@ -115,39 +115,39 @@ public class ShardFollowNodeTaskStatus implements Task.Status { STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_GLOBAL_CHECKPOINT_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAX_SEQ_NO_FIELD); STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_REQUESTED_SEQ_NO_FIELD); - STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_CONCURRENT_READS_FIELD); - STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_CONCURRENT_WRITES_FIELD); - STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUMBER_OF_QUEUED_WRITES_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUFFER_SIZE_IN_BYTES_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), MAPPING_VERSION_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_TIME_MILLIS_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_FETCHES_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_FETCHES_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_RECEIVED_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_TRANSFERRED_BYTES); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_INDEX_TIME_MILLIS_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_BULK_OPERATIONS_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_BULK_OPERATIONS_FIELD); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_OPERATIONS_INDEXED_FIELD); - STATUS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), FETCH_EXCEPTIONS_ENTRY_PARSER, FETCH_EXCEPTIONS); - STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_FETCH_MILLIS_FIELD); + STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), OUTSTANDING_READ_REQUESTS); + STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), OUTSTANDING_WRITE_REQUESTS); + STATUS_PARSER.declareInt(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_OPERATION_COUNT_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), WRITE_BUFFER_SIZE_IN_BYTES_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FOLLOWER_MAPPING_VERSION_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_TIME_MILLIS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_READ_REQUESTS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILED_READ_REQUESTS_FIELD); + 
STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_READ_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), BYTES_READ); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL_WRITE_TIME_MILLIS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), SUCCESSFUL_WRITE_REQUESTS_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILED_WRITE_REQUEST_FIELD); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), OPERATIONS_WRITTEN); + STATUS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), READ_EXCEPTIONS_ENTRY_PARSER, READ_EXCEPTIONS); + STATUS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_READ_MILLIS_FIELD); STATUS_PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), FATAL_EXCEPTION); } - static final ParseField FETCH_EXCEPTIONS_ENTRY_FROM_SEQ_NO = new ParseField("from_seq_no"); - static final ParseField FETCH_EXCEPTIONS_RETRIES = new ParseField("retries"); - static final ParseField FETCH_EXCEPTIONS_ENTRY_EXCEPTION = new ParseField("exception"); + static final ParseField READ_EXCEPTIONS_ENTRY_FROM_SEQ_NO = new ParseField("from_seq_no"); + static final ParseField READ_EXCEPTIONS_RETRIES = new ParseField("retries"); + static final ParseField READ_EXCEPTIONS_ENTRY_EXCEPTION = new ParseField("exception"); static { - FETCH_EXCEPTIONS_ENTRY_PARSER.declareLong(ConstructingObjectParser.constructorArg(), FETCH_EXCEPTIONS_ENTRY_FROM_SEQ_NO); - FETCH_EXCEPTIONS_ENTRY_PARSER.declareInt(ConstructingObjectParser.constructorArg(), FETCH_EXCEPTIONS_RETRIES); - FETCH_EXCEPTIONS_ENTRY_PARSER.declareObject( + READ_EXCEPTIONS_ENTRY_PARSER.declareLong(ConstructingObjectParser.constructorArg(), READ_EXCEPTIONS_ENTRY_FROM_SEQ_NO); + READ_EXCEPTIONS_ENTRY_PARSER.declareInt(ConstructingObjectParser.constructorArg(), READ_EXCEPTIONS_RETRIES); + READ_EXCEPTIONS_ENTRY_PARSER.declareObject( ConstructingObjectParser.constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), - FETCH_EXCEPTIONS_ENTRY_EXCEPTION); + READ_EXCEPTIONS_ENTRY_EXCEPTION); } private final String remoteCluster; @@ -204,106 +204,106 @@ public class ShardFollowNodeTaskStatus implements Task.Status { return lastRequestedSeqNo; } - private final int numberOfConcurrentReads; + private final int outstandingReadRequests; - public int numberOfConcurrentReads() { - return numberOfConcurrentReads; + public int outstandingReadRequests() { + return outstandingReadRequests; } - private final int numberOfConcurrentWrites; + private final int outstandingWriteRequests; - public int numberOfConcurrentWrites() { - return numberOfConcurrentWrites; + public int outstandingWriteRequests() { + return outstandingWriteRequests; } - private final int numberOfQueuedWrites; + private final int writeBufferOperationCount; - public int numberOfQueuedWrites() { - return numberOfQueuedWrites; + public int writeBufferOperationCount() { + return writeBufferOperationCount; } - private final long bufferSize; + private final long writeBufferSizeInBytes; - public long bufferSize() { - return bufferSize; + public long writeBufferSizeInBytes() { + return writeBufferSizeInBytes; } - private final long mappingVersion; + private final long followerMappingVersion; - public long mappingVersion() { - return mappingVersion; + public long followerMappingVersion() { + return followerMappingVersion; } - private final long 
totalFetchTimeMillis; + private final long totalReadTimeMillis; - public long totalFetchTimeMillis() { - return totalFetchTimeMillis; + public long totalReadTimeMillis() { + return totalReadTimeMillis; } - private final long totalFetchRemoteTimeMillis; + private final long totalReadRemoteExecTimeMillis; - public long totalFetchRemoteTimeMillis() { - return totalFetchRemoteTimeMillis; + public long totalReadRemoteExecTimeMillis() { + return totalReadRemoteExecTimeMillis; } - private final long numberOfSuccessfulFetches; + private final long successfulReadRequests; - public long numberOfSuccessfulFetches() { - return numberOfSuccessfulFetches; + public long successfulReadRequests() { + return successfulReadRequests; } - private final long numberOfFailedFetches; + private final long failedReadRequests; - public long numberOfFailedFetches() { - return numberOfFailedFetches; + public long failedReadRequests() { + return failedReadRequests; } - private final long operationsReceived; + private final long operationsReads; - public long operationsReceived() { - return operationsReceived; + public long operationsReads() { + return operationsReads; } - private final long totalTransferredBytes; + private final long bytesRead; - public long totalTransferredBytes() { - return totalTransferredBytes; + public long bytesRead() { + return bytesRead; } - private final long totalIndexTimeMillis; + private final long totalWriteTimeMillis; - public long totalIndexTimeMillis() { - return totalIndexTimeMillis; + public long totalWriteTimeMillis() { + return totalWriteTimeMillis; } - private final long numberOfSuccessfulBulkOperations; + private final long successfulWriteRequests; - public long numberOfSuccessfulBulkOperations() { - return numberOfSuccessfulBulkOperations; + public long successfulWriteRequests() { + return successfulWriteRequests; } - private final long numberOfFailedBulkOperations; + private final long failedWriteRequests; - public long numberOfFailedBulkOperations() { - return numberOfFailedBulkOperations; + public long failedWriteRequests() { + return failedWriteRequests; } - private final long numberOfOperationsIndexed; + private final long operationWritten; - public long numberOfOperationsIndexed() { - return numberOfOperationsIndexed; + public long operationWritten() { + return operationWritten; } - private final NavigableMap> fetchExceptions; + private final NavigableMap> readExceptions; - public NavigableMap> fetchExceptions() { - return fetchExceptions; + public NavigableMap> readExceptions() { + return readExceptions; } - private final long timeSinceLastFetchMillis; + private final long timeSinceLastReadMillis; - public long timeSinceLastFetchMillis() { - return timeSinceLastFetchMillis; + public long timeSinceLastReadMillis() { + return timeSinceLastReadMillis; } private final ElasticsearchException fatalException; @@ -322,23 +322,23 @@ public class ShardFollowNodeTaskStatus implements Task.Status { final long followerGlobalCheckpoint, final long followerMaxSeqNo, final long lastRequestedSeqNo, - final int numberOfConcurrentReads, - final int numberOfConcurrentWrites, - final int numberOfQueuedWrites, - final long bufferSize, - final long mappingVersion, - final long totalFetchTimeMillis, - final long totalFetchRemoteTimeMillis, - final long numberOfSuccessfulFetches, - final long numberOfFailedFetches, - final long operationsReceived, - final long totalTransferredBytes, - final long totalIndexTimeMillis, - final long numberOfSuccessfulBulkOperations, - final long 
numberOfFailedBulkOperations, - final long numberOfOperationsIndexed, - final NavigableMap> fetchExceptions, - final long timeSinceLastFetchMillis, + final int outstandingReadRequests, + final int outstandingWriteRequests, + final int writeBufferOperationCount, + final long writeBufferSizeInBytes, + final long followerMappingVersion, + final long totalReadTimeMillis, + final long totalReadRemoteExecTimeMillis, + final long successfulReadRequests, + final long failedReadRequests, + final long operationsReads, + final long bytesRead, + final long totalWriteTimeMillis, + final long successfulWriteRequests, + final long failedWriteRequests, + final long operationWritten, + final NavigableMap> readExceptions, + final long timeSinceLastReadMillis, final ElasticsearchException fatalException) { this.remoteCluster = remoteCluster; this.leaderIndex = leaderIndex; @@ -349,23 +349,23 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.followerGlobalCheckpoint = followerGlobalCheckpoint; this.followerMaxSeqNo = followerMaxSeqNo; this.lastRequestedSeqNo = lastRequestedSeqNo; - this.numberOfConcurrentReads = numberOfConcurrentReads; - this.numberOfConcurrentWrites = numberOfConcurrentWrites; - this.numberOfQueuedWrites = numberOfQueuedWrites; - this.bufferSize = bufferSize; - this.mappingVersion = mappingVersion; - this.totalFetchTimeMillis = totalFetchTimeMillis; - this.totalFetchRemoteTimeMillis = totalFetchRemoteTimeMillis; - this.numberOfSuccessfulFetches = numberOfSuccessfulFetches; - this.numberOfFailedFetches = numberOfFailedFetches; - this.operationsReceived = operationsReceived; - this.totalTransferredBytes = totalTransferredBytes; - this.totalIndexTimeMillis = totalIndexTimeMillis; - this.numberOfSuccessfulBulkOperations = numberOfSuccessfulBulkOperations; - this.numberOfFailedBulkOperations = numberOfFailedBulkOperations; - this.numberOfOperationsIndexed = numberOfOperationsIndexed; - this.fetchExceptions = Objects.requireNonNull(fetchExceptions); - this.timeSinceLastFetchMillis = timeSinceLastFetchMillis; + this.outstandingReadRequests = outstandingReadRequests; + this.outstandingWriteRequests = outstandingWriteRequests; + this.writeBufferOperationCount = writeBufferOperationCount; + this.writeBufferSizeInBytes = writeBufferSizeInBytes; + this.followerMappingVersion = followerMappingVersion; + this.totalReadTimeMillis = totalReadTimeMillis; + this.totalReadRemoteExecTimeMillis = totalReadRemoteExecTimeMillis; + this.successfulReadRequests = successfulReadRequests; + this.failedReadRequests = failedReadRequests; + this.operationsReads = operationsReads; + this.bytesRead = bytesRead; + this.totalWriteTimeMillis = totalWriteTimeMillis; + this.successfulWriteRequests = successfulWriteRequests; + this.failedWriteRequests = failedWriteRequests; + this.operationWritten = operationWritten; + this.readExceptions = Objects.requireNonNull(readExceptions); + this.timeSinceLastReadMillis = timeSinceLastReadMillis; this.fatalException = fatalException; } @@ -379,24 +379,24 @@ public class ShardFollowNodeTaskStatus implements Task.Status { this.followerGlobalCheckpoint = in.readZLong(); this.followerMaxSeqNo = in.readZLong(); this.lastRequestedSeqNo = in.readZLong(); - this.numberOfConcurrentReads = in.readVInt(); - this.numberOfConcurrentWrites = in.readVInt(); - this.numberOfQueuedWrites = in.readVInt(); - this.bufferSize = in.readVLong(); - this.mappingVersion = in.readVLong(); - this.totalFetchTimeMillis = in.readVLong(); - this.totalFetchRemoteTimeMillis = in.readVLong(); - 
this.numberOfSuccessfulFetches = in.readVLong(); - this.numberOfFailedFetches = in.readVLong(); - this.operationsReceived = in.readVLong(); - this.totalTransferredBytes = in.readVLong(); - this.totalIndexTimeMillis = in.readVLong(); - this.numberOfSuccessfulBulkOperations = in.readVLong(); - this.numberOfFailedBulkOperations = in.readVLong(); - this.numberOfOperationsIndexed = in.readVLong(); - this.fetchExceptions = + this.outstandingReadRequests = in.readVInt(); + this.outstandingWriteRequests = in.readVInt(); + this.writeBufferOperationCount = in.readVInt(); + this.writeBufferSizeInBytes = in.readVLong(); + this.followerMappingVersion = in.readVLong(); + this.totalReadTimeMillis = in.readVLong(); + this.totalReadRemoteExecTimeMillis = in.readVLong(); + this.successfulReadRequests = in.readVLong(); + this.failedReadRequests = in.readVLong(); + this.operationsReads = in.readVLong(); + this.bytesRead = in.readVLong(); + this.totalWriteTimeMillis = in.readVLong(); + this.successfulWriteRequests = in.readVLong(); + this.failedWriteRequests = in.readVLong(); + this.operationWritten = in.readVLong(); + this.readExceptions = new TreeMap<>(in.readMap(StreamInput::readVLong, stream -> Tuple.tuple(stream.readVInt(), stream.readException()))); - this.timeSinceLastFetchMillis = in.readZLong(); + this.timeSinceLastReadMillis = in.readZLong(); this.fatalException = in.readException(); } @@ -416,29 +416,29 @@ public class ShardFollowNodeTaskStatus implements Task.Status { out.writeZLong(followerGlobalCheckpoint); out.writeZLong(followerMaxSeqNo); out.writeZLong(lastRequestedSeqNo); - out.writeVInt(numberOfConcurrentReads); - out.writeVInt(numberOfConcurrentWrites); - out.writeVInt(numberOfQueuedWrites); - out.writeVLong(bufferSize); - out.writeVLong(mappingVersion); - out.writeVLong(totalFetchTimeMillis); - out.writeVLong(totalFetchRemoteTimeMillis); - out.writeVLong(numberOfSuccessfulFetches); - out.writeVLong(numberOfFailedFetches); - out.writeVLong(operationsReceived); - out.writeVLong(totalTransferredBytes); - out.writeVLong(totalIndexTimeMillis); - out.writeVLong(numberOfSuccessfulBulkOperations); - out.writeVLong(numberOfFailedBulkOperations); - out.writeVLong(numberOfOperationsIndexed); + out.writeVInt(outstandingReadRequests); + out.writeVInt(outstandingWriteRequests); + out.writeVInt(writeBufferOperationCount); + out.writeVLong(writeBufferSizeInBytes); + out.writeVLong(followerMappingVersion); + out.writeVLong(totalReadTimeMillis); + out.writeVLong(totalReadRemoteExecTimeMillis); + out.writeVLong(successfulReadRequests); + out.writeVLong(failedReadRequests); + out.writeVLong(operationsReads); + out.writeVLong(bytesRead); + out.writeVLong(totalWriteTimeMillis); + out.writeVLong(successfulWriteRequests); + out.writeVLong(failedWriteRequests); + out.writeVLong(operationWritten); out.writeMap( - fetchExceptions, + readExceptions, StreamOutput::writeVLong, (stream, value) -> { stream.writeVInt(value.v1()); stream.writeException(value.v2()); }); - out.writeZLong(timeSinceLastFetchMillis); + out.writeZLong(timeSinceLastReadMillis); out.writeException(fatalException); } @@ -462,44 +462,44 @@ public class ShardFollowNodeTaskStatus implements Task.Status { builder.field(FOLLOWER_GLOBAL_CHECKPOINT_FIELD.getPreferredName(), followerGlobalCheckpoint); builder.field(FOLLOWER_MAX_SEQ_NO_FIELD.getPreferredName(), followerMaxSeqNo); builder.field(LAST_REQUESTED_SEQ_NO_FIELD.getPreferredName(), lastRequestedSeqNo); - builder.field(NUMBER_OF_CONCURRENT_READS_FIELD.getPreferredName(), numberOfConcurrentReads); - 
builder.field(NUMBER_OF_CONCURRENT_WRITES_FIELD.getPreferredName(), numberOfConcurrentWrites); - builder.field(NUMBER_OF_QUEUED_WRITES_FIELD.getPreferredName(), numberOfQueuedWrites); + builder.field(OUTSTANDING_READ_REQUESTS.getPreferredName(), outstandingReadRequests); + builder.field(OUTSTANDING_WRITE_REQUESTS.getPreferredName(), outstandingWriteRequests); + builder.field(WRITE_BUFFER_OPERATION_COUNT_FIELD.getPreferredName(), writeBufferOperationCount); builder.humanReadableField( - BUFFER_SIZE_IN_BYTES_FIELD.getPreferredName(), - "buffer_size", - new ByteSizeValue(bufferSize)); - builder.field(MAPPING_VERSION_FIELD.getPreferredName(), mappingVersion); + WRITE_BUFFER_SIZE_IN_BYTES_FIELD.getPreferredName(), + "write_buffer_size", + new ByteSizeValue(writeBufferSizeInBytes)); + builder.field(FOLLOWER_MAPPING_VERSION_FIELD.getPreferredName(), followerMappingVersion); builder.humanReadableField( - TOTAL_FETCH_TIME_MILLIS_FIELD.getPreferredName(), - "total_fetch_time", - new TimeValue(totalFetchTimeMillis, TimeUnit.MILLISECONDS)); + TOTAL_READ_TIME_MILLIS_FIELD.getPreferredName(), + "total_read_time", + new TimeValue(totalReadTimeMillis, TimeUnit.MILLISECONDS)); builder.humanReadableField( - TOTAL_FETCH_REMOTE_TIME_MILLIS_FIELD.getPreferredName(), - "total_fetch_leader_time", - new TimeValue(totalFetchRemoteTimeMillis, TimeUnit.MILLISECONDS)); - builder.field(NUMBER_OF_SUCCESSFUL_FETCHES_FIELD.getPreferredName(), numberOfSuccessfulFetches); - builder.field(NUMBER_OF_FAILED_FETCHES_FIELD.getPreferredName(), numberOfFailedFetches); - builder.field(OPERATIONS_RECEIVED_FIELD.getPreferredName(), operationsReceived); + TOTAL_READ_REMOTE_EXEC_TIME_MILLIS_FIELD.getPreferredName(), + "total_read_remote_exec_time", + new TimeValue(totalReadRemoteExecTimeMillis, TimeUnit.MILLISECONDS)); + builder.field(SUCCESSFUL_READ_REQUESTS_FIELD.getPreferredName(), successfulReadRequests); + builder.field(FAILED_READ_REQUESTS_FIELD.getPreferredName(), failedReadRequests); + builder.field(OPERATIONS_READ_FIELD.getPreferredName(), operationsReads); builder.humanReadableField( - TOTAL_TRANSFERRED_BYTES.getPreferredName(), - "total_transferred", - new ByteSizeValue(totalTransferredBytes, ByteSizeUnit.BYTES)); + BYTES_READ.getPreferredName(), + "total_read", + new ByteSizeValue(bytesRead, ByteSizeUnit.BYTES)); builder.humanReadableField( - TOTAL_INDEX_TIME_MILLIS_FIELD.getPreferredName(), - "total_index_time", - new TimeValue(totalIndexTimeMillis, TimeUnit.MILLISECONDS)); - builder.field(NUMBER_OF_SUCCESSFUL_BULK_OPERATIONS_FIELD.getPreferredName(), numberOfSuccessfulBulkOperations); - builder.field(NUMBER_OF_FAILED_BULK_OPERATIONS_FIELD.getPreferredName(), numberOfFailedBulkOperations); - builder.field(NUMBER_OF_OPERATIONS_INDEXED_FIELD.getPreferredName(), numberOfOperationsIndexed); - builder.startArray(FETCH_EXCEPTIONS.getPreferredName()); + TOTAL_WRITE_TIME_MILLIS_FIELD.getPreferredName(), + "total_write_time", + new TimeValue(totalWriteTimeMillis, TimeUnit.MILLISECONDS)); + builder.field(SUCCESSFUL_WRITE_REQUESTS_FIELD.getPreferredName(), successfulWriteRequests); + builder.field(FAILED_WRITE_REQUEST_FIELD.getPreferredName(), failedWriteRequests); + builder.field(OPERATIONS_WRITTEN.getPreferredName(), operationWritten); + builder.startArray(READ_EXCEPTIONS.getPreferredName()); { - for (final Map.Entry> entry : fetchExceptions.entrySet()) { + for (final Map.Entry> entry : readExceptions.entrySet()) { builder.startObject(); { - builder.field(FETCH_EXCEPTIONS_ENTRY_FROM_SEQ_NO.getPreferredName(), entry.getKey()); - 
builder.field(FETCH_EXCEPTIONS_RETRIES.getPreferredName(), entry.getValue().v1()); - builder.field(FETCH_EXCEPTIONS_ENTRY_EXCEPTION.getPreferredName()); + builder.field(READ_EXCEPTIONS_ENTRY_FROM_SEQ_NO.getPreferredName(), entry.getKey()); + builder.field(READ_EXCEPTIONS_RETRIES.getPreferredName(), entry.getValue().v1()); + builder.field(READ_EXCEPTIONS_ENTRY_EXCEPTION.getPreferredName()); builder.startObject(); { ElasticsearchException.generateThrowableXContent(builder, params, entry.getValue().v2()); @@ -511,9 +511,9 @@ public class ShardFollowNodeTaskStatus implements Task.Status { } builder.endArray(); builder.humanReadableField( - TIME_SINCE_LAST_FETCH_MILLIS_FIELD.getPreferredName(), - "time_since_last_fetch", - new TimeValue(timeSinceLastFetchMillis, TimeUnit.MILLISECONDS)); + TIME_SINCE_LAST_READ_MILLIS_FIELD.getPreferredName(), + "time_since_last_read", + new TimeValue(timeSinceLastReadMillis, TimeUnit.MILLISECONDS)); if (fatalException != null) { builder.field(FATAL_EXCEPTION.getPreferredName()); builder.startObject(); @@ -545,28 +545,28 @@ public class ShardFollowNodeTaskStatus implements Task.Status { followerGlobalCheckpoint == that.followerGlobalCheckpoint && followerMaxSeqNo == that.followerMaxSeqNo && lastRequestedSeqNo == that.lastRequestedSeqNo && - numberOfConcurrentReads == that.numberOfConcurrentReads && - numberOfConcurrentWrites == that.numberOfConcurrentWrites && - numberOfQueuedWrites == that.numberOfQueuedWrites && - bufferSize == that.bufferSize && - mappingVersion == that.mappingVersion && - totalFetchTimeMillis == that.totalFetchTimeMillis && - totalFetchRemoteTimeMillis == that.totalFetchRemoteTimeMillis && - numberOfSuccessfulFetches == that.numberOfSuccessfulFetches && - numberOfFailedFetches == that.numberOfFailedFetches && - operationsReceived == that.operationsReceived && - totalTransferredBytes == that.totalTransferredBytes && - numberOfSuccessfulBulkOperations == that.numberOfSuccessfulBulkOperations && - numberOfFailedBulkOperations == that.numberOfFailedBulkOperations && - numberOfOperationsIndexed == that.numberOfOperationsIndexed && + outstandingReadRequests == that.outstandingReadRequests && + outstandingWriteRequests == that.outstandingWriteRequests && + writeBufferOperationCount == that.writeBufferOperationCount && + writeBufferSizeInBytes == that.writeBufferSizeInBytes && + followerMappingVersion == that.followerMappingVersion && + totalReadTimeMillis == that.totalReadTimeMillis && + totalReadRemoteExecTimeMillis == that.totalReadRemoteExecTimeMillis && + successfulReadRequests == that.successfulReadRequests && + failedReadRequests == that.failedReadRequests && + operationsReads == that.operationsReads && + bytesRead == that.bytesRead && + successfulWriteRequests == that.successfulWriteRequests && + failedWriteRequests == that.failedWriteRequests && + operationWritten == that.operationWritten && /* * ElasticsearchException does not implement equals so we will assume the fetch exceptions are equal if they are equal * up to the key set and their messages. Note that we are relying on the fact that the fetch exceptions are ordered by * keys. 
*/ - fetchExceptions.keySet().equals(that.fetchExceptions.keySet()) && - getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that)) && - timeSinceLastFetchMillis == that.timeSinceLastFetchMillis && + readExceptions.keySet().equals(that.readExceptions.keySet()) && + getReadExceptionMessages(this).equals(getReadExceptionMessages(that)) && + timeSinceLastReadMillis == that.timeSinceLastReadMillis && Objects.equals(fatalExceptionMessage, otherFatalExceptionMessage); } @@ -583,32 +583,32 @@ public class ShardFollowNodeTaskStatus implements Task.Status { followerGlobalCheckpoint, followerMaxSeqNo, lastRequestedSeqNo, - numberOfConcurrentReads, - numberOfConcurrentWrites, - numberOfQueuedWrites, - bufferSize, - mappingVersion, - totalFetchTimeMillis, - totalFetchRemoteTimeMillis, - numberOfSuccessfulFetches, - numberOfFailedFetches, - operationsReceived, - totalTransferredBytes, - numberOfSuccessfulBulkOperations, - numberOfFailedBulkOperations, - numberOfOperationsIndexed, + outstandingReadRequests, + outstandingWriteRequests, + writeBufferOperationCount, + writeBufferSizeInBytes, + followerMappingVersion, + totalReadTimeMillis, + totalReadRemoteExecTimeMillis, + successfulReadRequests, + failedReadRequests, + operationsReads, + bytesRead, + successfulWriteRequests, + failedWriteRequests, + operationWritten, /* * ElasticsearchException does not implement hash code so we will compute the hash code based on the key set and the * messages. Note that we are relying on the fact that the fetch exceptions are ordered by keys. */ - fetchExceptions.keySet(), - getFetchExceptionMessages(this), - timeSinceLastFetchMillis, + readExceptions.keySet(), + getReadExceptionMessages(this), + timeSinceLastReadMillis, fatalExceptionMessage); } - private static List getFetchExceptionMessages(final ShardFollowNodeTaskStatus status) { - return status.fetchExceptions().values().stream().map(t -> t.v2().getMessage()).collect(Collectors.toList()); + private static List getReadExceptionMessages(final ShardFollowNodeTaskStatus status) { + return status.readExceptions().values().stream().map(t -> t.v2().getMessage()).collect(Collectors.toList()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 8010c9bf344..b2f966bba74 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -52,14 +52,20 @@ public class PutAutoFollowPatternAction extends Action { PARSER.declareString(Request::setRemoteCluster, REMOTE_CLUSTER_FIELD); PARSER.declareStringArray(Request::setLeaderIndexPatterns, AutoFollowPattern.LEADER_PATTERNS_FIELD); PARSER.declareString(Request::setFollowIndexNamePattern, AutoFollowPattern.FOLLOW_PATTERN_FIELD); - PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT); - PARSER.declareInt(Request::setMaxConcurrentReadBatches, AutoFollowPattern.MAX_CONCURRENT_READ_BATCHES); + PARSER.declareInt(Request::setMaxReadRequestOperationCount, AutoFollowPattern.MAX_READ_REQUEST_OPERATION_COUNT); PARSER.declareField( - Request::setMaxBatchSize, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), AutoFollowPattern.MAX_BATCH_SIZE.getPreferredName()), - AutoFollowPattern.MAX_BATCH_SIZE, + 
Request::setMaxReadRequestSize, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), AutoFollowPattern.MAX_READ_REQUEST_SIZE.getPreferredName()), + AutoFollowPattern.MAX_READ_REQUEST_SIZE, ObjectParser.ValueType.STRING); - PARSER.declareInt(Request::setMaxConcurrentWriteBatches, AutoFollowPattern.MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt(Request::setMaxConcurrentReadBatches, AutoFollowPattern.MAX_OUTSTANDING_READ_REQUESTS); + PARSER.declareInt(Request::setMaxWriteRequestOperationCount, AutoFollowPattern.MAX_WRITE_REQUEST_OPERATION_COUNT); + PARSER.declareField( + Request::setMaxWriteRequestSize, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), AutoFollowPattern.MAX_WRITE_REQUEST_SIZE.getPreferredName()), + AutoFollowPattern.MAX_WRITE_REQUEST_SIZE, + ObjectParser.ValueType.STRING); + PARSER.declareInt(Request::setMaxConcurrentWriteBatches, AutoFollowPattern.MAX_OUTSTANDING_WRITE_REQUESTS); PARSER.declareInt(Request::setMaxWriteBufferCount, AutoFollowPattern.MAX_WRITE_BUFFER_COUNT); PARSER.declareField( Request::setMaxWriteBufferSize, @@ -69,9 +75,9 @@ public class PutAutoFollowPatternAction extends Action { PARSER.declareField(Request::setMaxRetryDelay, (p, c) -> TimeValue.parseTimeValue(p.text(), AutoFollowPattern.MAX_RETRY_DELAY.getPreferredName()), AutoFollowPattern.MAX_RETRY_DELAY, ObjectParser.ValueType.STRING); - PARSER.declareField(Request::setPollTimeout, - (p, c) -> TimeValue.parseTimeValue(p.text(), AutoFollowPattern.POLL_TIMEOUT.getPreferredName()), - AutoFollowPattern.POLL_TIMEOUT, ObjectParser.ValueType.STRING); + PARSER.declareField(Request::setReadPollTimeout, + (p, c) -> TimeValue.parseTimeValue(p.text(), AutoFollowPattern.READ_POLL_TIMEOUT.getPreferredName()), + AutoFollowPattern.READ_POLL_TIMEOUT, ObjectParser.ValueType.STRING); } public static Request fromXContent(XContentParser parser, String name) throws IOException { @@ -93,14 +99,16 @@ public class PutAutoFollowPatternAction extends Action { private List leaderIndexPatterns; private String followIndexNamePattern; - private Integer maxBatchOperationCount; + private Integer maxReadRequestOperationCount; + private ByteSizeValue maxReadRequestSize; private Integer maxConcurrentReadBatches; - private ByteSizeValue maxBatchSize; + private Integer maxWriteRequestOperationCount; + private ByteSizeValue maxWriteRequestSize; private Integer maxConcurrentWriteBatches; private Integer maxWriteBufferCount; private ByteSizeValue maxWriteBufferSize; private TimeValue maxRetryDelay; - private TimeValue pollTimeout; + private TimeValue readPollTimeout; @Override public ActionRequestValidationException validate() { @@ -164,12 +172,12 @@ public class PutAutoFollowPatternAction extends Action { this.followIndexNamePattern = followIndexNamePattern; } - public Integer getMaxBatchOperationCount() { - return maxBatchOperationCount; + public Integer getMaxReadRequestOperationCount() { + return maxReadRequestOperationCount; } - public void setMaxBatchOperationCount(Integer maxBatchOperationCount) { - this.maxBatchOperationCount = maxBatchOperationCount; + public void setMaxReadRequestOperationCount(Integer maxReadRequestOperationCount) { + this.maxReadRequestOperationCount = maxReadRequestOperationCount; } public Integer getMaxConcurrentReadBatches() { @@ -180,12 +188,28 @@ public class PutAutoFollowPatternAction extends Action { this.maxConcurrentReadBatches = maxConcurrentReadBatches; } - public ByteSizeValue getMaxBatchSize() { - return maxBatchSize; + public ByteSizeValue getMaxReadRequestSize() { + return 
maxReadRequestSize; } - public void setMaxBatchSize(ByteSizeValue maxBatchSize) { - this.maxBatchSize = maxBatchSize; + public void setMaxReadRequestSize(ByteSizeValue maxReadRequestSize) { + this.maxReadRequestSize = maxReadRequestSize; + } + + public Integer getMaxWriteRequestOperationCount() { + return maxWriteRequestOperationCount; + } + + public void setMaxWriteRequestOperationCount(Integer maxWriteRequestOperationCount) { + this.maxWriteRequestOperationCount = maxWriteRequestOperationCount; + } + + public ByteSizeValue getMaxWriteRequestSize() { + return maxWriteRequestSize; + } + + public void setMaxWriteRequestSize(ByteSizeValue maxWriteRequestSize) { + this.maxWriteRequestSize = maxWriteRequestSize; } public Integer getMaxConcurrentWriteBatches() { @@ -220,12 +244,12 @@ public class PutAutoFollowPatternAction extends Action { this.maxRetryDelay = maxRetryDelay; } - public TimeValue getPollTimeout() { - return pollTimeout; + public TimeValue getReadPollTimeout() { + return readPollTimeout; } - public void setPollTimeout(TimeValue pollTimeout) { - this.pollTimeout = pollTimeout; + public void setReadPollTimeout(TimeValue readPollTimeout) { + this.readPollTimeout = readPollTimeout; } @Override @@ -235,14 +259,16 @@ public class PutAutoFollowPatternAction extends Action { remoteCluster = in.readString(); leaderIndexPatterns = in.readList(StreamInput::readString); followIndexNamePattern = in.readOptionalString(); - maxBatchOperationCount = in.readOptionalVInt(); + maxReadRequestOperationCount = in.readOptionalVInt(); + maxReadRequestSize = in.readOptionalWriteable(ByteSizeValue::new); maxConcurrentReadBatches = in.readOptionalVInt(); - maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); + maxWriteRequestOperationCount = in.readOptionalVInt(); + maxWriteRequestSize = in.readOptionalWriteable(ByteSizeValue::new); maxConcurrentWriteBatches = in.readOptionalVInt(); maxWriteBufferCount = in.readOptionalVInt(); maxWriteBufferSize = in.readOptionalWriteable(ByteSizeValue::new); maxRetryDelay = in.readOptionalTimeValue(); - pollTimeout = in.readOptionalTimeValue(); + readPollTimeout = in.readOptionalTimeValue(); } @Override @@ -252,14 +278,16 @@ public class PutAutoFollowPatternAction extends Action { out.writeString(remoteCluster); out.writeStringList(leaderIndexPatterns); out.writeOptionalString(followIndexNamePattern); - out.writeOptionalVInt(maxBatchOperationCount); + out.writeOptionalVInt(maxReadRequestOperationCount); + out.writeOptionalWriteable(maxReadRequestSize); out.writeOptionalVInt(maxConcurrentReadBatches); - out.writeOptionalWriteable(maxBatchSize); + out.writeOptionalVInt(maxWriteRequestOperationCount); + out.writeOptionalWriteable(maxWriteRequestSize); out.writeOptionalVInt(maxConcurrentWriteBatches); out.writeOptionalVInt(maxWriteBufferCount); out.writeOptionalWriteable(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); - out.writeOptionalTimeValue(pollTimeout); + out.writeOptionalTimeValue(readPollTimeout); } @Override @@ -272,11 +300,17 @@ public class PutAutoFollowPatternAction extends Action { if (followIndexNamePattern != null) { builder.field(AutoFollowPattern.FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern); } - if (maxBatchOperationCount != null) { - builder.field(AutoFollowPattern.MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); + if (maxReadRequestOperationCount != null) { + builder.field(AutoFollowPattern.MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), maxReadRequestOperationCount); } - if 
(maxBatchSize != null) { - builder.field(AutoFollowPattern.MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); + if (maxReadRequestSize != null) { + builder.field(AutoFollowPattern.MAX_READ_REQUEST_SIZE.getPreferredName(), maxReadRequestSize.getStringRep()); + } + if (maxWriteRequestOperationCount != null) { + builder.field(AutoFollowPattern.MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), maxWriteRequestOperationCount); + } + if (maxWriteRequestSize != null) { + builder.field(AutoFollowPattern.MAX_WRITE_REQUEST_SIZE.getPreferredName(), maxWriteRequestSize.getStringRep()); } if (maxWriteBufferCount != null) { builder.field(AutoFollowPattern.MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); @@ -285,16 +319,16 @@ public class PutAutoFollowPatternAction extends Action { builder.field(AutoFollowPattern.MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); } if (maxConcurrentReadBatches != null) { - builder.field(AutoFollowPattern.MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); + builder.field(AutoFollowPattern.MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), maxConcurrentReadBatches); } if (maxConcurrentWriteBatches != null) { - builder.field(AutoFollowPattern.MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + builder.field(AutoFollowPattern.MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), maxConcurrentWriteBatches); } if (maxRetryDelay != null) { builder.field(AutoFollowPattern.MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay.getStringRep()); } - if (pollTimeout != null) { - builder.field(AutoFollowPattern.POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); + if (readPollTimeout != null) { + builder.field(AutoFollowPattern.READ_POLL_TIMEOUT.getPreferredName(), readPollTimeout.getStringRep()); } } builder.endObject(); @@ -310,14 +344,16 @@ public class PutAutoFollowPatternAction extends Action { Objects.equals(remoteCluster, request.remoteCluster) && Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) && Objects.equals(followIndexNamePattern, request.followIndexNamePattern) && - Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) && + Objects.equals(maxReadRequestOperationCount, request.maxReadRequestOperationCount) && + Objects.equals(maxReadRequestSize, request.maxReadRequestSize) && Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) && - Objects.equals(maxBatchSize, request.maxBatchSize) && + Objects.equals(maxWriteRequestOperationCount, request.maxWriteRequestOperationCount) && + Objects.equals(maxWriteRequestSize, request.maxWriteRequestSize) && Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) && Objects.equals(maxWriteBufferCount, request.maxWriteBufferCount) && Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) && Objects.equals(maxRetryDelay, request.maxRetryDelay) && - Objects.equals(pollTimeout, request.pollTimeout); + Objects.equals(readPollTimeout, request.readPollTimeout); } @Override @@ -327,14 +363,16 @@ public class PutAutoFollowPatternAction extends Action { remoteCluster, leaderIndexPatterns, followIndexNamePattern, - maxBatchOperationCount, + maxReadRequestOperationCount, + maxReadRequestSize, maxConcurrentReadBatches, - maxBatchSize, + maxWriteRequestOperationCount, + maxWriteRequestSize, maxConcurrentWriteBatches, maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, - pollTimeout); + readPollTimeout); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 0f36af4db10..b242b8cc8ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -27,14 +27,14 @@ import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.FOLLOWER_INDEX_FIELD; -import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_BATCH_OPERATION_COUNT; -import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_BATCH_SIZE; -import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_CONCURRENT_READ_BATCHES; -import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_CONCURRENT_WRITE_BATCHES; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_READ_REQUEST_OPERATION_COUNT; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_READ_REQUEST_SIZE; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_OUTSTANDING_READ_REQUESTS; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_OUTSTANDING_WRITE_REQUESTS; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_RETRY_DELAY_FIELD; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_COUNT; import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.MAX_WRITE_BUFFER_SIZE; -import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.POLL_TIMEOUT; +import static org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction.Request.READ_POLL_TIMEOUT; public final class PutFollowAction extends Action { @@ -64,30 +64,30 @@ public final class PutFollowAction extends Action { static { PARSER.declareString(Request::setRemoteCluster, REMOTE_CLUSTER_FIELD); PARSER.declareString(Request::setLeaderIndex, LEADER_INDEX_FIELD); - PARSER.declareString((request, value) -> request.followRequest.setFollowerIndex(value), FOLLOWER_INDEX_FIELD); - PARSER.declareInt((request, value) -> request.followRequest.setMaxBatchOperationCount(value), MAX_BATCH_OPERATION_COUNT); - PARSER.declareInt((request, value) -> request.followRequest.setMaxConcurrentReadBatches(value), MAX_CONCURRENT_READ_BATCHES); + PARSER.declareString((req, val) -> req.followRequest.setFollowerIndex(val), FOLLOWER_INDEX_FIELD); + PARSER.declareInt((req, val) -> req.followRequest.setMaxReadRequestOperationCount(val), MAX_READ_REQUEST_OPERATION_COUNT); + PARSER.declareInt((req, val) -> req.followRequest.setMaxOutstandingReadRequests(val), MAX_OUTSTANDING_READ_REQUESTS); PARSER.declareField( - (request, value) -> request.followRequest.setMaxBatchSize(value), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), - MAX_BATCH_SIZE, + (req, val) -> req.followRequest.setMaxReadRequestSize(val), + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_READ_REQUEST_SIZE.getPreferredName()), + MAX_READ_REQUEST_SIZE, ObjectParser.ValueType.STRING); - PARSER.declareInt((request, value) -> request.followRequest.setMaxConcurrentWriteBatches(value), 
MAX_CONCURRENT_WRITE_BATCHES); - PARSER.declareInt((request, value) -> request.followRequest.setMaxWriteBufferCount(value), MAX_WRITE_BUFFER_COUNT); + PARSER.declareInt((req, val) -> req.followRequest.setMaxOutstandingWriteRequests(val), MAX_OUTSTANDING_WRITE_REQUESTS); + PARSER.declareInt((req, val) -> req.followRequest.setMaxWriteBufferCount(val), MAX_WRITE_BUFFER_COUNT); PARSER.declareField( - (request, value) -> request.followRequest.setMaxWriteBufferSize(value), + (req, val) -> req.followRequest.setMaxWriteBufferSize(val), (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_BUFFER_SIZE.getPreferredName()), MAX_WRITE_BUFFER_SIZE, ObjectParser.ValueType.STRING); PARSER.declareField( - (request, value) -> request.followRequest.setMaxRetryDelay(value), + (req, val) -> req.followRequest.setMaxRetryDelay(val), (p, c) -> TimeValue.parseTimeValue(p.text(), MAX_RETRY_DELAY_FIELD.getPreferredName()), MAX_RETRY_DELAY_FIELD, ObjectParser.ValueType.STRING); PARSER.declareField( - (request, value) -> request.followRequest.setPollTimeout(value), - (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), - POLL_TIMEOUT, + (req, val) -> req.followRequest.setReadPollTimeout(val), + (p, c) -> TimeValue.parseTimeValue(p.text(), READ_POLL_TIMEOUT.getPreferredName()), + READ_POLL_TIMEOUT, ObjectParser.ValueType.STRING); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 587223e3fbc..11c46492cc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -44,26 +44,31 @@ public final class ResumeFollowAction extends Action { public static class Request extends ActionRequest implements ToXContentObject { static final ParseField FOLLOWER_INDEX_FIELD = new ParseField("follower_index"); - static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); - static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); - static final ParseField MAX_BATCH_SIZE = new ParseField("max_batch_size"); - static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + static final ParseField MAX_READ_REQUEST_OPERATION_COUNT = new ParseField("max_read_request_operation_count"); + static final ParseField MAX_READ_REQUEST_SIZE = new ParseField("max_read_request_size"); + static final ParseField MAX_OUTSTANDING_READ_REQUESTS = new ParseField("max_outstanding_read_requests"); + static final ParseField MAX_WRITE_REQUEST_OPERATION_COUNT = new ParseField("max_write_request_operation_count"); + static final ParseField MAX_WRITE_REQUEST_SIZE = new ParseField("max_write_request_size"); + static final ParseField MAX_OUTSTANDING_WRITE_REQUESTS = new ParseField("max_outstanding_write_requests"); static final ParseField MAX_WRITE_BUFFER_COUNT = new ParseField("max_write_buffer_count"); static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); static final ParseField MAX_RETRY_DELAY_FIELD = new ParseField("max_retry_delay"); - static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout"); + static final ParseField READ_POLL_TIMEOUT = new ParseField("read_poll_timeout"); static final ObjectParser PARSER = new ObjectParser<>(NAME, 
Request::new); static { PARSER.declareString(Request::setFollowerIndex, FOLLOWER_INDEX_FIELD); - PARSER.declareInt(Request::setMaxBatchOperationCount, MAX_BATCH_OPERATION_COUNT); - PARSER.declareInt(Request::setMaxConcurrentReadBatches, MAX_CONCURRENT_READ_BATCHES); + PARSER.declareInt(Request::setMaxReadRequestOperationCount, MAX_READ_REQUEST_OPERATION_COUNT); PARSER.declareField( - Request::setMaxBatchSize, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_BATCH_SIZE.getPreferredName()), - MAX_BATCH_SIZE, + Request::setMaxReadRequestSize, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_READ_REQUEST_SIZE.getPreferredName()), MAX_READ_REQUEST_SIZE, ObjectParser.ValueType.STRING); - PARSER.declareInt(Request::setMaxConcurrentWriteBatches, MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt(Request::setMaxOutstandingReadRequests, MAX_OUTSTANDING_READ_REQUESTS); + PARSER.declareInt(Request::setMaxWriteRequestOperationCount, MAX_WRITE_REQUEST_OPERATION_COUNT); + PARSER.declareField(Request::setMaxWriteRequestSize, + (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_WRITE_REQUEST_SIZE.getPreferredName()), MAX_WRITE_REQUEST_SIZE, + ObjectParser.ValueType.STRING); + PARSER.declareInt(Request::setMaxOutstandingWriteRequests, MAX_OUTSTANDING_WRITE_REQUESTS); PARSER.declareInt(Request::setMaxWriteBufferCount, MAX_WRITE_BUFFER_COUNT); PARSER.declareField( Request::setMaxWriteBufferSize, @@ -76,9 +81,9 @@ public final class ResumeFollowAction extends Action { MAX_RETRY_DELAY_FIELD, ObjectParser.ValueType.STRING); PARSER.declareField( - Request::setPollTimeout, - (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()), - POLL_TIMEOUT, + Request::setReadPollTimeout, + (p, c) -> TimeValue.parseTimeValue(p.text(), READ_POLL_TIMEOUT.getPreferredName()), + READ_POLL_TIMEOUT, ObjectParser.ValueType.STRING); } @@ -106,44 +111,64 @@ public final class ResumeFollowAction extends Action { this.followerIndex = followerIndex; } - private Integer maxBatchOperationCount; + private Integer maxReadRequestOperationCount; - public Integer getMaxBatchOperationCount() { - return maxBatchOperationCount; + public Integer getMaxReadRequestOperationCount() { + return maxReadRequestOperationCount; } - public void setMaxBatchOperationCount(Integer maxBatchOperationCount) { - this.maxBatchOperationCount = maxBatchOperationCount; + public void setMaxReadRequestOperationCount(Integer maxReadRequestOperationCount) { + this.maxReadRequestOperationCount = maxReadRequestOperationCount; } - private Integer maxConcurrentReadBatches; + private Integer maxOutstandingReadRequests; - public Integer getMaxConcurrentReadBatches() { - return maxConcurrentReadBatches; + public Integer getMaxOutstandingReadRequests() { + return maxOutstandingReadRequests; } - public void setMaxConcurrentReadBatches(Integer maxConcurrentReadBatches) { - this.maxConcurrentReadBatches = maxConcurrentReadBatches; + public void setMaxOutstandingReadRequests(Integer maxOutstandingReadRequests) { + this.maxOutstandingReadRequests = maxOutstandingReadRequests; } - private ByteSizeValue maxBatchSize; + private ByteSizeValue maxReadRequestSize; - public ByteSizeValue getMaxBatchSize() { - return maxBatchSize; + public ByteSizeValue getMaxReadRequestSize() { + return maxReadRequestSize; } - public void setMaxBatchSize(ByteSizeValue maxBatchSize) { - this.maxBatchSize = maxBatchSize; + public void setMaxReadRequestSize(ByteSizeValue maxReadRequestSize) { + this.maxReadRequestSize = maxReadRequestSize; } - private Integer 
maxConcurrentWriteBatches; + private Integer maxWriteRequestOperationCount; - public Integer getMaxConcurrentWriteBatches() { - return maxConcurrentWriteBatches; + public Integer getMaxWriteRequestOperationCount() { + return maxWriteRequestOperationCount; } - public void setMaxConcurrentWriteBatches(Integer maxConcurrentWriteBatches) { - this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + public void setMaxWriteRequestOperationCount(Integer maxWriteRequestOperationCount) { + this.maxWriteRequestOperationCount = maxWriteRequestOperationCount; + } + + private ByteSizeValue maxWriteRequestSize; + + public ByteSizeValue getMaxWriteRequestSize() { + return maxWriteRequestSize; + } + + public void setMaxWriteRequestSize(ByteSizeValue maxWriteRequestSize) { + this.maxWriteRequestSize = maxWriteRequestSize; + } + + private Integer maxOutstandingWriteRequests; + + public Integer getMaxOutstandingWriteRequests() { + return maxOutstandingWriteRequests; + } + + public void setMaxOutstandingWriteRequests(Integer maxOutstandingWriteRequests) { + this.maxOutstandingWriteRequests = maxOutstandingWriteRequests; } private Integer maxWriteBufferCount; @@ -176,14 +201,14 @@ public final class ResumeFollowAction extends Action { return maxRetryDelay; } - private TimeValue pollTimeout; + private TimeValue readPollTimeout; - public TimeValue getPollTimeout() { - return pollTimeout; + public TimeValue getReadPollTimeout() { + return readPollTimeout; } - public void setPollTimeout(TimeValue pollTimeout) { - this.pollTimeout = pollTimeout; + public void setReadPollTimeout(TimeValue readPollTimeout) { + this.readPollTimeout = readPollTimeout; } public Request() { @@ -196,17 +221,23 @@ public final class ResumeFollowAction extends Action { if (followerIndex == null) { e = addValidationError(FOLLOWER_INDEX_FIELD.getPreferredName() + " is missing", e); } - if (maxBatchOperationCount != null && maxBatchOperationCount < 1) { - e = addValidationError(MAX_BATCH_OPERATION_COUNT.getPreferredName() + " must be larger than 0", e); + if (maxReadRequestOperationCount != null && maxReadRequestOperationCount < 1) { + e = addValidationError(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName() + " must be larger than 0", e); } - if (maxConcurrentReadBatches != null && maxConcurrentReadBatches < 1) { - e = addValidationError(MAX_CONCURRENT_READ_BATCHES.getPreferredName() + " must be larger than 0", e); + if (maxReadRequestSize != null && maxReadRequestSize.compareTo(ByteSizeValue.ZERO) <= 0) { + e = addValidationError(MAX_READ_REQUEST_SIZE.getPreferredName() + " must be larger than 0", e); } - if (maxBatchSize != null && maxBatchSize.compareTo(ByteSizeValue.ZERO) <= 0) { - e = addValidationError(MAX_BATCH_SIZE.getPreferredName() + " must be larger than 0", e); + if (maxOutstandingReadRequests != null && maxOutstandingReadRequests < 1) { + e = addValidationError(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName() + " must be larger than 0", e); } - if (maxConcurrentWriteBatches != null && maxConcurrentWriteBatches < 1) { - e = addValidationError(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName() + " must be larger than 0", e); + if (maxWriteRequestOperationCount != null && maxWriteRequestOperationCount < 1) { + e = addValidationError(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName() + " must be larger than 0", e); + } + if (maxWriteRequestSize != null && maxWriteRequestSize.compareTo(ByteSizeValue.ZERO) <= 0) { + e = addValidationError(MAX_WRITE_REQUEST_SIZE.getPreferredName() + " must be larger than 0", e); + } + if 
(maxOutstandingWriteRequests != null && maxOutstandingWriteRequests < 1) { + e = addValidationError(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName() + " must be larger than 0", e); } if (maxWriteBufferCount != null && maxWriteBufferCount < 1) { e = addValidationError(MAX_WRITE_BUFFER_COUNT.getPreferredName() + " must be larger than 0", e); @@ -232,28 +263,28 @@ public final class ResumeFollowAction extends Action { public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); followerIndex = in.readString(); - maxBatchOperationCount = in.readOptionalVInt(); - maxConcurrentReadBatches = in.readOptionalVInt(); - maxBatchSize = in.readOptionalWriteable(ByteSizeValue::new); - maxConcurrentWriteBatches = in.readOptionalVInt(); + maxReadRequestOperationCount = in.readOptionalVInt(); + maxOutstandingReadRequests = in.readOptionalVInt(); + maxReadRequestSize = in.readOptionalWriteable(ByteSizeValue::new); + maxOutstandingWriteRequests = in.readOptionalVInt(); maxWriteBufferCount = in.readOptionalVInt(); maxWriteBufferSize = in.readOptionalWriteable(ByteSizeValue::new); maxRetryDelay = in.readOptionalTimeValue(); - pollTimeout = in.readOptionalTimeValue(); + readPollTimeout = in.readOptionalTimeValue(); } @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); out.writeString(followerIndex); - out.writeOptionalVInt(maxBatchOperationCount); - out.writeOptionalVInt(maxConcurrentReadBatches); - out.writeOptionalWriteable(maxBatchSize); - out.writeOptionalVInt(maxConcurrentWriteBatches); + out.writeOptionalVInt(maxReadRequestOperationCount); + out.writeOptionalVInt(maxOutstandingReadRequests); + out.writeOptionalWriteable(maxReadRequestSize); + out.writeOptionalVInt(maxOutstandingWriteRequests); out.writeOptionalVInt(maxWriteBufferCount); out.writeOptionalWriteable(maxWriteBufferSize); out.writeOptionalTimeValue(maxRetryDelay); - out.writeOptionalTimeValue(pollTimeout); + out.writeOptionalTimeValue(readPollTimeout); } @Override @@ -268,11 +299,17 @@ public final class ResumeFollowAction extends Action { void toXContentFragment(final XContentBuilder builder, final Params params) throws IOException { builder.field(FOLLOWER_INDEX_FIELD.getPreferredName(), followerIndex); - if (maxBatchOperationCount != null) { - builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); + if (maxReadRequestOperationCount != null) { + builder.field(MAX_READ_REQUEST_OPERATION_COUNT.getPreferredName(), maxReadRequestOperationCount); } - if (maxBatchSize != null) { - builder.field(MAX_BATCH_SIZE.getPreferredName(), maxBatchSize.getStringRep()); + if (maxReadRequestSize != null) { + builder.field(MAX_READ_REQUEST_SIZE.getPreferredName(), maxReadRequestSize.getStringRep()); + } + if (maxWriteRequestOperationCount != null) { + builder.field(MAX_WRITE_REQUEST_OPERATION_COUNT.getPreferredName(), maxWriteRequestOperationCount); + } + if (maxWriteRequestSize != null) { + builder.field(MAX_WRITE_REQUEST_SIZE.getPreferredName(), maxWriteRequestSize.getStringRep()); } if (maxWriteBufferCount != null) { builder.field(MAX_WRITE_BUFFER_COUNT.getPreferredName(), maxWriteBufferCount); @@ -280,17 +317,17 @@ public final class ResumeFollowAction extends Action { if (maxWriteBufferSize != null) { builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize.getStringRep()); } - if (maxConcurrentReadBatches != null) { - builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); + if (maxOutstandingReadRequests != 
null) { + builder.field(MAX_OUTSTANDING_READ_REQUESTS.getPreferredName(), maxOutstandingReadRequests); } - if (maxConcurrentWriteBatches != null) { - builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + if (maxOutstandingWriteRequests != null) { + builder.field(MAX_OUTSTANDING_WRITE_REQUESTS.getPreferredName(), maxOutstandingWriteRequests); } if (maxRetryDelay != null) { builder.field(MAX_RETRY_DELAY_FIELD.getPreferredName(), maxRetryDelay.getStringRep()); } - if (pollTimeout != null) { - builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep()); + if (readPollTimeout != null) { + builder.field(READ_POLL_TIMEOUT.getPreferredName(), readPollTimeout.getStringRep()); } } @@ -299,14 +336,16 @@ public final class ResumeFollowAction extends Action { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; - return Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) && - Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) && - Objects.equals(maxBatchSize, request.maxBatchSize) && - Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) && + return Objects.equals(maxReadRequestOperationCount, request.maxReadRequestOperationCount) && + Objects.equals(maxReadRequestSize, request.maxReadRequestSize) && + Objects.equals(maxOutstandingReadRequests, request.maxOutstandingReadRequests) && + Objects.equals(maxWriteRequestOperationCount, request.maxWriteRequestOperationCount) && + Objects.equals(maxWriteRequestSize, request.maxWriteRequestSize) && + Objects.equals(maxOutstandingWriteRequests, request.maxOutstandingWriteRequests) && Objects.equals(maxWriteBufferCount, request.maxWriteBufferCount) && Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) && Objects.equals(maxRetryDelay, request.maxRetryDelay) && - Objects.equals(pollTimeout, request.pollTimeout) && + Objects.equals(readPollTimeout, request.readPollTimeout) && Objects.equals(followerIndex, request.followerIndex); } @@ -314,14 +353,16 @@ public final class ResumeFollowAction extends Action { public int hashCode() { return Objects.hash( followerIndex, - maxBatchOperationCount, - maxConcurrentReadBatches, - maxBatchSize, - maxConcurrentWriteBatches, + maxReadRequestOperationCount, + maxReadRequestSize, + maxOutstandingReadRequests, + maxWriteRequestOperationCount, + maxWriteRequestSize, + maxOutstandingWriteRequests, maxWriteBufferCount, maxWriteBufferSize, maxRetryDelay, - pollTimeout); + readPollTimeout); } } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 4c2a9792440..bdd16d3b58c 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -956,52 +956,52 @@ "last_requested_seq_no": { "type": "long" }, - "number_of_concurrent_reads": { + "outstanding_read_requests": { "type": "long" }, - "number_of_concurrent_writes": { + "outstanding_write_requests": { "type": "long" }, - "number_of_queued_writes": { + "write_buffer_operation_count": { "type": "long" }, - "buffer_size_in_bytes": { + "write_buffer_size_in_bytes": { "type": "long" }, - "mapping_version": { + "follower_mapping_version": { "type": "long" }, - "total_fetch_time_millis": { + "total_read_time_millis": { "type": "long" }, - "total_fetch_remote_time_millis": { + "total_read_remote_exec_time_millis": { "type": "long" }, - 
"number_of_successful_fetches": { + "successful_read_requests": { "type": "long" }, - "number_of_failed_fetches": { + "failed_read_requests": { "type": "long" }, - "operations_received": { + "operations_read": { "type": "long" }, - "total_transferred_bytes": { + "bytes_read": { "type": "long" }, - "total_index_time_millis": { + "total_write_time_millis": { "type": "long" }, - "number_of_successful_bulk_operations": { + "successful_write_requests": { "type": "long" }, - "number_of_failed_bulk_operations": { + "failed_write_requests": { "type": "long" }, - "number_of_operations_indexed": { + "operations_written": { "type": "long" }, - "fetch_exceptions": { + "read_exceptions": { "type": "nested", "properties": { "from_seq_no": { @@ -1023,7 +1023,7 @@ } } }, - "time_since_last_fetch_millis": { + "time_since_last_read_millis": { "type": "long" } } From d6dc62ef037a29c32963532a8798185ee412b747 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=8C=AF=E8=8D=9F?= <605951224@qq.com> Date: Thu, 25 Oct 2018 17:55:14 +0900 Subject: [PATCH 56/67] [Docs] Add explanation for code snippets line width (#34796) Add "Line width for code snippets is 76 characters" to Java Language Formatting Guidelines --- CONTRIBUTING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 172de8b008a..fcfb14b2b79 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -159,6 +159,7 @@ Please follow these formatting guidelines: * Java indent is 4 spaces * Line width is 140 characters +* Line width for code snippets that are included in the documentation (the ones surrounded by `// tag` and `// end` comments) is 76 characters * The rest is left to Java coding standards * Disable “auto-format on save” to prevent unnecessary format changes. This makes reviews much harder as it generates unnecessary formatting changes. If your IDE supports formatting only modified chunks that is fine to do. * Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. This can be done automatically by your IDE: From a7e08f462f103d0414635ed05cf728a4fbfc79de Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Thu, 25 Oct 2018 12:14:49 +0300 Subject: [PATCH 57/67] SQL: handle X-Pack or X-Pack SQL not being available in a more graceful way (#34736) Throw a different error message for a http response code of 400, but also when the error itself is of a specific type. --- .../xpack/sql/client/JreHttpUrlConnection.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java index 0dca4a88f05..3f894ae59af 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/JreHttpUrlConnection.java @@ -47,6 +47,7 @@ public class JreHttpUrlConnection implements Closeable { * error. 
*/ public static final String SQL_STATE_BAD_SERVER = "bad_server"; + private static final String SQL_NOT_AVAILABLE_ERROR_MESSAGE = "request [/_xpack/sql] contains unrecognized parameter: [mode]"; public static R http(String path, String query, ConnectionConfiguration cfg, Function handler) { final URI uriPath = cfg.baseUri().resolve(path); // update path if needed @@ -176,6 +177,19 @@ public class JreHttpUrlConnection implements Closeable { } SqlExceptionType type = SqlExceptionType.fromRemoteFailureType(failure.type()); if (type == null) { + // check if x-pack or sql are not available (x-pack not installed or sql not enabled) + // by checking the error message the server is sending back + if (con.getResponseCode() >= HttpURLConnection.HTTP_BAD_REQUEST + && failure.reason().contains(SQL_NOT_AVAILABLE_ERROR_MESSAGE)) { + return new ResponseOrException<>(new SQLException("X-Pack/SQL do not seem to be available" + + " on the Elasticsearch node using the access path '" + + con.getURL().getHost() + + (con.getURL().getPort() > 0 ? ":" + con.getURL().getPort() : "") + + "'." + + " Please verify X-Pack is installed and SQL enabled. Alternatively, check if any proxy is interfering" + + " the communication to Elasticsearch", + SQL_STATE_BAD_SERVER)); + } return new ResponseOrException<>(new SQLException("Server sent bad type [" + failure.type() + "]. Original type was [" + failure.reason() + "]. [" + failure.remoteTrace() + "]", SQL_STATE_BAD_SERVER)); From b97546a5a9d9ab0556fc21854a7ed7c48592c7fe Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 25 Oct 2018 12:40:00 +0300 Subject: [PATCH 58/67] SQL: Introduce ODBC mode, similar to JDBC (#34825) Close #34720 --- .../license/XPackLicenseState.java | 17 ++++++++++++++++- .../xpack/sql/action/SqlQueryResponse.java | 2 +- .../org/elasticsearch/xpack/sql/proto/Mode.java | 7 ++++++- .../xpack/sql/plugin/SqlPlugin.java | 5 +++++ .../sql/plugin/TransportSqlQueryAction.java | 4 ++-- 5 files changed, 30 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 1fe4ebf0850..242a925ab1c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -239,7 +239,8 @@ public class XPackLicenseState { switch (currentMode) { case TRIAL: case PLATINUM: - return new String[] { "JDBC support will be disabled, but you can continue to use SQL CLI and REST endpoint" }; + return new String[] { + "JDBC and ODBC support will be disabled, but you can continue to use SQL CLI and REST endpoint" }; } break; } @@ -628,6 +629,20 @@ public class XPackLicenseState { return licensed && localStatus.active; } + /** + * Determine if ODBC support should be enabled. + *

+ * ODBC is available only in for {@link OperationMode#PLATINUM} and {@link OperationMode#TRIAL} licences + */ + public synchronized boolean isOdbcAllowed() { + Status localStatus = status; + OperationMode operationMode = localStatus.mode; + + boolean licensed = operationMode == OperationMode.TRIAL || operationMode == OperationMode.PLATINUM; + + return licensed && localStatus.active; + } + public synchronized boolean isTrialLicense() { return status.mode == OperationMode.TRIAL; } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java index 970be02e385..da4037ac95c 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryResponse.java @@ -167,7 +167,7 @@ public class SqlQueryResponse extends ActionResponse implements ToXContentObject * Serializes the provided value in SQL-compatible way based on the client mode */ public static XContentBuilder value(XContentBuilder builder, Mode mode, Object value) throws IOException { - if (mode == Mode.JDBC && value instanceof ReadableDateTime) { + if (Mode.isDriver(mode) && value instanceof ReadableDateTime) { // JDBC cannot parse dates in string format builder.value(((ReadableDateTime) value).getMillis()); } else { diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java index 02f175ca80d..598c52a9179 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Mode.java @@ -13,7 +13,8 @@ import java.util.Locale; */ public enum Mode { PLAIN, - JDBC; + JDBC, + ODBC; public static Mode fromString(String mode) { if (mode == null) { @@ -27,4 +28,8 @@ public enum Mode { public String toString() { return this.name().toLowerCase(Locale.ROOT); } + + public static boolean isDriver(Mode mode) { + return mode == JDBC || mode == ODBC; + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java index 6c026b26071..b22abaa65d7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java @@ -64,6 +64,11 @@ public class SqlPlugin extends Plugin implements ActionPlugin { throw LicenseUtils.newComplianceException("jdbc"); } break; + case ODBC: + if (licenseState.isOdbcAllowed() == false) { + throw LicenseUtils.newComplianceException("odbc"); + } + break; case PLAIN: if (licenseState.isSqlAllowed() == false) { throw LicenseUtils.newComplianceException(XPackField.SQL); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index e491f76749b..689dd365f76 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.action.SqlQueryRequest; 
import org.elasticsearch.xpack.sql.action.SqlQueryResponse; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.proto.ColumnInfo; +import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.session.Cursors; import org.elasticsearch.xpack.sql.session.RowSet; @@ -30,7 +31,6 @@ import java.util.ArrayList; import java.util.List; import static java.util.Collections.unmodifiableList; -import static org.elasticsearch.xpack.sql.proto.Mode.JDBC; public class TransportSqlQueryAction extends HandledTransportAction { private final PlanExecutor planExecutor; @@ -73,7 +73,7 @@ public class TransportSqlQueryAction extends HandledTransportAction columns = new ArrayList<>(rowSet.columnCount()); for (Schema.Entry entry : rowSet.schema()) { - if (request.mode() == JDBC) { + if (Mode.isDriver(request.mode())) { columns.add(new ColumnInfo("", entry.name(), entry.type().esType, entry.type().jdbcType, entry.type().displaySize)); } else { From a69c540f12daded596ce2db931b0d3fbbe19c942 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Thu, 25 Oct 2018 13:37:23 +0200 Subject: [PATCH 59/67] [Test] Mute FullClusterRestartIT.testShrink() until test is fixed See https://github.com/elastic/elasticsearch/pull/34853 --- .../java/org/elasticsearch/upgrades/FullClusterRestartIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index ce66800d892..6f878d24c87 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -325,6 +325,7 @@ public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/34853") public void testShrink() throws IOException { String shrunkenIndex = index + "_shrunk"; int numDocs; From bd143334d3ec773d5e47090233dd6218e4e4bf98 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Thu, 25 Oct 2018 14:24:11 +0200 Subject: [PATCH 60/67] SQL: Fix edge case: ` IN (null)` (#34802) Handle the case when `null` is the only value in the list so that it's translated to a `MatchNoDocsQuery`. 
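For example, a filter such as `WHERE int IN (null, null)` now folds to a constant and the whole plan collapses to an empty local result (see the added `QueryFolderTests` case below), while `WHERE null IN (1, 2, 3)` folds to `null`. A minimal editorial sketch of that null handling, covering only the two edge cases visible in the diff below (the method name is illustrative and this is not the actual `In#fold` implementation):

```java
// Sketch of the NULL edge cases described above; illustrative only, not Elasticsearch code.
static Boolean foldInNullEdgeCases(Object value, java.util.List<Object> list) {
    if (value == null) {
        return null;                  // NULL IN (...) folds to null (unknown)
    }
    if (list.size() == 1 && list.get(0) == null) {
        return false;                 // x IN (null) can never match, hence MatchNoDocsQuery
    }
    throw new IllegalStateException("other cases are handled by the regular IN folding");
}
```
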
Followup to: #34750 --- .../xpack/sql/expression/predicate/In.java | 10 +++++++++- .../xpack/sql/querydsl/query/TermsQuery.java | 11 ++++++++--- .../xpack/sql/optimizer/OptimizerTests.java | 14 +++++++++++++- .../xpack/sql/planner/QueryFolderTests.java | 10 ++++++++++ .../xpack/sql/planner/QueryTranslatorTests.java | 2 +- 5 files changed, 41 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java index 4ce1088f806..9b16b77511c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/In.java @@ -78,11 +78,19 @@ public class In extends NamedExpression implements ScriptWeaver { @Override public boolean foldable() { - return Expressions.foldable(children()); + return Expressions.foldable(children()) || + (Expressions.foldable(list) && list().stream().allMatch(e -> e.dataType() == DataType.NULL)); } @Override public Boolean fold() { + if (value.dataType() == DataType.NULL) { + return null; + } + if (list.size() == 1 && list.get(0).dataType() == DataType.NULL) { + return false; + } + Object foldedLeftValue = value.fold(); Boolean result = false; for (Expression rightValue : list) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java index 4366e2d404c..91ea49a8a3c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/query/TermsQuery.java @@ -11,23 +11,28 @@ import org.elasticsearch.xpack.sql.expression.Foldables; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; +import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Objects; +import java.util.Set; import static org.elasticsearch.index.query.QueryBuilders.termsQuery; public class TermsQuery extends LeafQuery { private final String term; - private final LinkedHashSet values; + private final Set values; public TermsQuery(Location location, String term, List values) { super(location); this.term = term; values.removeIf(e -> e.dataType() == DataType.NULL); - this.values = new LinkedHashSet<>(Foldables.valuesOf(values, values.get(0).dataType())); - this.values.removeIf(Objects::isNull); + if (values.isEmpty()) { + this.values = Collections.emptySet(); + } else { + this.values = new LinkedHashSet<>(Foldables.valuesOf(values, values.get(0).dataType())); + } } @Override diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index acd0378ee01..137f7b68d7a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -95,7 +95,7 @@ public class OptimizerTests extends ESTestCase { private static final Literal FOUR = L(4); private static final Literal FIVE = L(5); private static final Literal SIX = L(6); - + private static final Literal NULL = L(null); public static class DummyBooleanExpression extends Expression { @@ -323,6 
+323,18 @@ public class OptimizerTests extends ESTestCase { assertThat(Foldables.valuesOf(in.list(), DataType.INTEGER), contains(1 ,2 ,3 ,4)); } + public void testConstantFoldingIn_RightValueIsNull() { + In in = new In(EMPTY, getFieldAttribute(), Arrays.asList(NULL, NULL)); + Literal result= (Literal) new ConstantFolding().rule(in); + assertEquals(false, result.value()); + } + + public void testConstantFoldingIn_LeftValueIsNull() { + In in = new In(EMPTY, NULL, Arrays.asList(ONE, TWO, THREE)); + Literal result= (Literal) new ConstantFolding().rule(in); + assertNull(result.value()); + } + public void testArithmeticFolding() { assertEquals(10, foldOperator(new Add(EMPTY, L(7), THREE))); assertEquals(4, foldOperator(new Sub(EMPTY, L(7), THREE))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index b6643fb7d47..5fac14e2397 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -64,6 +64,16 @@ public class QueryFolderTests extends AbstractBuilderTestCase { assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); } + public void testFoldingToLocalExecWithProject_FoldableIn() { + PhysicalPlan p = plan("SELECT keyword FROM test WHERE int IN (null, null)"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(1, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + } + public void testFoldingToLocalExecWithProject_WithOrderAndLimit() { PhysicalPlan p = plan("SELECT keyword FROM test WHERE 1 = 2 ORDER BY int LIMIT 10"); assertEquals(LocalExec.class, p.getClass()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 95b9be33a12..c1e5a0d2daf 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -173,7 +173,7 @@ public class QueryTranslatorTests extends AbstractBuilderTestCase { assertEquals("keyword:(bar foo lala)", tq.asBuilder().toQuery(createShardContext()).toString()); } - public void testTranslateInExpression_WhereClauseAndNullHAndling() throws IOException { + public void testTranslateInExpression_WhereClauseAndNullHandling() throws IOException { LogicalPlan p = plan("SELECT * FROM test WHERE keyword IN ('foo', null, 'lala', null, 'foo', concat('la', 'la'))"); assertTrue(p instanceof Project); assertTrue(p.children().get(0) instanceof Filter); From 185c06bb7f2bd7bff491738b01ed7716c69a70db Mon Sep 17 00:00:00 2001 From: lipsill <39668292+lipsill@users.noreply.github.com> Date: Thu, 25 Oct 2018 15:52:41 +0200 Subject: [PATCH 61/67] Logging: tests: clean up logging (#34606) Replace internal deprecated calls to `Loggers.getLogger(Class)` with direct calls to log4j `LogManager.getLogger(Class)` --- .../elasticsearch/action/search/MockSearchPhaseContext.java | 4 ++-- .../cluster/allocation/AwarenessAllocationIT.java | 5 +++-- .../elasticsearch/cluster/allocation/ClusterRerouteIT.java | 5 
+++-- .../cluster/allocation/FilteringAllocationIT.java | 4 ++-- .../cluster/routing/allocation/AddIncrementallyTests.java | 5 +++-- .../cluster/routing/allocation/AllocationCommandsTests.java | 4 ++-- .../cluster/routing/allocation/AwarenessAllocationTests.java | 4 ++-- .../routing/allocation/BalanceConfigurationTests.java | 5 +++-- .../routing/allocation/ClusterRebalanceRoutingTests.java | 4 ++-- .../routing/allocation/ConcurrentRebalanceRoutingTests.java | 4 ++-- .../cluster/routing/allocation/DeadNodesAllocationTests.java | 4 ++-- .../ElectReplicaAsPrimaryDuringRelocationTests.java | 4 ++-- .../routing/allocation/ExpectedShardSizeAllocationTests.java | 4 ++-- .../cluster/routing/allocation/FailedNodeRoutingTests.java | 4 ++-- .../cluster/routing/allocation/FailedShardsRoutingTests.java | 5 +++-- .../cluster/routing/allocation/IndexBalanceTests.java | 4 ++-- .../allocation/NodeVersionAllocationDeciderTests.java | 4 ++-- .../routing/allocation/PreferPrimaryAllocationTests.java | 4 ++-- .../routing/allocation/PrimaryElectionRoutingTests.java | 4 ++-- .../PrimaryNotRelocatedWhileBeingRecoveredTests.java | 4 ++-- .../routing/allocation/RebalanceAfterActiveTests.java | 4 ++-- .../allocation/ReplicaAllocatedAfterPrimaryTests.java | 4 ++-- .../routing/allocation/RoutingNodesIntegrityTests.java | 4 ++-- .../cluster/routing/allocation/SameShardRoutingTests.java | 4 ++-- .../cluster/routing/allocation/ShardVersioningTests.java | 4 ++-- .../routing/allocation/ShardsLimitAllocationTests.java | 4 ++-- .../allocation/SingleShardNoReplicasRoutingTests.java | 4 ++-- .../allocation/SingleShardOneReplicaRoutingTests.java | 4 ++-- .../routing/allocation/TenShardsOneReplicaRoutingTests.java | 4 ++-- .../routing/allocation/ThrottlingAllocationTests.java | 5 +++-- .../routing/allocation/UpdateNumberOfReplicasTests.java | 4 ++-- .../routing/allocation/decider/EnableAllocationTests.java | 5 +++-- server/src/test/java/org/elasticsearch/common/UUIDTests.java | 4 ++-- .../elasticsearch/common/logging/DeprecationLoggerTests.java | 4 +++- .../java/org/elasticsearch/common/logging/LoggersTests.java | 3 ++- .../java/org/elasticsearch/gateway/AsyncShardFetchTests.java | 4 ++-- .../java/org/elasticsearch/gateway/GatewayIndexStateIT.java | 4 ++-- .../org/elasticsearch/indices/state/SimpleIndexStateIT.java | 4 ++-- .../org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java | 4 ++-- .../java/org/elasticsearch/search/SearchCancellationIT.java | 4 ++-- .../search/aggregations/metrics/HDRPercentileRanksIT.java | 4 ++-- .../search/aggregations/metrics/HDRPercentilesIT.java | 4 ++-- .../aggregations/metrics/TDigestPercentileRanksIT.java | 4 ++-- .../search/aggregations/metrics/TDigestPercentilesIT.java | 4 ++-- .../org/elasticsearch/transport/TransportLoggerTests.java | 5 +++-- .../elasticsearch/index/store/EsBaseDirectoryTestCase.java | 5 +++-- .../main/java/org/elasticsearch/test/BackgroundIndexer.java | 5 +++-- .../src/main/java/org/elasticsearch/test/ESTestCase.java | 2 +- .../java/org/elasticsearch/test/ExternalTestCluster.java | 4 ++-- .../java/org/elasticsearch/test/InternalTestCluster.java | 4 ++-- .../src/main/java/org/elasticsearch/test/TestCluster.java | 5 +++-- .../org/elasticsearch/test/disruption/NetworkDisruption.java | 5 +++-- .../elasticsearch/test/disruption/SingleNodeDisruption.java | 4 ++-- .../org/elasticsearch/test/engine/MockEngineSupport.java | 4 ++-- .../test/junit/listeners/ReproduceInfoPrinter.java | 5 +++-- .../elasticsearch/test/rest/yaml/ClientYamlTestClient.java | 5 +++-- 
.../test/rest/yaml/ClientYamlTestExecutionContext.java | 5 +++-- .../main/java/org/elasticsearch/test/rest/yaml/Stash.java | 4 ++-- .../test/rest/yaml/section/ContainsAssertion.java | 4 ++-- .../org/elasticsearch/test/rest/yaml/section/DoSection.java | 4 ++-- .../test/rest/yaml/section/GreaterThanAssertion.java | 4 ++-- .../test/rest/yaml/section/GreaterThanEqualToAssertion.java | 4 ++-- .../test/rest/yaml/section/IsFalseAssertion.java | 4 ++-- .../test/rest/yaml/section/IsTrueAssertion.java | 4 ++-- .../test/rest/yaml/section/LengthAssertion.java | 4 ++-- .../test/rest/yaml/section/LessThanAssertion.java | 4 ++-- .../test/rest/yaml/section/LessThanOrEqualToAssertion.java | 4 ++-- .../elasticsearch/test/rest/yaml/section/MatchAssertion.java | 4 ++-- 68 files changed, 151 insertions(+), 133 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index b0ac2ed5fa0..2a155d2e3ad 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -18,10 +18,10 @@ */ package org.elasticsearch.action.search; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchTransportRequest; @@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicReference; * SearchPhaseContext for tests */ public final class MockSearchPhaseContext implements SearchPhaseContext { - private static final Logger logger = Loggers.getLogger(MockSearchPhaseContext.class); + private static final Logger logger = LogManager.getLogger(MockSearchPhaseContext.class); public AtomicReference phaseFailure = new AtomicReference<>(); final int numShards; final AtomicInteger numSuccess; diff --git a/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java index b814716cb47..fbb0fa732f6 100644 --- a/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/allocation/AwarenessAllocationIT.java @@ -20,6 +20,8 @@ package org.elasticsearch.cluster.allocation; import com.carrotsearch.hppc.ObjectIntHashMap; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +30,6 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.test.ESIntegTestCase; @@ -43,7 +44,7 @@ import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope= ESIntegTestCase.Scope.TEST, numDataNodes =0, minNumDataNodes = 2) public class AwarenessAllocationIT extends 
ESIntegTestCase { - private final Logger logger = Loggers.getLogger(AwarenessAllocationIT.class); + private final Logger logger = LogManager.getLogger(AwarenessAllocationIT.class); @Override protected int numberOfReplicas() { diff --git a/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 7735fe4b241..e7bcce2817c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.allocation; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -74,7 +75,7 @@ import static org.hamcrest.Matchers.hasSize; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class ClusterRerouteIT extends ESIntegTestCase { - private final Logger logger = Loggers.getLogger(ClusterRerouteIT.class); + private final Logger logger = LogManager.getLogger(ClusterRerouteIT.class); public void testRerouteWithCommands_disableAllocationSettings() throws Exception { Settings commonSettings = Settings.builder() @@ -334,7 +335,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1)) .execute().actionGet(); - Logger actionLogger = Loggers.getLogger(TransportClusterRerouteAction.class); + Logger actionLogger = LogManager.getLogger(TransportClusterRerouteAction.class); MockLogAppender dryRunMockLog = new MockLogAppender(); dryRunMockLog.start(); diff --git a/server/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java index ccdc1d6ab33..c3d1a6040a8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -29,7 +30,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; @@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.equalTo; @ClusterScope(scope= Scope.TEST, numDataNodes =0) public class FilteringAllocationIT extends ESIntegTestCase { - private final Logger logger = Loggers.getLogger(FilteringAllocationIT.class); + private final Logger logger = LogManager.getLogger(FilteringAllocationIT.class); public void testDecommissionNodeNoReplicas() throws Exception { logger.info("--> starting 2 nodes"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java index e658ff03a18..dd9846a7b75 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AddIncrementallyTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -32,7 +34,6 @@ import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.hamcrest.Matcher; @@ -47,7 +48,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class AddIncrementallyTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(AddIncrementallyTests.class); + private final Logger logger = LogManager.getLogger(AddIncrementallyTests.class); public void testAddNodesAndIndices() { Settings.Builder settings = Settings.builder(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 1a50ac03e4c..189dc4542b4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterInfo; @@ -44,7 +45,6 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -70,7 +70,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class AllocationCommandsTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(AllocationCommandsTests.class); + private final Logger logger = LogManager.getLogger(AllocationCommandsTests.class); public void testMoveShardCommand() { AllocationService allocation = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java index 2c1ec07c7fa..38a72adeb1b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java 
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AwarenessAllocationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -34,7 +35,6 @@ import org.elasticsearch.cluster.routing.allocation.command.CancelAllocationComm import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.AwarenessAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import java.util.HashMap; @@ -51,7 +51,7 @@ import static org.hamcrest.Matchers.sameInstance; public class AwarenessAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(AwarenessAllocationTests.class); + private final Logger logger = LogManager.getLogger(AwarenessAllocationTests.class); public void testMoveShardOnceNewNodeWithAttributeAdded1() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java index 5e400d95e4b..006c6dff6eb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/BalanceConfigurationTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.ArrayUtil; import org.elasticsearch.Version; @@ -38,7 +40,6 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.gateway.GatewayAllocator; @@ -52,7 +53,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; public class BalanceConfigurationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(BalanceConfigurationTests.class); + private final Logger logger = LogManager.getLogger(BalanceConfigurationTests.class); // TODO maybe we can randomize these numbers somehow final int numberOfNodes = 25; final int numberOfIndices = 12; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index 8cccdb08fb5..cb47426102d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import 
org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.gateway.TestGatewayAllocator; @@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ClusterRebalanceRoutingTests.class); + private final Logger logger = LogManager.getLogger(ClusterRebalanceRoutingTests.class); public void testAlways() { AllocationService strategy = createAllocationService(Settings.builder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java index aa7be906a63..5c851467c04 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ConcurrentRebalanceRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class ConcurrentRebalanceRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ConcurrentRebalanceRoutingTests.class); + private final Logger logger = LogManager.getLogger(ConcurrentRebalanceRoutingTests.class); public void testClusterConcurrentRebalance() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java index 81c6685ca14..01cb7095689 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/DeadNodesAllocationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -30,7 +31,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import 
org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class DeadNodesAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(DeadNodesAllocationTests.class); + private final Logger logger = LogManager.getLogger(DeadNodesAllocationTests.class); public void testSimpleDeadNodeOnStartedPrimaryShard() { AllocationService allocation = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java index 37e8d83592f..8c710d01a30 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -29,7 +30,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class ElectReplicaAsPrimaryDuringRelocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class); + private final Logger logger = LogManager.getLogger(ElectReplicaAsPrimaryDuringRelocationTests.class); public void testElectReplicaAsPrimaryDuringRelocation() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java index 8ebe627751c..106e95b6775 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ExpectedShardSizeAllocationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterInfo; @@ -33,7 +34,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import 
org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class ExpectedShardSizeAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ExpectedShardSizeAllocationTests.class); + private final Logger logger = LogManager.getLogger(ExpectedShardSizeAllocationTests.class); public void testInitializingHasExpectedSize() { final long byteSize = randomIntBetween(0, Integer.MAX_VALUE); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index fbdcadc6ec3..b84c65e8167 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; @@ -38,7 +39,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.indices.cluster.ClusterStateChanges; @@ -63,7 +63,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.hamcrest.Matchers.equalTo; public class FailedNodeRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(FailedNodeRoutingTests.class); + private final Logger logger = LogManager.getLogger(FailedNodeRoutingTests.class); public void testSimpleFailedNodeTest() { AllocationService strategy = createAllocationService(Settings.builder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 787789d410f..05e77c4cf4b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -34,7 +36,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import 
org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.VersionUtils; @@ -56,7 +57,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class FailedShardsRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(FailedShardsRoutingTests.class); + private final Logger logger = LogManager.getLogger(FailedShardsRoutingTests.class); public void testFailedShardPrimaryRelocatingToAndFrom() { AllocationService allocation = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java index d6e54b6e3b3..6086482a442 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexBalanceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -29,7 +30,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class IndexBalanceTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(IndexBalanceTests.class); + private final Logger logger = LogManager.getLogger(IndexBalanceTests.class); public void testBalanceAllNodesStarted() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 711e7401ad2..89d19e03957 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; @@ -49,7 +50,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.ShardId; @@ -76,7 +76,7 @@ import static org.hamcrest.core.Is.is; public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(NodeVersionAllocationDeciderTests.class); + private final Logger logger = LogManager.getLogger(NodeVersionAllocationDeciderTests.class); public void testDoNotAllocateFromPrimary() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java index 35a9be017d5..cdd868c158e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PreferPrimaryAllocationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,14 +29,13 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.hamcrest.Matchers.equalTo; public class PreferPrimaryAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(PreferPrimaryAllocationTests.class); + private final Logger logger = LogManager.getLogger(PreferPrimaryAllocationTests.class); public void testPreferPrimaryAllocationOverReplicas() { logger.info("create an allocation with 1 initial recoveries"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java index a634d32d71d..f306184c576 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryElectionRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class PrimaryElectionRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(PrimaryElectionRoutingTests.class); + private final Logger logger = 
LogManager.getLogger(PrimaryElectionRoutingTests.class); public void testBackupElectionToPrimaryWhenPrimaryCanBeAllocatedToAnotherNode() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java index e5725ed61ef..dcca97369e7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/PrimaryNotRelocatedWhileBeingRecoveredTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -36,7 +36,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.hamcrest.Matchers.equalTo; public class PrimaryNotRelocatedWhileBeingRecoveredTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(PrimaryNotRelocatedWhileBeingRecoveredTests.class); + private final Logger logger = LogManager.getLogger(PrimaryNotRelocatedWhileBeingRecoveredTests.class); public void testPrimaryNotRelocatedWhileBeingRecoveredFrom() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index 1406e4d6d61..565f9c919d0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterInfo; @@ -32,7 +33,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -43,7 +43,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class RebalanceAfterActiveTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(RebalanceAfterActiveTests.class); + private final Logger logger = LogManager.getLogger(RebalanceAfterActiveTests.class); public void 
testRebalanceOnlyAfterAllShardsAreActive() { final long[] sizes = new long[5]; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java index ab64d0131ec..7a90f93516a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ReplicaAllocatedAfterPrimaryTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -27,7 +28,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class ReplicaAllocatedAfterPrimaryTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ReplicaAllocatedAfterPrimaryTests.class); + private final Logger logger = LogManager.getLogger(ReplicaAllocatedAfterPrimaryTests.class); public void testBackupIsAllocatedAfterPrimary() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java index 9401cc1ca6f..bc88158356c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesIntegrityTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class RoutingNodesIntegrityTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(IndexBalanceTests.class); + private final Logger logger = LogManager.getLogger(IndexBalanceTests.class); public void testBalanceAllNodesStarted() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java index 4b74cee8671..f059125f1ea 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; @@ -39,7 +40,6 @@ import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; @@ -52,7 +52,7 @@ import static org.elasticsearch.cluster.routing.allocation.RoutingNodesUtils.num import static org.hamcrest.Matchers.equalTo; public class SameShardRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(SameShardRoutingTests.class); + private final Logger logger = LogManager.getLogger(SameShardRoutingTests.class); public void testSameHost() { AllocationService strategy = createAllocationService( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java index 534e2af5a89..3cf53e60c48 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardVersioningTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -38,7 +38,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.hamcrest.Matchers.equalTo; public class ShardVersioningTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ShardVersioningTests.class); + private final Logger logger = LogManager.getLogger(ShardVersioningTests.class); public void testSimple() { AllocationService strategy = createAllocationService(Settings.builder().put(ClusterRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java index 7530e34cb83..314318fc29f 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsLimitAllocationTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -41,7 +41,7 @@ import static org.elasticsearch.cluster.routing.allocation.RoutingNodesUtils.num import static org.hamcrest.Matchers.equalTo; public class ShardsLimitAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ShardsLimitAllocationTests.class); + private final Logger logger = LogManager.getLogger(ShardsLimitAllocationTests.class); public void testIndexLevelShardsLimitAllocate() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index 25d29d0fca4..7bdad46d61c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -31,7 +32,6 @@ import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import java.util.HashSet; @@ -50,7 +50,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(SingleShardNoReplicasRoutingTests.class); + private final Logger logger = LogManager.getLogger(SingleShardNoReplicasRoutingTests.class); public void testSingleIndexStartedShard() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java index 44c8d5ac4d3..ac8f1af219b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java +++ 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardOneReplicaRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class SingleShardOneReplicaRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(SingleShardOneReplicaRoutingTests.class); + private final Logger logger = LogManager.getLogger(SingleShardOneReplicaRoutingTests.class); public void testSingleIndexFirstStartPrimaryThenBackups() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java index 0239ee6235e..a7179bfba78 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/TenShardsOneReplicaRoutingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -29,7 +30,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class TenShardsOneReplicaRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(TenShardsOneReplicaRoutingTests.class); + private final Logger logger = LogManager.getLogger(TenShardsOneReplicaRoutingTests.class); public void testSingleIndexFirstStartPrimaryThenBackups() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index 01586d9c495..769399a8049 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -21,6 +21,8 @@ package org.elasticsearch.cluster.routing.allocation; import 
com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -41,7 +43,6 @@ import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationComman import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -62,7 +63,7 @@ import static org.elasticsearch.cluster.routing.ShardRoutingState.UNASSIGNED; import static org.hamcrest.Matchers.equalTo; public class ThrottlingAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(ThrottlingAllocationTests.class); + private final Logger logger = LogManager.getLogger(ThrottlingAllocationTests.class); public void testPrimaryRecoveryThrottling() { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java index 3001a4ba9e4..44da514c0b5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/UpdateNumberOfReplicasTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -28,7 +29,6 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class UpdateNumberOfReplicasTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(UpdateNumberOfReplicasTests.class); + private final Logger logger = LogManager.getLogger(UpdateNumberOfReplicasTests.class); public void testUpdateNumberOfReplicas() { AllocationService strategy = createAllocationService(Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java index 5be51ceb3ae..20bd5957aeb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.cluster.routing.allocation.decider; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import 
org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -32,7 +34,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Allocation; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.Rebalance; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -49,7 +50,7 @@ import static org.hamcrest.Matchers.equalTo; public class EnableAllocationTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(EnableAllocationTests.class); + private final Logger logger = LogManager.getLogger(EnableAllocationTests.class); public void testClusterEnableNone() { AllocationService strategy = createAllocationService(Settings.builder() diff --git a/server/src/test/java/org/elasticsearch/common/UUIDTests.java b/server/src/test/java/org/elasticsearch/common/UUIDTests.java index 849db0dc712..dcc440acbcd 100644 --- a/server/src/test/java/org/elasticsearch/common/UUIDTests.java +++ b/server/src/test/java/org/elasticsearch/common/UUIDTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.common; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; @@ -28,7 +29,6 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SerialMergeScheduler; import org.apache.lucene.store.Directory; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -117,7 +117,7 @@ public class UUIDTests extends ESTestCase { } public void testCompression() throws Exception { - Logger logger = Loggers.getLogger(UUIDTests.class); + Logger logger = LogManager.getLogger(UUIDTests.class); // Low number so that the test runs quickly, but the results are more interesting with larger numbers // of indexed documents assertThat(testCompression(100000, 10000, 3, logger), Matchers.lessThan(14d)); // ~12 in practice diff --git a/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java b/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java index 490f7961a89..537bb3db70a 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.logging; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; + +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -49,7 +51,7 @@ public class DeprecationLoggerTests extends ESTestCase { private static final RegexMatcher warningValueMatcher = matches(WARNING_HEADER_PATTERN.pattern()); - private final DeprecationLogger logger = new DeprecationLogger(Loggers.getLogger(getClass())); + private final DeprecationLogger logger = new DeprecationLogger(LogManager.getLogger(getClass())); @Override protected boolean 
enableWarningsCheck() { diff --git a/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java b/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java index 9b69a876c1d..43df5a90036 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.logging; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; @@ -57,7 +58,7 @@ public class LoggersTests extends ESTestCase { public void testParameterizedMessageLambda() throws Exception { final MockAppender appender = new MockAppender("trace_appender"); appender.start(); - final Logger testLogger = Loggers.getLogger(LoggersTests.class); + final Logger testLogger = LogManager.getLogger(LoggersTests.class); Loggers.addAppender(testLogger, appender); Loggers.setLevel(testLogger, Level.TRACE); diff --git a/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index 2b58831a956..2a459477062 100644 --- a/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.gateway; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -292,7 +292,7 @@ public class AsyncShardFetchTests extends ESTestCase { private AtomicInteger reroute = new AtomicInteger(); TestFetch(ThreadPool threadPool) { - super(Loggers.getLogger(TestFetch.class), "test", new ShardId("test", "_na_", 1), null); + super(LogManager.getLogger(TestFetch.class), "test", new ShardId("test", "_na_", 1), null); this.threadPool = threadPool; } diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index 4a0d6a8e888..ff8393b659d 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.gateway; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -35,7 +36,6 @@ import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; @@ -63,7 +63,7 @@ import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class 
GatewayIndexStateIT extends ESIntegTestCase { - private final Logger logger = Loggers.getLogger(GatewayIndexStateIT.class); + private final Logger logger = LogManager.getLogger(GatewayIndexStateIT.class); public void testMappingMetaDataParsed() throws Exception { logger.info("--> starting 1 nodes"); diff --git a/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index 2b3f5072708..59d04c767d8 100644 --- a/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/server/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.indices.state; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -29,7 +30,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexClosedException; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class SimpleIndexStateIT extends ESIntegTestCase { - private final Logger logger = Loggers.getLogger(SimpleIndexStateIT.class); + private final Logger logger = LogManager.getLogger(SimpleIndexStateIT.class); public void testSimpleOpenClose() { logger.info("--> creating test index"); diff --git a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index b0d25f43bd6..720fd0acdf0 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.recovery; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -29,7 +30,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; @@ -55,7 +55,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTi @TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE") public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { - private final Logger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class); + private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class); public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index 
..."); diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java index 2e28d16c71d..81bd844c8fb 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -29,7 +30,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollAction; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; @@ -269,7 +269,7 @@ public class SearchCancellationIT extends ESIntegTestCase { public Map, Object>> pluginScripts() { return Collections.singletonMap(SCRIPT_NAME, params -> { LeafFieldsLookup fieldsLookup = (LeafFieldsLookup) params.get("_fields"); - Loggers.getLogger(SearchCancellationIT.class).info("Blocking on the document {}", fieldsLookup.get("_id")); + LogManager.getLogger(SearchCancellationIT.class).info("Blocking on the document {}", fieldsLookup.get("_id")); hits.incrementAndGet(); try { awaitBusy(() -> shouldBlock.get() == false); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 1321c8bca47..5d0bbf0f853 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; @@ -81,7 +81,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { } } Arrays.sort(percents); - Loggers.getLogger(HDRPercentileRanksIT.class).info("Using values={}", Arrays.toString(percents)); + LogManager.getLogger(HDRPercentileRanksIT.class).info("Using values={}", Arrays.toString(percents)); return percents; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 67eb4939ae5..256717f809f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import 
org.elasticsearch.plugins.Plugin; @@ -82,7 +82,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { } } Arrays.sort(percentiles); - Loggers.getLogger(HDRPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); + LogManager.getLogger(HDRPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); return percentiles; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 8cbf9883fe5..4a68cb68582 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; @@ -81,7 +81,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { } } Arrays.sort(percents); - Loggers.getLogger(TDigestPercentileRanksIT.class).info("Using values={}", Arrays.toString(percents)); + LogManager.getLogger(TDigestPercentileRanksIT.class).info("Using values={}", Arrays.toString(percents)); return percents; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index 73ce6c7ece7..25e3435ea97 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.search.aggregations.metrics; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; @@ -81,7 +81,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { } } Arrays.sort(percentiles); - Loggers.getLogger(TDigestPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); + LogManager.getLogger(TDigestPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); return percentiles; } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java index 9f2b60c6901..ac58b0e25b9 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.transport; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; @@ -44,12 +45,12 @@ public class TransportLoggerTests extends ESTestCase { public void setUp() throws Exception { super.setUp(); appender = new 
MockLogAppender(); - Loggers.addAppender(Loggers.getLogger(TransportLogger.class), appender); + Loggers.addAppender(LogManager.getLogger(TransportLogger.class), appender); appender.start(); } public void tearDown() throws Exception { - Loggers.removeAppender(Loggers.getLogger(TransportLogger.class), appender); + Loggers.removeAppender(LogManager.getLogger(TransportLogger.class), appender); appender.stop(); super.tearDown(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java index c078e88da20..771188341a7 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/store/EsBaseDirectoryTestCase.java @@ -21,12 +21,13 @@ package org.elasticsearch.index.store; import com.carrotsearch.randomizedtesting.annotations.Listeners; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.BaseDirectoryTestCase; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.bootstrap.BootstrapForTesting; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.junit.listeners.ReproduceInfoPrinter; /** @@ -48,6 +49,6 @@ public abstract class EsBaseDirectoryTestCase extends BaseDirectoryTestCase { BootstrapForTesting.ensureInitialized(); } - protected final Logger logger = Loggers.getLogger(getClass()); + protected final Logger logger = LogManager.getLogger(getClass()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index f60fa610fc4..198c02829b1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -20,6 +20,8 @@ package org.elasticsearch.test;/* import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; @@ -29,7 +31,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -50,7 +51,7 @@ import static org.hamcrest.Matchers.equalTo; public class BackgroundIndexer implements AutoCloseable { - private final Logger logger = Loggers.getLogger(getClass()); + private final Logger logger = LogManager.getLogger(getClass()); final Thread[] writers; final CountDownLatch stopLatch; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 82d2c2302e6..4b32745b62a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -249,7 +249,7 @@ public abstract class ESTestCase extends LuceneTestCase { System.setProperty("io.netty.leakDetection.level", "paranoid"); } - protected final Logger logger = Loggers.getLogger(getClass()); + protected final Logger logger = LogManager.getLogger(getClass()); private ThreadContext threadContext; // ----------------------------------------------------------------- diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 6496894baad..16533c5c4de 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -28,7 +29,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -57,7 +57,7 @@ import static org.junit.Assert.assertThat; */ public final class ExternalTestCluster extends TestCluster { - private static final Logger logger = Loggers.getLogger(ExternalTestCluster.class); + private static final Logger logger = LogManager.getLogger(ExternalTestCluster.class); private static final AtomicInteger counter = new AtomicInteger(); public static final String EXTERNAL_CLUSTER_PREFIX = "external_"; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index ce7e6369412..de4226bf275 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -28,6 +28,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; @@ -57,7 +58,6 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureSettings; @@ -174,7 +174,7 @@ import static org.junit.Assert.fail; */ public final class InternalTestCluster extends TestCluster { - private final Logger logger = Loggers.getLogger(getClass()); + private final Logger logger = LogManager.getLogger(getClass()); public static final int DEFAULT_LOW_NUM_MASTER_NODES = 1; public 
static final int DEFAULT_HIGH_NUM_MASTER_NODES = 3; diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index a11b70bfa10..b5aa26a3854 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -20,6 +20,8 @@ package org.elasticsearch.test; import com.carrotsearch.hppc.ObjectArrayList; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; @@ -27,7 +29,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.repositories.RepositoryMissingException; @@ -46,7 +47,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke */ public abstract class TestCluster implements Closeable { - protected final Logger logger = Loggers.getLogger(getClass()); + protected final Logger logger = LogManager.getLogger(getClass()); private final long seed; protected Random random; diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java index 8054847b642..d620e7633f2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/NetworkDisruption.java @@ -20,11 +20,12 @@ package org.elasticsearch.test.disruption; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.InternalTestCluster; @@ -48,7 +49,7 @@ import static org.junit.Assert.assertFalse; */ public class NetworkDisruption implements ServiceDisruptionScheme { - private final Logger logger = Loggers.getLogger(NetworkDisruption.class); + private final Logger logger = LogManager.getLogger(NetworkDisruption.class); private final DisruptedLinks disruptedLinks; private final NetworkLinkDisruptionType networkLinkDisruptionType; diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java index a22994cfa9b..69df4595894 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.test.disruption; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import 
org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.InternalTestCluster; import java.util.Random; @@ -28,7 +28,7 @@ import static org.junit.Assert.assertFalse; public abstract class SingleNodeDisruption implements ServiceDisruptionScheme { - protected final Logger logger = Loggers.getLogger(getClass()); + protected final Logger logger = LogManager.getLogger(getClass()); protected volatile String disruptedNode; protected volatile InternalTestCluster cluster; diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index 182038d5b0a..52b086db338 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.test.engine; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; @@ -28,7 +29,6 @@ import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -66,7 +66,7 @@ public final class MockEngineSupport { private final AtomicBoolean closing = new AtomicBoolean(false); - private final Logger logger = Loggers.getLogger(Engine.class); + private final Logger logger = LogManager.getLogger(Engine.class); private final ShardId shardId; private final QueryCache filterCache; private final QueryCachingPolicy filterCachingPolicy; diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index cddcca59e6c..58e126b4bed 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -19,11 +19,12 @@ package org.elasticsearch.test.junit.listeners; import com.carrotsearch.randomizedtesting.ReproduceErrorMessageBuilder; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; @@ -47,7 +48,7 @@ import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TE */ public class ReproduceInfoPrinter extends RunListener { - protected final Logger logger = Loggers.getLogger(ESTestCase.class); + protected final Logger logger = LogManager.getLogger(ESTestCase.class); @Override public void testStarted(Description description) throws Exception { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index 856fd2a32de..6b400bfb351 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -19,11 +19,13 @@ package org.elasticsearch.test.rest.yaml; import com.carrotsearch.randomizedtesting.RandomizedTest; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.client.methods.HttpGet; import org.apache.http.entity.ContentType; import org.apache.http.util.EntityUtils; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.client.NodeSelector; @@ -34,7 +36,6 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; @@ -57,7 +58,7 @@ import java.util.stream.Collectors; * REST calls. */ public class ClientYamlTestClient implements Closeable { - private static final Logger logger = Loggers.getLogger(ClientYamlTestClient.class); + private static final Logger logger = LogManager.getLogger(ClientYamlTestClient.class); private static final ContentType YAML_CONTENT_TYPE = ContentType.create("application/yaml"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 4061b627cd8..b1337172a56 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -19,15 +19,16 @@ package org.elasticsearch.test.rest.yaml; import com.carrotsearch.randomizedtesting.RandomizedTest; + import org.apache.http.HttpEntity; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -47,7 +48,7 @@ import java.util.Map; */ public class ClientYamlTestExecutionContext { - private static final Logger logger = Loggers.getLogger(ClientYamlTestExecutionContext.class); + private static final Logger logger = LogManager.getLogger(ClientYamlTestExecutionContext.class); private static final XContentType[] STREAMING_CONTENT_TYPES = new XContentType[]{XContentType.JSON, XContentType.SMILE}; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java index a5edeb0195b..6ac78bf6bcf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Stash.java @@ -19,9 +19,9 @@ package 
org.elasticsearch.test.rest.yaml; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -42,7 +42,7 @@ public class Stash implements ToXContentFragment { private static final Pattern EXTENDED_KEY = Pattern.compile("\\$\\{([^}]+)\\}"); private static final Pattern PATH = Pattern.compile("\\$_path"); - private static final Logger logger = Loggers.getLogger(Stash.class); + private static final Logger logger = LogManager.getLogger(Stash.class); public static final Stash EMPTY = new Stash(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java index 9d2d91790c7..3241149dfae 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -43,7 +43,7 @@ public class ContainsAssertion extends Assertion { return new ContainsAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); } - private static final Logger logger = Loggers.getLogger(ContainsAssertion.class); + private static final Logger logger = LogManager.getLogger(ContainsAssertion.class); public ContainsAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 892c5874dce..5fb5c1d003d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -19,6 +19,7 @@ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.client.HasAttributeNodeSelector; @@ -28,7 +29,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentLocation; @@ -183,7 +183,7 @@ public class DoSection implements ExecutableSection { } - private static final Logger logger = Loggers.getLogger(DoSection.class); + private static final Logger logger = LogManager.getLogger(DoSection.class); private final XContentLocation location; private String catchParam; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java index 494d65e05de..f3518bedd02 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanAssertion.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -47,7 +47,7 @@ public class GreaterThanAssertion extends Assertion { return new GreaterThanAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); } - private static final Logger logger = Loggers.getLogger(GreaterThanAssertion.class); + private static final Logger logger = LogManager.getLogger(GreaterThanAssertion.class); public GreaterThanAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java index 3fd9bf7adfd..e35fc445050 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/GreaterThanEqualToAssertion.java @@ -19,9 +19,9 @@ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -48,7 +48,7 @@ public class GreaterThanEqualToAssertion extends Assertion { return new GreaterThanEqualToAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); } - private static final Logger logger = Loggers.getLogger(GreaterThanEqualToAssertion.class); + private static final Logger logger = LogManager.getLogger(GreaterThanEqualToAssertion.class); public GreaterThanEqualToAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java index 56ee603c70f..276e4e02291 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsFalseAssertion.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -41,7 +41,7 @@ public class IsFalseAssertion extends Assertion { return new IsFalseAssertion(parser.getTokenLocation(), ParserUtils.parseField(parser)); } - private static final Logger logger = Loggers.getLogger(IsFalseAssertion.class); + private static final 
Logger logger = LogManager.getLogger(IsFalseAssertion.class); public IsFalseAssertion(XContentLocation location, String field) { super(location, field, false); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java index 9b3f37e1f52..b4cbe3c496f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/IsTrueAssertion.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -42,7 +42,7 @@ public class IsTrueAssertion extends Assertion { return new IsTrueAssertion(parser.getTokenLocation(), ParserUtils.parseField(parser)); } - private static final Logger logger = Loggers.getLogger(IsTrueAssertion.class); + private static final Logger logger = LogManager.getLogger(IsTrueAssertion.class); public IsTrueAssertion(XContentLocation location, String field) { super(location, field, true); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java index aeecc50b90d..cad415b40f9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LengthAssertion.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -55,7 +55,7 @@ public class LengthAssertion extends Assertion { return new LengthAssertion(location, stringObjectTuple.v1(), value); } - private static final Logger logger = Loggers.getLogger(LengthAssertion.class); + private static final Logger logger = LogManager.getLogger(LengthAssertion.class); public LengthAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java index 75a1edcf81c..84461202df4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanAssertion.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -48,7 +48,7 @@ public class LessThanAssertion extends Assertion { return new LessThanAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); } - private static final Logger 
logger = Loggers.getLogger(LessThanAssertion.class); + private static final Logger logger = LogManager.getLogger(LessThanAssertion.class); public LessThanAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java index 23b6a1e4efc..0b11b304ce6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/LessThanOrEqualToAssertion.java @@ -19,9 +19,9 @@ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; @@ -48,7 +48,7 @@ public class LessThanOrEqualToAssertion extends Assertion { return new LessThanOrEqualToAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); } - private static final Logger logger = Loggers.getLogger(LessThanOrEqualToAssertion.class); + private static final Logger logger = LogManager.getLogger(LessThanOrEqualToAssertion.class); public LessThanOrEqualToAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java index 6ecaae75a8e..09f88f42492 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.test.rest.yaml.section; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -48,7 +48,7 @@ public class MatchAssertion extends Assertion { return new MatchAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); } - private static final Logger logger = Loggers.getLogger(MatchAssertion.class); + private static final Logger logger = LogManager.getLogger(MatchAssertion.class); public MatchAssertion(XContentLocation location, String field, Object expectedValue) { super(location, field, expectedValue); From 2b652f324240fc825f23210cffb3b73ecf9fb397 Mon Sep 17 00:00:00 2001 From: lipsill <39668292+lipsill@users.noreply.github.com> Date: Thu, 25 Oct 2018 15:52:50 +0200 Subject: [PATCH 62/67] Logging: server: clean up logging (#34593) Replace internal deprecated calls to `Loggers.getLogger(Class)` with direct calls to log4j `LogManager.getLogger(Class)` --- .../src/main/java/org/elasticsearch/ExceptionsHelper.java | 4 ++-- .../admin/indices/template/put/PutIndexTemplateRequest.java | 4 ++-- .../org/elasticsearch/action/bulk/BulkRequestHandler.java | 4 ++-- .../action/support/PlainListenableActionFuture.java | 4 ++-- 
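The substitution described in the commit message is mechanical across all 52 touched files. As an illustrative sketch only (the class name ExampleComponent is invented for this note and does not appear in the patch), each affected class moves from the deprecated Elasticsearch helper to the log4j API directly:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class ExampleComponent {
        // before: private static final Logger logger = Loggers.getLogger(ExampleComponent.class);
        // after: obtain the logger from log4j's LogManager, dropping the deprecated wrapper
        private static final Logger logger = LogManager.getLogger(ExampleComponent.class);

        public void doWork() {
            logger.debug("doing work");
        }
    }

The per-file hunks that follow apply exactly this one-line change, plus the corresponding import swap.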
.../main/java/org/elasticsearch/bootstrap/Bootstrap.java | 2 +- .../java/org/elasticsearch/bootstrap/BootstrapChecks.java | 6 +++--- .../bootstrap/ElasticsearchUncaughtExceptionHandler.java | 4 ++-- .../main/java/org/elasticsearch/bootstrap/JNACLibrary.java | 5 +++-- .../org/elasticsearch/bootstrap/JNAKernel32Library.java | 5 +++-- .../main/java/org/elasticsearch/bootstrap/JNANatives.java | 5 +++-- .../src/main/java/org/elasticsearch/bootstrap/Natives.java | 4 ++-- .../java/org/elasticsearch/bootstrap/SystemCallFilter.java | 5 +++-- .../cluster/metadata/IndexTemplateMetaData.java | 5 +++-- .../java/org/elasticsearch/cluster/metadata/MetaData.java | 5 +++-- .../routing/allocation/command/CancelAllocationCommand.java | 4 ++-- .../org/elasticsearch/cluster/service/MasterService.java | 4 ++-- .../java/org/elasticsearch/common/inject/spi/Elements.java | 4 ++-- .../java/org/elasticsearch/common/network/IfConfig.java | 4 ++-- .../java/org/elasticsearch/common/settings/Settings.java | 4 ++-- .../java/org/elasticsearch/common/unit/ByteSizeValue.java | 4 ++-- .../common/xcontent/LoggingDeprecationHandler.java | 4 ++-- .../java/org/elasticsearch/gateway/MetaDataStateFormat.java | 4 ++-- .../fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java | 4 ++-- .../org/elasticsearch/index/mapper/DynamicTemplate.java | 4 ++-- .../java/org/elasticsearch/index/mapper/MapperService.java | 4 ++-- .../org/elasticsearch/index/query/TypeQueryBuilder.java | 4 ++-- .../query/functionscore/RandomScoreFunctionBuilder.java | 5 +++-- .../index/reindex/WorkerBulkByScrollTaskState.java | 4 ++-- .../elasticsearch/index/shard/ElasticsearchMergePolicy.java | 4 ++-- .../index/shard/RemoveCorruptedShardDataCommand.java | 5 +++-- .../elasticsearch/index/similarity/SimilarityProviders.java | 4 ++-- .../index/translog/TruncateTranslogAction.java | 4 ++-- .../indices/breaker/HierarchyCircuitBreakerService.java | 6 +++--- .../src/main/java/org/elasticsearch/monitor/jvm/JvmPid.java | 4 ++-- server/src/main/java/org/elasticsearch/node/Node.java | 4 ++-- .../main/java/org/elasticsearch/plugins/PluginsService.java | 5 ++--- .../org/elasticsearch/rest/action/RestActionListener.java | 4 ++-- .../action/admin/indices/RestPutIndexTemplateAction.java | 5 +++-- .../elasticsearch/rest/action/search/RestSearchAction.java | 4 ++-- .../main/java/org/elasticsearch/script/ScriptMetaData.java | 4 ++-- .../java/org/elasticsearch/script/StoredScriptSource.java | 4 ++-- .../elasticsearch/search/aggregations/InternalOrder.java | 4 ++-- .../significant/SignificantTermsAggregatorFactory.java | 5 +++-- .../aggregations/bucket/terms/TermsAggregatorFactory.java | 4 ++-- .../elasticsearch/search/builder/SearchSourceBuilder.java | 4 ++-- .../search/fetch/subphase/DocValueFieldsFetchSubPhase.java | 5 +++-- .../java/org/elasticsearch/search/slice/SliceBuilder.java | 4 ++-- .../org/elasticsearch/search/sort/FieldSortBuilder.java | 4 ++-- .../elasticsearch/search/sort/GeoDistanceSortBuilder.java | 4 ++-- .../org/elasticsearch/search/sort/ScriptSortBuilder.java | 4 ++-- .../java/org/elasticsearch/tasks/LoggingTaskListener.java | 4 ++-- .../main/java/org/elasticsearch/watcher/FileWatcher.java | 4 ++-- 52 files changed, 116 insertions(+), 106 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 9f756666217..923a76c0acb 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -19,13 
+19,13 @@ package org.elasticsearch; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; import org.elasticsearch.rest.RestStatus; @@ -47,7 +47,7 @@ import java.util.stream.Collectors; public final class ExceptionsHelper { - private static final Logger logger = Loggers.getLogger(ExceptionsHelper.class); + private static final Logger logger = LogManager.getLogger(ExceptionsHelper.class); public static RuntimeException convertToRuntime(Exception e) { if (e instanceof RuntimeException) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index d254f989d4a..5b1b3dd2158 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.action.admin.indices.template.put; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; @@ -34,7 +35,6 @@ import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -69,7 +69,7 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; */ public class PutIndexTemplateRequest extends MasterNodeRequest implements IndicesRequest, ToXContent { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(PutIndexTemplateRequest.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(PutIndexTemplateRequest.class)); private String name; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java index cf5f94a9738..2f5db520088 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestHandler.java @@ -18,10 +18,10 @@ */ package org.elasticsearch.action.bulk; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.threadpool.Scheduler; import java.util.concurrent.CountDownLatch; @@ -43,7 +43,7 @@ public final class BulkRequestHandler { BulkRequestHandler(BiConsumer> consumer, 
BackoffPolicy backoffPolicy, BulkProcessor.Listener listener, Scheduler scheduler, int concurrentRequests) { assert concurrentRequests >= 0; - this.logger = Loggers.getLogger(getClass()); + this.logger = LogManager.getLogger(getClass()); this.consumer = consumer; this.listener = listener; this.concurrentRequests = concurrentRequests; diff --git a/server/src/main/java/org/elasticsearch/action/support/PlainListenableActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/PlainListenableActionFuture.java index 943c3679709..d99f2e620c1 100644 --- a/server/src/main/java/org/elasticsearch/action/support/PlainListenableActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/support/PlainListenableActionFuture.java @@ -19,10 +19,10 @@ package org.elasticsearch.action.support; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ListenableActionFuture; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; @@ -123,7 +123,7 @@ public class PlainListenableActionFuture extends AdapterActionFuture im private static final class DispatchingListenableActionFuture extends PlainListenableActionFuture { - private static final Logger logger = Loggers.getLogger(DispatchingListenableActionFuture.class); + private static final Logger logger = LogManager.getLogger(DispatchingListenableActionFuture.class); private final ThreadPool threadPool; private DispatchingListenableActionFuture(ThreadPool threadPool) { diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index befe3a00ac1..e615dcf8ace 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -95,7 +95,7 @@ final class Bootstrap { /** initialize native resources */ public static void initializeNatives(Path tmpFile, boolean mlockAll, boolean systemCallFilter, boolean ctrlHandler) { - final Logger logger = Loggers.getLogger(Bootstrap.class); + final Logger logger = LogManager.getLogger(Bootstrap.class); // check if the user is running as root, and bail if (Natives.definitelyRunningAsRoot()) { diff --git a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java index c5a8e806f41..0c433192ad6 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/BootstrapChecks.java @@ -19,12 +19,12 @@ package org.elasticsearch.bootstrap; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Constants; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; @@ -90,7 +90,7 @@ final class BootstrapChecks { final BootstrapContext context, final boolean enforceLimits, final List checks) throws NodeValidationException { - check(context, enforceLimits, checks, Loggers.getLogger(BootstrapChecks.class)); + check(context, enforceLimits, checks, 
LogManager.getLogger(BootstrapChecks.class)); } /** @@ -417,7 +417,7 @@ final class BootstrapChecks { // visible for testing long getMaxMapCount() { - return getMaxMapCount(Loggers.getLogger(BootstrapChecks.class)); + return getMaxMapCount(LogManager.getLogger(BootstrapChecks.class)); } // visible for testing diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java b/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java index 1ef9b7740c2..1c3c0ccf6b6 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandler.java @@ -19,17 +19,17 @@ package org.elasticsearch.bootstrap; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.Loggers; import java.io.IOError; import java.security.AccessController; import java.security.PrivilegedAction; class ElasticsearchUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { - private static final Logger logger = Loggers.getLogger(ElasticsearchUncaughtExceptionHandler.class); + private static final Logger logger = LogManager.getLogger(ElasticsearchUncaughtExceptionHandler.class); @Override public void uncaughtException(Thread t, Throwable e) { diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java b/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java index 64dabe92363..f510480cd54 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNACLibrary.java @@ -22,9 +22,10 @@ package org.elasticsearch.bootstrap; import com.sun.jna.Native; import com.sun.jna.NativeLong; import com.sun.jna.Structure; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.elasticsearch.common.logging.Loggers; import java.util.Arrays; import java.util.List; @@ -34,7 +35,7 @@ import java.util.List; */ final class JNACLibrary { - private static final Logger logger = Loggers.getLogger(JNACLibrary.class); + private static final Logger logger = LogManager.getLogger(JNACLibrary.class); public static final int MCL_CURRENT = 1; public static final int ENOMEM = 12; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java b/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java index 99574c2b39b..b843d39cbd1 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNAKernel32Library.java @@ -26,9 +26,10 @@ import com.sun.jna.Pointer; import com.sun.jna.Structure; import com.sun.jna.WString; import com.sun.jna.win32.StdCallLibrary; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.elasticsearch.common.logging.Loggers; import java.util.ArrayList; import java.util.Arrays; @@ -41,7 +42,7 @@ import java.util.List; */ final class JNAKernel32Library { - private static final Logger logger = Loggers.getLogger(JNAKernel32Library.class); + private static final Logger logger = LogManager.getLogger(JNAKernel32Library.class); // Callbacks must be kept around in order to be able 
to be called later, // when the Windows ConsoleCtrlHandler sends an event. diff --git a/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java b/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java index 4a40db846e0..8e86f6aa4b7 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/JNANatives.java @@ -22,9 +22,10 @@ package org.elasticsearch.bootstrap; import com.sun.jna.Native; import com.sun.jna.Pointer; import com.sun.jna.WString; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.monitor.jvm.JvmInfo; import java.nio.file.Path; @@ -40,7 +41,7 @@ class JNANatives { /** no instantiation */ private JNANatives() {} - private static final Logger logger = Loggers.getLogger(JNANatives.class); + private static final Logger logger = LogManager.getLogger(JNANatives.class); // Set to true, in case native mlockall call was successful static boolean LOCAL_MLOCKALL = false; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java index 9bd2a5c2f3d..10117698e93 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -19,8 +19,8 @@ package org.elasticsearch.bootstrap; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import java.nio.file.Path; @@ -32,7 +32,7 @@ final class Natives { /** no instantiation */ private Natives() {} - private static final Logger logger = Loggers.getLogger(Natives.class); + private static final Logger logger = LogManager.getLogger(Natives.class); // marker to determine if the JNA class files are available to the JVM static final boolean JNA_AVAILABLE; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java b/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java index c6667bee4cd..59f8bd5daf7 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/SystemCallFilter.java @@ -26,10 +26,11 @@ import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import com.sun.jna.Structure; import com.sun.jna.ptr.PointerByReference; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.nio.ByteBuffer; @@ -91,7 +92,7 @@ import java.util.Map; */ // not an example of how to write code!!! 
final class SystemCallFilter { - private static final Logger logger = Loggers.getLogger(SystemCallFilter.class); + private static final Logger logger = LogManager.getLogger(SystemCallFilter.class); // Linux implementation, based on seccomp(2) or prctl(2) with bpf filtering diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index 7e2d9256303..608e89514f2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -20,6 +20,8 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; @@ -33,7 +35,6 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; @@ -52,7 +53,7 @@ import java.util.Set; public class IndexTemplateMetaData extends AbstractDiffable { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(IndexTemplateMetaData.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(IndexTemplateMetaData.class)); private final String name; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 8653df73c41..19c3de72279 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -22,7 +22,9 @@ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.action.AliasesRequest; import org.elasticsearch.cluster.ClusterState; @@ -41,7 +43,6 @@ import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -82,7 +83,7 @@ import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; public class MetaData implements Iterable, Diffable, ToXContentFragment { - private static final Logger logger = Loggers.getLogger(MetaData.class); + private static final Logger logger = LogManager.getLogger(MetaData.class); public static final String ALL = "_all"; diff --git 
a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index afeb0e0dab1..a167ced5bc2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.routing.allocation.command; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -32,7 +33,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexNotFoundException; @@ -154,7 +154,7 @@ public class CancelAllocationCommand implements AllocationCommand { discoNode + ", shard is primary and " + shardRouting.state().name().toLowerCase(Locale.ROOT)); } } - routingNodes.failShard(Loggers.getLogger(CancelAllocationCommand.class), shardRouting, + routingNodes.failShard(LogManager.getLogger(CancelAllocationCommand.class), shardRouting, new UnassignedInfo(UnassignedInfo.Reason.REROUTE_CANCELLED, null), indexMetaData, allocation.changes()); // TODO: We don't have to remove a cancelled shard from in-sync set once we have a strict resync implementation. 
allocation.removeAllocationId(shardRouting); diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 8927adfd434..d720e9d603f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.service; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.Assertions; @@ -38,7 +39,6 @@ import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.TimeValue; @@ -553,7 +553,7 @@ public class MasterService extends AbstractLifecycleComponent { private static class AckCountDownListener implements Discovery.AckListener { - private static final Logger logger = Loggers.getLogger(AckCountDownListener.class); + private static final Logger logger = LogManager.getLogger(AckCountDownListener.class); private final AckedClusterStateTaskListener ackedTaskListener; private final CountDown countDown; diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java b/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java index 6d930953e29..c862ea2aa27 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/Elements.java @@ -16,6 +16,7 @@ package org.elasticsearch.common.inject.spi; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.Binder; @@ -40,7 +41,6 @@ import org.elasticsearch.common.inject.internal.PrivateElementsImpl; import org.elasticsearch.common.inject.internal.ProviderMethodsModule; import org.elasticsearch.common.inject.internal.SourceProvider; import org.elasticsearch.common.inject.matcher.Matcher; -import org.elasticsearch.common.logging.Loggers; import java.lang.annotation.Annotation; import java.util.ArrayList; @@ -338,7 +338,7 @@ public final class Elements { return builder; } - private static Logger logger = Loggers.getLogger(Elements.class); + private static Logger logger = LogManager.getLogger(Elements.class); protected Object getSource() { Object ret; diff --git a/server/src/main/java/org/elasticsearch/common/network/IfConfig.java b/server/src/main/java/org/elasticsearch/common/network/IfConfig.java index a190643d3b4..b728c24da20 100644 --- a/server/src/main/java/org/elasticsearch/common/network/IfConfig.java +++ b/server/src/main/java/org/elasticsearch/common/network/IfConfig.java @@ -19,8 +19,8 @@ package org.elasticsearch.common.network; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.net.Inet6Address; @@ -36,7 +36,7 @@ import java.util.Locale; */ public final class IfConfig { - private static final Logger logger = Loggers.getLogger(IfConfig.class); + private static final Logger logger = 
LogManager.getLogger(IfConfig.class); private static final String INDENT = " "; /** log interface configuration at debug level, if its enabled */ diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java index d350c26ce5a..06bec217acf 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.settings; import org.apache.logging.log4j.Level; import org.elasticsearch.core.internal.io.IOUtils; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; @@ -30,7 +31,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.LogConfigurator; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.MemorySizeValue; @@ -346,7 +346,7 @@ public final class Settings implements ToXContentFragment { * {@link Setting} object constructed in, for example, {@link org.elasticsearch.env.Environment}. */ static class DeprecationLoggerHolder { - static DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(Settings.class)); + static DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(Settings.class)); } /** diff --git a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index 9d36b6f5ff6..0358f8f318d 100644 --- a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.unit; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; @@ -26,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,7 +36,7 @@ import java.util.Objects; public class ByteSizeValue implements Writeable, Comparable, ToXContentFragment { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ByteSizeValue.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(ByteSizeValue.class)); public static final ByteSizeValue ZERO = new ByteSizeValue(0, ByteSizeUnit.BYTES); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/LoggingDeprecationHandler.java b/server/src/main/java/org/elasticsearch/common/xcontent/LoggingDeprecationHandler.java index e075fb1711d..5b92dec573d 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/LoggingDeprecationHandler.java +++ 
b/server/src/main/java/org/elasticsearch/common/xcontent/LoggingDeprecationHandler.java @@ -19,9 +19,9 @@ package org.elasticsearch.common.xcontent; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; /** * Logs deprecations to the {@link DeprecationLogger}. @@ -42,7 +42,7 @@ public class LoggingDeprecationHandler implements DeprecationHandler { * Changing that will require some research to make super duper * sure it is safe. */ - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ParseField.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(ParseField.class)); private LoggingDeprecationHandler() { // Singleton diff --git a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index 237b36b53d4..138f9501e6f 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.gateway; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.codecs.CodecUtil; @@ -30,7 +31,6 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.OutputStreamIndexOutput; import org.apache.lucene.store.SimpleFSDirectory; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -73,7 +73,7 @@ public abstract class MetaDataStateFormat { private final String prefix; private final Pattern stateFilePattern; - private static final Logger logger = Loggers.getLogger(MetaDataStateFormat.class); + private static final Logger logger = LogManager.getLogger(MetaDataStateFormat.class); /** * Creates a new {@link MetaDataStateFormat} instance diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 4b3643dda05..b71dcc75934 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.fielddata.plain; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; @@ -29,7 +30,6 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -50,7 +50,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends 
DocValuesIndexFieldData i private final IndexFieldDataCache cache; private final CircuitBreakerService breakerService; private final Function> scriptFunction; - private static final Logger logger = Loggers.getLogger(SortedSetDVOrdinalsIndexFieldData.class); + private static final Logger logger = LogManager.getLogger(SortedSetDVOrdinalsIndexFieldData.class); public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, CircuitBreakerService breakerService, Function> scriptFunction) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java index 71a2cdb32f9..1b81977a572 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java @@ -19,9 +19,9 @@ package org.elasticsearch.index.mapper; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,7 +36,7 @@ import java.util.TreeMap; public class DynamicTemplate implements ToXContentObject { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(DynamicTemplate.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(DynamicTemplate.class)); public enum MatchType { SIMPLE { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 5d0239f846a..1bda0157587 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -35,7 +36,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -117,7 +117,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { "_size", "_timestamp", "_ttl", IgnoredFieldMapper.NAME ); - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(MapperService.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(MapperService.class)); private final IndexAnalyzers indexAnalyzers; diff --git a/server/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java index 9f1916fb719..cb8005ad26c 100644 --- 
a/server/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TypeQueryBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; @@ -28,7 +29,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.DocumentMapper; @@ -40,7 +40,7 @@ public class TypeQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "type"; private static final ParseField VALUE_FIELD = new ParseField("value"); - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TypeQueryBuilder.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(TypeQueryBuilder.class)); private final String type; diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java index c0a13105f84..b5bdc05adfb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/RandomScoreFunctionBuilder.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.index.query.functionscore; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -40,7 +40,8 @@ import java.util.Objects; */ public class RandomScoreFunctionBuilder extends ScoreFunctionBuilder { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(RandomScoreFunctionBuilder.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger( + LogManager.getLogger(RandomScoreFunctionBuilder.class)); public static final String NAME = "random_score"; private String field; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/WorkerBulkByScrollTaskState.java b/server/src/main/java/org/elasticsearch/index/reindex/WorkerBulkByScrollTaskState.java index 3a96259d04a..797d6227561 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/WorkerBulkByScrollTaskState.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/WorkerBulkByScrollTaskState.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.reindex; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import 
org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.FutureUtils; @@ -43,7 +43,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueNanos; */ public class WorkerBulkByScrollTaskState implements SuccessfullyProcessed { - private static final Logger logger = Loggers.getLogger(WorkerBulkByScrollTaskState.class); + private static final Logger logger = LogManager.getLogger(WorkerBulkByScrollTaskState.class); /** * Maximum wait time allowed for throttling. diff --git a/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java b/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java index 430e75ed494..c6f28732b37 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ElasticsearchMergePolicy.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.shard; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.FilterMergePolicy; import org.apache.lucene.index.IndexWriter; @@ -26,7 +27,6 @@ import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.elasticsearch.Version; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.util.Collections; @@ -46,7 +46,7 @@ import java.util.Map; */ public final class ElasticsearchMergePolicy extends FilterMergePolicy { - private static Logger logger = Loggers.getLogger(ElasticsearchMergePolicy.class); + private static Logger logger = LogManager.getLogger(ElasticsearchMergePolicy.class); // True if the next merge request should do segment upgrades: private volatile boolean upgradeInProgress; diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java index 54c1dd7c1db..9535108cad3 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommand.java @@ -21,6 +21,8 @@ package org.elasticsearch.index.shard; import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; + +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -45,7 +47,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -74,7 +75,7 @@ import java.util.Objects; public class RemoveCorruptedShardDataCommand extends EnvironmentAwareCommand { - private static final Logger logger = Loggers.getLogger(RemoveCorruptedShardDataCommand.class); + private static final Logger logger = LogManager.getLogger(RemoveCorruptedShardDataCommand.class); private final OptionSpec folderOption; private final OptionSpec indexNameOption; diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java 
b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java index 9aab1260b6b..2de877551a9 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.similarity; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.similarities.AfterEffect; import org.apache.lucene.search.similarities.AfterEffectB; import org.apache.lucene.search.similarities.AfterEffectL; @@ -52,7 +53,6 @@ import org.apache.lucene.search.similarities.NormalizationH3; import org.apache.lucene.search.similarities.NormalizationZ; import org.elasticsearch.Version; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import java.util.Arrays; @@ -67,7 +67,7 @@ final class SimilarityProviders { private SimilarityProviders() {} // no instantiation - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(SimilarityProviders.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(SimilarityProviders.class)); static final String DISCOUNT_OVERLAPS = "discount_overlaps"; private static final Map BASIC_MODELS; diff --git a/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogAction.java b/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogAction.java index 0b9c3655096..87600f4441b 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogAction.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TruncateTranslogAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.translog; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; @@ -28,7 +29,6 @@ import org.elasticsearch.cli.Terminal; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -55,7 +55,7 @@ import java.util.TreeSet; public class TruncateTranslogAction { - protected static final Logger logger = Loggers.getLogger(TruncateTranslogAction.class); + protected static final Logger logger = LogManager.getLogger(TruncateTranslogAction.class); private final NamedXContentRegistry namedXContentRegistry; public TruncateTranslogAction(NamedXContentRegistry namedXContentRegistry) { diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 235d8b46e5b..eb2dc587bfb 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -19,11 +19,11 @@ package org.elasticsearch.indices.breaker; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreaker; import 
org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.breaker.NoopCircuitBreaker; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -325,7 +325,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { } else { CircuitBreaker oldBreaker; CircuitBreaker breaker = new ChildMemoryCircuitBreaker(breakerSettings, - Loggers.getLogger(CHILD_LOGGER_PREFIX + breakerSettings.getName()), + LogManager.getLogger(CHILD_LOGGER_PREFIX + breakerSettings.getName()), this, breakerSettings.getName()); for (;;) { @@ -335,7 +335,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService { } breaker = new ChildMemoryCircuitBreaker(breakerSettings, (ChildMemoryCircuitBreaker)oldBreaker, - Loggers.getLogger(CHILD_LOGGER_PREFIX + breakerSettings.getName()), + LogManager.getLogger(CHILD_LOGGER_PREFIX + breakerSettings.getName()), this, breakerSettings.getName()); if (breakers.replace(breakerSettings.getName(), oldBreaker, breaker)) { diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmPid.java b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmPid.java index 2b1b2a1df47..71472069069 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/JvmPid.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/JvmPid.java @@ -19,8 +19,8 @@ package org.elasticsearch.monitor.jvm; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.logging.Loggers; import java.lang.management.ManagementFactory; @@ -41,7 +41,7 @@ class JvmPid { try { return Long.parseLong(name.split("@")[0]); } catch (final NumberFormatException e) { - Loggers.getLogger(JvmPid.class).debug(new ParameterizedMessage("failed parsing PID from [{}]", name), e); + LogManager.getLogger(JvmPid.class).debug(new ParameterizedMessage("failed parsing PID from [{}]", name), e); return -1; } } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 911063e8641..875b8b2149d 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -19,6 +19,7 @@ package org.elasticsearch.node; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; @@ -66,7 +67,6 @@ import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; @@ -263,7 +263,7 @@ public class Node implements Closeable { */ protected Node( final Environment environment, Collection> classpathPlugins, boolean forbidPrivateIndexSettings) { - logger = Loggers.getLogger(Node.class); + logger = LogManager.getLogger(Node.class); final List resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error boolean success = false; try { diff --git 
a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index 6805a0e40d5..4ce48926871 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugins; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.util.CharFilterFactory; import org.apache.lucene.analysis.util.TokenFilterFactory; import org.apache.lucene.analysis.util.TokenizerFactory; @@ -37,7 +37,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleComponent; import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -385,7 +384,7 @@ public class PluginsService extends AbstractComponent { // get a bundle for a single plugin dir private static Bundle readPluginBundle(final Set bundles, final Path plugin, String type) throws IOException { - Loggers.getLogger(PluginsService.class).trace("--- adding [{}] [{}]", type, plugin.toAbsolutePath()); + LogManager.getLogger(PluginsService.class).trace("--- adding [{}] [{}]", type, plugin.toAbsolutePath()); final PluginInfo info; try { info = PluginInfo.readFromProperties(plugin); diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestActionListener.java b/server/src/main/java/org/elasticsearch/rest/action/RestActionListener.java index 572da497c1f..15e535ebfe2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestActionListener.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestActionListener.java @@ -19,9 +19,9 @@ package org.elasticsearch.rest.action; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -33,7 +33,7 @@ public abstract class RestActionListener implements ActionListener RESPONSE_PARAMS = Collections.singleton(TYPED_KEYS_PARAM); - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(RestSearchAction.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(RestSearchAction.class)); public RestSearchAction(Settings settings, RestController controller) { super(settings); diff --git a/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java b/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java index 35a7c2e60d6..1ce88f7c711 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.script; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; @@ -30,7 +31,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -51,7 +51,7 @@ public final class ScriptMetaData implements MetaData.Custom, Writeable, ToXCont /** * Standard deprecation logger for used to deprecate allowance of empty templates. */ - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ScriptMetaData.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(ScriptMetaData.class)); /** * A builder used to modify the currently stored scripts data held within diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index aabef751fc7..7a16c7ad2d5 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -19,6 +19,7 @@ package org.elasticsearch.script; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.Diff; @@ -30,7 +31,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ObjectParser; @@ -59,7 +59,7 @@ public class StoredScriptSource extends AbstractDiffable imp /** * Standard deprecation logger for used to deprecate allowance of empty templates. */ - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(StoredScriptSource.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(StoredScriptSource.class)); /** * Standard {@link ParseField} for outer level of stored script source. 
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java index 9312f9720cd..caea05f30e5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.search.aggregations; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.Comparators; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -528,7 +528,7 @@ public class InternalOrder extends BucketOrder { public static class Parser { private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(Loggers.getLogger(Parser.class)); + new DeprecationLogger(LogManager.getLogger(Parser.class)); /** * Parse a {@link BucketOrder} from {@link XContent}. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index d612014e017..01777292613 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.significant; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; @@ -31,7 +32,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.index.FilterableTermsEnum; import org.elasticsearch.common.lucene.index.FreqTermsEnum; import org.elasticsearch.index.mapper.MappedFieldType; @@ -60,7 +60,8 @@ import java.util.Map; public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFactory implements Releasable { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(SignificantTermsAggregatorFactory.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger( + LogManager.getLogger(SignificantTermsAggregatorFactory.class)); private final IncludeExclude includeExclude; private final String executionHint; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index cc2719e5b96..1b5eaee639e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -19,10 +19,10 @@ package 
org.elasticsearch.search.aggregations.bucket.terms; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; @@ -47,7 +47,7 @@ import java.util.List; import java.util.Map; public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TermsAggregatorFactory.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(TermsAggregatorFactory.class)); static Boolean REMAP_GLOBAL_ORDS, COLLECT_SEGMENT_ORDS; diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 92ae481a830..60767bbe371 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.builder; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -29,7 +30,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -78,7 +78,7 @@ import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQuery */ public final class SearchSourceBuilder implements Writeable, ToXContentObject, Rewriteable { private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(Loggers.getLogger(SearchSourceBuilder.class)); + new DeprecationLogger(LogManager.getLogger(SearchSourceBuilder.class)); public static final ParseField FROM_FIELD = new ParseField("from"); public static final ParseField SIZE_FIELD = new ParseField("size"); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java index 398bc847b33..0819dfd74df 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java @@ -18,12 +18,12 @@ */ package org.elasticsearch.search.fetch.subphase; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.SortedNumericDocValues; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import 
org.elasticsearch.index.fielddata.IndexFieldData; @@ -55,7 +55,8 @@ import java.util.stream.Collectors; */ public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(DocValueFieldsFetchSubPhase.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger( + LogManager.getLogger(DocValueFieldsFetchSubPhase.class)); @Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java index 7e6945b9d48..9b11de93bee 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/slice/SliceBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.slice; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -33,7 +34,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -65,7 +65,7 @@ import java.util.Set; */ public class SliceBuilder implements Writeable, ToXContentObject { - private static final DeprecationLogger DEPRECATION_LOG = new DeprecationLogger(Loggers.getLogger(SliceBuilder.class)); + private static final DeprecationLogger DEPRECATION_LOG = new DeprecationLogger(LogManager.getLogger(SliceBuilder.class)); public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField ID_FIELD = new ParseField("id"); diff --git a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 19a62d74443..c4e33fa091b 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -19,13 +19,13 @@ package org.elasticsearch.search.sort; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.SortField; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,7 +50,7 @@ import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; * A sort builder to sort based on a document field. 
*/ public class FieldSortBuilder extends SortBuilder { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(FieldSortBuilder.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(FieldSortBuilder.class)); public static final String NAME = "field_sort"; public static final ParseField MISSING = new ParseField("missing"); diff --git a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java index 2c8c4e234db..07af9ffb10c 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -36,7 +37,6 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -72,7 +72,7 @@ import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; * A geo distance based sorting on a geo point like field. */ public class GeoDistanceSortBuilder extends SortBuilder { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(GeoDistanceSortBuilder.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(GeoDistanceSortBuilder.class)); public static final String NAME = "_geo_distance"; public static final String ALTERNATIVE_NAME = "_geoDistance"; diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 95478e08324..427d262ba9b 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.sort; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Scorable; @@ -31,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -66,7 +66,7 @@ import static org.elasticsearch.search.sort.NestedSortBuilder.NESTED_FIELD; * Script sort builder allows to sort based on a custom script expression. 
*/ public class ScriptSortBuilder extends SortBuilder { - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ScriptSortBuilder.class)); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(ScriptSortBuilder.class)); public static final String NAME = "_script"; public static final ParseField TYPE_FIELD = new ParseField("type"); diff --git a/server/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java b/server/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java index 79424541810..43ddc83ba15 100644 --- a/server/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java +++ b/server/src/main/java/org/elasticsearch/tasks/LoggingTaskListener.java @@ -19,16 +19,16 @@ package org.elasticsearch.tasks; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.logging.Loggers; /** * A TaskListener that just logs the response at the info level. Used when we * need a listener but aren't returning the result to the user. */ public final class LoggingTaskListener implements TaskListener { - private static final Logger logger = Loggers.getLogger(LoggingTaskListener.class); + private static final Logger logger = LogManager.getLogger(LoggingTaskListener.class); /** * Get the instance of NoopActionListener cast appropriately. diff --git a/server/src/main/java/org/elasticsearch/watcher/FileWatcher.java b/server/src/main/java/org/elasticsearch/watcher/FileWatcher.java index 0b0504f4609..e75e73626c1 100644 --- a/server/src/main/java/org/elasticsearch/watcher/FileWatcher.java +++ b/server/src/main/java/org/elasticsearch/watcher/FileWatcher.java @@ -18,9 +18,9 @@ */ package org.elasticsearch.watcher; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.nio.file.Files; @@ -38,7 +38,7 @@ public class FileWatcher extends AbstractResourceWatcher { private FileObserver rootFileObserver; private Path file; - private static final Logger logger = Loggers.getLogger(FileWatcher.class); + private static final Logger logger = LogManager.getLogger(FileWatcher.class); /** * Creates new file watcher on the given directory From 24df2eba805872eea1a159cbc1ce834575d2366c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=8C=AF=E8=8D=9F?= <605951224@qq.com> Date: Thu, 25 Oct 2018 23:22:50 +0900 Subject: [PATCH 63/67] Remove static import from HLRC doc snippet (#34834) --- .../client/documentation/RestClientDocumentation.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index 90801715b7e..9191f502558 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -36,7 +36,7 @@ import org.apache.http.nio.entity.NStringEntity; import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; +import 
org.elasticsearch.client.HttpAsyncResponseConsumerFactory; import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.client.Request; @@ -84,7 +84,8 @@ public class RestClientDocumentation { RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); builder.addHeader("Authorization", "Bearer " + TOKEN); // <1> builder.setHttpAsyncResponseConsumerFactory( // <2> - new HeapBufferedResponseConsumerFactory(30 * 1024 * 1024 * 1024)); + new HttpAsyncResponseConsumerFactory + .HeapBufferedResponseConsumerFactory(30 * 1024 * 1024 * 1024)); COMMON_OPTIONS = builder.build(); } // end::rest-client-options-singleton From d824cbe9929ea97a5fa7006c42dcc8f9cad9474a Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Thu, 25 Oct 2018 08:58:21 -0600 Subject: [PATCH 64/67] Test: ensure char[] doesn't begin with prefix (#34816) The testCharsBeginsWith test has a check that a random prefix of length 2 is not the prefix of a char[]. However, there is no check that the char[] is not randomly generated with the same two characters as the prefix. This change ensures that the char[] does not begin with the prefix. Closes #34765 --- .../org/elasticsearch/common/CharArraysTests.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java index 0e3a2179463..368886c7fd3 100644 --- a/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java +++ b/libs/core/src/test/java/org/elasticsearch/common/CharArraysTests.java @@ -43,12 +43,12 @@ public class CharArraysTests extends ESTestCase { assertArrayEquals(expectedChars, convertedChars); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34765") public void testCharsBeginsWith() { assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(4), null)); assertFalse(CharArrays.charsBeginsWith(null, null)); assertFalse(CharArrays.charsBeginsWith(null, randomAlphaOfLength(4).toCharArray())); - assertFalse(CharArrays.charsBeginsWith(randomAlphaOfLength(2), randomAlphaOfLengthBetween(3, 8).toCharArray())); + final String undesiredPrefix = randomAlphaOfLength(2); + assertFalse(CharArrays.charsBeginsWith(undesiredPrefix, randomAlphaOfLengthNotBeginningWith(undesiredPrefix, 3, 8))); final String prefix = randomAlphaOfLengthBetween(2, 4); assertTrue(CharArrays.charsBeginsWith(prefix, prefix.toCharArray())); @@ -73,4 +73,12 @@ public class CharArraysTests extends ESTestCase { assertFalse(CharArrays.constantTimeEquals(value, other)); assertFalse(CharArrays.constantTimeEquals(value.toCharArray(), other.toCharArray())); } + + private char[] randomAlphaOfLengthNotBeginningWith(String undesiredPrefix, int min, int max) { + char[] nonMatchingValue; + do { + nonMatchingValue = randomAlphaOfLengthBetween(min, max).toCharArray(); + } while (new String(nonMatchingValue).startsWith(undesiredPrefix)); + return nonMatchingValue; + } } From 3225b2dcd368c4e604567d8c604491c05904216d Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Thu, 25 Oct 2018 17:30:25 +0200 Subject: [PATCH 65/67] Add 6.6.0 version to master (#34847) This commit adds the 6.6.0 version constant to the master branch, and adapts the VersionTests.
--- .../main/java/org/elasticsearch/Version.java | 4 +++ .../java/org/elasticsearch/VersionTests.java | 5 ++-- .../transport/TcpTransportTests.java | 28 ++++++++++--------- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 5a921098b4c..0f636f76d8a 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -107,6 +107,8 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_6_4_3 = new Version(V_6_4_3_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_6_5_0_ID = 6050099; public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0); + public static final int V_6_6_0_ID = 6060099; + public static final Version V_6_6_0 = new Version(V_6_6_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0); public static final int V_7_0_0_alpha1_ID = 7000001; public static final Version V_7_0_0_alpha1 = new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0); @@ -125,6 +127,8 @@ public class Version implements Comparable, ToXContentFragment { switch (id) { case V_7_0_0_alpha1_ID: return V_7_0_0_alpha1; + case V_6_6_0_ID: + return V_6_6_0; case V_6_5_0_ID: return V_6_5_0; case V_6_4_3_ID: diff --git a/server/src/test/java/org/elasticsearch/VersionTests.java b/server/src/test/java/org/elasticsearch/VersionTests.java index c0d29e86fd6..e1a7633e1d0 100644 --- a/server/src/test/java/org/elasticsearch/VersionTests.java +++ b/server/src/test/java/org/elasticsearch/VersionTests.java @@ -181,7 +181,7 @@ public class VersionTests extends ESTestCase { // from 7.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 
5.x is // released since we need to bump the supported minor in Version#minimumCompatibilityVersion() - Version lastVersion = Version.V_6_5_0; // TODO: remove this once min compat version is a constant instead of method + Version lastVersion = Version.V_6_6_0; // TODO: remove this once min compat version is a constant instead of method assertEquals(lastVersion.major, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().major); assertEquals("did you miss to bump the minor in Version#minimumCompatibilityVersion()", lastVersion.minor, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().minor); @@ -340,7 +340,8 @@ public class VersionTests extends ESTestCase { public void testIsCompatible() { assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion())); - assertTrue(isCompatible(Version.V_6_5_0, Version.V_7_0_0_alpha1)); + assertFalse(isCompatible(Version.V_6_5_0, Version.V_7_0_0_alpha1)); + assertTrue(isCompatible(Version.V_6_6_0, Version.V_7_0_0_alpha1)); assertFalse(isCompatible(Version.fromId(2000099), Version.V_7_0_0_alpha1)); assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_5_0)); assertFalse(isCompatible(Version.fromString("7.0.0"), Version.fromString("8.0.0"))); diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index c6fb1f406cf..a17103789f2 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -156,27 +156,29 @@ public class TcpTransportTests extends ESTestCase { TcpTransport.ensureVersionCompatibility(VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT), Version.CURRENT, randomBoolean()); - TcpTransport.ensureVersionCompatibility(Version.fromString("6.0.0"), Version.fromString("7.0.0"), true); + final Version version = Version.fromString("7.0.0"); + TcpTransport.ensureVersionCompatibility(Version.fromString("6.0.0"), version, true); IllegalStateException ise = expectThrows(IllegalStateException.class, () -> - TcpTransport.ensureVersionCompatibility(Version.fromString("6.0.0"), Version.fromString("7.0.0"), false)); - assertEquals("Received message from unsupported version: [6.0.0] minimal compatible version is: [6.5.0]", ise.getMessage()); + TcpTransport.ensureVersionCompatibility(Version.fromString("6.0.0"), version, false)); + assertEquals("Received message from unsupported version: [6.0.0] minimal compatible version is: [" + + version.minimumCompatibilityVersion() + "]", ise.getMessage()); // For handshake we are compatible with N-2 - TcpTransport.ensureVersionCompatibility(Version.fromString("5.6.0"), Version.fromString("7.0.0"), true); + TcpTransport.ensureVersionCompatibility(Version.fromString("5.6.0"), version, true); ise = expectThrows(IllegalStateException.class, () -> - TcpTransport.ensureVersionCompatibility(Version.fromString("5.6.0"), Version.fromString("7.0.0"), false)); - assertEquals("Received message from unsupported version: [5.6.0] minimal compatible version is: [6.5.0]", - ise.getMessage()); + TcpTransport.ensureVersionCompatibility(Version.fromString("5.6.0"), version, false)); + assertEquals("Received message from unsupported version: [5.6.0] minimal compatible version is: [" + + version.minimumCompatibilityVersion() + "]", ise.getMessage()); ise = expectThrows(IllegalStateException.class, () -> - 
TcpTransport.ensureVersionCompatibility(Version.fromString("2.3.0"), version, true)); assertEquals("Received handshake message from unsupported version: [2.3.0] minimal compatible version is: [" + version.minimumCompatibilityVersion() + "]", ise.getMessage()); ise = expectThrows(IllegalStateException.class, () -> TcpTransport.ensureVersionCompatibility(Version.fromString("2.3.0"), version, false)); assertEquals("Received message from unsupported version: [2.3.0] minimal compatible version is: [" + version.minimumCompatibilityVersion() + "]", ise.getMessage()); } public void testCompressRequest() throws IOException { From c0c6a28e863526da7539ec4a968ac231fa44afc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 25 Oct 2018 17:59:59 +0200 Subject: [PATCH 66/67] [Docs] Add `indices.query.bool.max_clause_count` setting (#34779) This change adds a section about the global search setting `indices.query.bool.max_clause_count` that limits the number of boolean clauses allowed in a Lucene BooleanQuery. Closes #19858 --- docs/reference/modules/indices.asciidoc | 6 ++++++ .../modules/indices/search-settings.asciidoc | 16 ++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 docs/reference/modules/indices/search-settings.asciidoc diff --git a/docs/reference/modules/indices.asciidoc b/docs/reference/modules/indices.asciidoc index 5f7bb7b9aba..33ab7ecb4a8 100644 --- a/docs/reference/modules/indices.asciidoc +++ b/docs/reference/modules/indices.asciidoc @@ -30,6 +30,10 @@ Available settings include: Control the resource limits on the shard recovery process. +<<search-settings>>:: + + Control global search settings. + include::indices/circuit_breaker.asciidoc[] include::indices/fielddata.asciidoc[] @@ -42,3 +46,5 @@ include::indices/request_cache.asciidoc[] include::indices/recovery.asciidoc[] +include::indices/search-settings.asciidoc[] + diff --git a/docs/reference/modules/indices/search-settings.asciidoc b/docs/reference/modules/indices/search-settings.asciidoc new file mode 100644 index 00000000000..ad75de1291c --- /dev/null +++ b/docs/reference/modules/indices/search-settings.asciidoc @@ -0,0 +1,16 @@ +[[search-settings]] +=== Search Settings + +The following _expert_ setting can be set to manage global search limits. + +`indices.query.bool.max_clause_count`:: + Defaults to `1024`. + +This setting limits the number of clauses a Lucene BooleanQuery can have. The +default of 1024 is quite high and should normally be sufficient. This limit does +not only affect Elasticsearch's `bool` query, but many other queries are rewritten to Lucene's +BooleanQuery internally. The limit is in place to prevent searches from becoming too large +and taking up too much CPU and memory. If you consider increasing this setting, +make sure you have exhausted all other options before doing so. Higher values can lead +to performance degradation and memory issues, especially in clusters with a high load or +few resources.
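For context on the new `indices.query.bool.max_clause_count` documentation above: the clause limit is enforced by Lucene itself while a query is being built. A minimal sketch of the failure mode, assuming the Lucene 7.x static `BooleanQuery.getMaxClauseCount()` API and its nested `TooManyClauses` exception (the class name `MaxClauseCountSketch` is invented for illustration and is not part of the patch above):

[source,java]
----
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class MaxClauseCountSketch {
    public static void main(String[] args) {
        // Lucene's global limit; indices.query.bool.max_clause_count overrides it at node startup.
        final int limit = BooleanQuery.getMaxClauseCount();
        final BooleanQuery.Builder builder = new BooleanQuery.Builder();
        try {
            // Adding one clause more than the limit fails while the query is still being built.
            for (int i = 0; i <= limit; i++) {
                builder.add(new TermQuery(new Term("field", "value" + i)), BooleanClause.Occur.SHOULD);
            }
        } catch (BooleanQuery.TooManyClauses e) {
            System.out.println("rejected after " + limit + " clauses: " + e.getMessage());
        }
    }
}
----

At node startup Elasticsearch feeds the configured value into this same static Lucene hook, which is why the setting applies to the whole node rather than being dynamic or per index.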
From 70871b5af789f6f4b499122bc302744f32588c29 Mon Sep 17 00:00:00 2001 From: Christophe Bismuth Date: Thu, 25 Oct 2018 18:12:50 +0200 Subject: [PATCH 67/67] Check self references in metric agg after last doc collection (#33593) (#34001) * Check self references in metric agg after last doc collection (#33593) * Revert 0aff5a30c5dbad9f476be14f34b81e2d1991bb0f (#33593) * Check self refs in metric agg only once in post collection hook (#33593) * Remove unnecessary mocking (#33593) --- .../aggregations/metrics/ScriptedMetricAggregator.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java index 345b21d0388..da936a76ee1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java @@ -80,7 +80,6 @@ class ScriptedMetricAggregator extends MetricsAggregator { leafMapScript.setDocument(doc); leafMapScript.execute(); - CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script"); } }; } @@ -103,4 +102,10 @@ class ScriptedMetricAggregator extends MetricsAggregator { return new InternalScriptedMetric(name, null, reduceScript, pipelineAggregators(), metaData()); } + @Override + protected void doPostCollection() throws IOException { + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script"); + + super.doPostCollection(); + } }
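To illustrate what the relocated check guards against, here is a rough standalone sketch. The class name and `main` harness are invented for illustration; the two-argument `CollectionUtils.ensureNoSelfReferences(Object, String)` call and its message hint are the ones used in the diff above, and the `IllegalArgumentException` on a cycle is assumed from that helper's behaviour rather than shown in the patch.

[source,java]
----
import org.elasticsearch.common.util.CollectionUtils;

import java.util.HashMap;
import java.util.Map;

public class SelfReferenceCheckSketch {
    public static void main(String[] args) {
        // Stand-in for the per-shard state that a scripted metric map script mutates for each document.
        final Map<String, Object> aggState = new HashMap<>();
        aggState.put("sum", 42L);

        // A well-formed state passes the check.
        CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script");

        // A map script that stores the state inside itself creates a cycle that cannot be serialized.
        aggState.put("self", aggState);
        try {
            // After this change the check runs once, from doPostCollection(), rather than per document.
            CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script");
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
----

Running the traversal once from `doPostCollection()` keeps the guarantee that a self-referencing state never reaches serialization, while paying the cost once per shard instead of once per collected document.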